#!/bin/bash
##########################################################################
# This script prepares the VirtualBox application for installation
# @author César Rodríguez González
# @version 1.3.3, 2017-03-19
# @license MIT
##########################################################################
# Check that the script is being run by root or a sudoer user
if [ "$(id -u)" != "0" ]; then echo ""; echo "This script must be executed by a root or sudoer user"; echo ""; exit 1; fi
# Parameters
if [ -n "$1" ]; then scriptRootFolder="$1"; else scriptRootFolder="`pwd`/../.."; fi
if [ -n "$2" ]; then username="$2"; else username="`whoami`"; fi
if [ -n "$3" ]; then homeFolder="$3"; else homeFolder="$HOME"; fi
# Add common variables
. $scriptRootFolder/common/commonVariables.properties
### Commands to add a third party repository if required
echo "deb http://download.virtualbox.org/virtualbox/debian $codename contrib" > /etc/apt/sources.list.d/virtualbox.list
wget -q https://www.virtualbox.org/download/oracle_vbox_2016.asc -O- | apt-key add -
wget -q https://www.virtualbox.org/download/oracle_vbox.asc -O- | apt-key add -
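# Note: apt-key is deprecated on newer Debian/Ubuntu releases. A hedged
# alternative using a dedicated keyring (the keyring path below is an
# assumption, not part of the original script):
#   wget -qO- https://www.virtualbox.org/download/oracle_vbox_2016.asc \
#     | gpg --dearmor > /usr/share/keyrings/oracle-virtualbox.gpg
#   echo "deb [signed-by=/usr/share/keyrings/oracle-virtualbox.gpg] http://download.virtualbox.org/virtualbox/debian $codename contrib" \
#     > /etc/apt/sources.list.d/virtualbox.list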
|
declare module 'mxgraph' {
/**
* @class mxUndoManager
*
* Implements a command history. When changing the graph model, an
* {@link mxUndoableEdit} object is created at the start of the transaction (when
* model.beginUpdate is called). All atomic changes are then added to this
* object until the last model.endUpdate call, at which point the
* {@link mxUndoableEdit} is dispatched in an event, and added to the history inside
* {@link mxUndoManager}. This is done by an event listener in
* {@link mxEditor.installUndoHandler}.
*
* Each atomic change of the model is represented by an object (eg.
* {@link mxRootChange}, {@link mxChildChange}, {@link mxTerminalChange} etc) which contains the
* complete undo information. The {@link mxUndoManager} also listens to the
* {@link mxGraphView} and stores its changes to the current root as insignificant
* undoable changes, so that drilling (step into, step up) is undone.
*
* This means when you execute an atomic change on the model, then change the
* current root on the view and click undo, the change of the root will be
* undone together with the change of the model so that the display represents
* the state at which the model was changed. However, these changes are not
* transmitted for sharing as they do not represent a state change.
*
* ### Example
*
* When adding an undo manager to a graph, make sure to add it
* to the model and the view as well to maintain a consistent
* display across multiple undo/redo steps.
*
* @example
* ```javascript
* var undoManager = new mxUndoManager();
* var listener = function(sender, evt)
* {
* undoManager.undoableEditHappened(evt.getProperty('edit'));
* };
* graph.getModel().addListener(mxEvent.UNDO, listener);
* graph.getView().addListener(mxEvent.UNDO, listener);
* ```
*
* The code creates a function that informs the undoManager
* of an undoable edit and binds it to the undo event of
* {@link mxGraphModel} and {@link mxGraphView} using
* {@link mxEventSource.addListener}.
*
* ### Event: mxEvent.CLEAR
*
* Fires after {@link clear} was invoked. This event has no properties.
*
* ### Event: mxEvent.UNDO
*
* Fires after a significant edit was undone in {@link undo}. The `edit`
* property contains the {@link mxUndoableEdit} that was undone.
*
* ### Event: mxEvent.REDO
*
* Fires after a significant edit was redone in {@link redo}. The `edit`
* property contains the {@link mxUndoableEdit} that was redone.
*
* ### Event: mxEvent.ADD
*
* Fires after an undoable edit was added to the history. The `edit`
* property contains the {@link mxUndoableEdit} that was added.
*/
class mxUndoManager extends mxEventSource {
/**
* Constructs a new undo manager with the given history size. If no history
* size is given, then a default size of 100 steps is used.
* @param {number} [size] max history size
*/
constructor(size?: number);
/**
* Maximum command history size. 0 means unlimited history. Default is
* 100.
* @default 100
*/
size: number;
/**
* Array that contains the steps of the command history.
*/
history: Array<mxUndoableEdit>;
/**
* Index of the element to be added next.
*/
indexOfNextAdd: number;
/**
* Returns true if the history is empty.
*/
isEmpty(): boolean;
/**
* Clears the command history.
*/
clear(): void;
/**
* Returns true if an undo is possible.
*/
canUndo(): boolean;
/**
* Undoes the last change.
*/
undo(): void;
/**
* Returns true if a redo is possible.
*/
canRedo(): boolean;
/**
* Redoes the last change.
*/
redo(): void;
/**
* Method to be called to add new undoable edits to the {@link history}.
*/
undoableEditHappened(undoableEdit: mxUndoableEdit): void;
/**
* Removes all pending steps after {@link indexOfNextAdd} from the history,
* invoking die on each edit. This is called from {@link undoableEditHappened}.
*/
trim(): void;
}
}
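// A hedged usage sketch against these typings (assumes an existing mxGraph
// instance `graph` and the mxEvent global; illustration only):
//
//   const undoManager = new mxUndoManager(50); // cap the history at 50 steps
//   const listener = (sender: any, evt: any) => {
//     undoManager.undoableEditHappened(evt.getProperty('edit'));
//   };
//   graph.getModel().addListener(mxEvent.UNDO, listener);
//   graph.getView().addListener(mxEvent.UNDO, listener);
//   if (undoManager.canUndo()) undoManager.undo();
//   if (undoManager.canRedo()) undoManager.redo();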
|
#!/usr/bin/env bash
# save current working directory where the test related information is stored
cwd=$(pwd)
display_help() {
echo "Usage: $0 [-r]" >&2
echo
echo " -r, --report create line reports"
echo
exit 1
}
if [ "$1" == "-h" ] ; then
echo "Usage: `basename $0` [-h] [-r]"
exit 0
fi
if [ "$1" == "-h" ] ; then
echo "Usage: `basename $0` [-h] [-r]"
exit 0
fi
# run mypy from the base directory, since otherwise the HTML report will not be generated
cd ..
# check command line parameters
while :
do
case "$1" in
-h | --help)
display_help # Call your function
exit 0
;;
-r | --report)
python3 -m mypy \
--no-incremental \
--config-file ${cwd}/mypy.ini \
--linecount-report ${cwd}/mypy-report \
--html-report ${cwd}/mypy-report \
--package pde
exit 0
;;
--) # End of all options
shift
break
;;
-*)
echo "Error: Unknown option: $1" >&2
display_help
exit 1
;;
*) # No more options
python3 -m mypy \
--config-file ${cwd}/mypy.ini \
--pretty \
--package pde
exit 0 ;;
esac
done
|
function intersection(arr1, arr2) {
let result = [];
for (let num of arr1) {
if (arr2.indexOf(num) > -1) {
result.push(num);
}
}
return result;
}
const arr1 = [3, 5, 2, 1, 8];
const arr2 = [3, 8, 5, 0];
console.log(intersection(arr1, arr2));
// Output: [3, 5, 8]
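// For larger inputs, a Set-based lookup avoids the O(n) indexOf scan per
// element. A hedged variant of the same function (not a required change):
function intersectionWithSet(a, b) {
  const lookup = new Set(b); // O(1) membership tests
  return a.filter(num => lookup.has(num));
}
console.log(intersectionWithSet(arr1, arr2));
// Output: [3, 5, 8]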
|
public class EntryDataProcessor {
    // Assumption: 'entryType' is referenced but never declared in the original
    // snippet; it is modelled here as a field so the class compiles.
    private EntryType entryType;

    public EntryData createEntryData(EntryPacket entryPacket, EntryDataType entryDataType, int versionID, long expiration, boolean keepExpiration, boolean createXtnEntryInfo) {
        EntryTypeDesc entryTypeDesc = entryPacket.getTypeDescriptor().getEntryTypeDesc(entryType);
        int version = versionID > 0 ? versionID : entryPacket.getVersion();
        long lease = (expiration > 0 || keepExpiration) ? expiration : LeaseManager.toAbsoluteTime(entryPacket.getTTL());
        if (entryDataType == EntryDataType.FLAT) {
            return new FlatEntryData(entryPacket.getFieldValues(), entryPacket.getDynamicProperties(),
                    entryTypeDesc, version, lease, createXtnEntryInfo);
        } else {
            return new UserTypeEntryData(entryPacket.toObject(entryType), entryTypeDesc, version, lease, createXtnEntryInfo);
        }
    }
}
|
#!/bin/bash
#
# Caffe training script
# Tomas Pfister 2015
if [ "$2" = "" ]; then
echo "$0 net_name gpu_id [snap_iter] [finetune:0/1]"
exit
fi
net=$1
gpu_id=$2
snap_iter=$3
finetune=$4
snap_dir="data/$net/snapshots"
snapfile="heatmap_train";
mkdir -p $snap_dir
if [ "$finetune" = "1" ]; then cmd="weights"; ext="caffemodel"; else cmd="snapshot"; ext="solverstate"; fi
if [ "$snap_iter" != "" ] && [ "$snap_iter" != "-1" ]; then snap_str="-$cmd $snap_dir/${snapfile}_iter_$snap_iter.$ext"; fi
./build/tools/caffe train $snap_str \
-gpu $gpu_id -solver models/$net/solver.prototxt 2>&1 | tee -a $snap_dir/train_0.log
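# Illustrative invocation (net name and iteration are hypothetical), assuming
# this script is saved as train.sh: resume training models/myNet on GPU 0 from
# the solverstate snapshot at iteration 20000:
#   ./train.sh myNet 0 20000 0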
|
$LOAD_PATH.push File.expand_path('lib', __dir__)
# Maintain your gem's version:
require 'helena_administration/version'
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = 'helena_administration'
s.version = HelenaAdministration::VERSION
s.authors = ['<NAME>']
s.email = ['<EMAIL>']
s.homepage = 'https://github.com/gurix/helena_administration'
s.summary = 'Helena Administration is a simple rails application that provides a user interface to manage apps running with the Helena framework.'
s.description = 'Helena Administration is a simple rails application that provides a user interface to manage apps running with the Helena framework.'
s.license = 'GPL3'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.add_runtime_dependency 'codemirror-rails', '>= 5'
s.add_runtime_dependency 'kaminari-actionview'
s.add_runtime_dependency 'kaminari-mongoid'
s.add_runtime_dependency 'rails', '>= 4.2'
s.add_runtime_dependency 'slim-rails', '>= 3.0'
s.add_dependency 'haml-rails', '>= 0.5'
s.add_dependency 'helena', '~> 2.0'
s.add_dependency 'jquery-rails', '~> 4'
s.add_dependency 'mongoid', ['>= 4.0']
s.add_dependency 'mongoid-simple-tags', '~> 0.1'
s.add_dependency 'mongoid_orderable', '~> 5.2'
s.add_runtime_dependency 'sass-rails', '~> 5.0'
s.add_dependency 'breadcrumbs_on_rails', '>= 3.0.1'
s.add_dependency 'rails-i18n'
s.add_dependency 'simple_form', '~> 4'
s.add_runtime_dependency 'coffee-rails', '~> 4.1'
s.add_runtime_dependency 'font-awesome-sass'
s.add_dependency 'bootstrap', '>= 4.3.1'
s.add_dependency 'responders', '~> 2.0'
s.add_development_dependency 'rspec-collection_matchers', '~> 1'
s.add_development_dependency 'rspec-rails', '~> 3'
end
|
import nltk

def parse_sentence(sentence):
    tokens = nltk.word_tokenize(sentence)
    tagged_tokens = nltk.pos_tag(tokens)
    subject = None
    predicate = None
    for token, tag in tagged_tokens:
        if tag == 'NN':
            subject = token
        elif tag == 'VBP':
            predicate = token
    return subject, predicate
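# A hedged usage sketch: the sentence is illustrative, and the result depends
# on the tagger (here 'cheese' is typically tagged NN and 'like' VBP).
if __name__ == "__main__":
    nltk.download('punkt', quiet=True)
    nltk.download('averaged_perceptron_tagger', quiet=True)
    print(parse_sentence("I like cheese"))  # likely ('cheese', 'like')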
|
#!/bin/sh
FLASK_APP=hello.py flask run -h 0.0.0.0 -p 80
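# For context, a minimal hello.py that this launcher expects might look like
# the sketch below (illustrative only; the actual app is not shown here):
#   from flask import Flask
#   app = Flask(__name__)
#   @app.route('/')
#   def hello():
#       return 'Hello, World!'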
|
package de.htwg.se.durak.model.gameElementsComponent
/**
* Card with Unicode and String properties
*/
trait CardInterface {
/**
* Card rank
*
* @return Card rank
*/
val rank: Int
/**
* Card symbol
*
* @return Card symbol
*/
val symbol: Int
/**
* Rank as string
*
* @return Card rank as simple string
*/
def rankString: String
/**
* Symbol as string
*
* @return Card symbol as simple string
*/
def symbolString: String
/**
* Rank as unicode
*
* @return Card rank as unicode string
*/
def rankUnicode: String
/**
* Symbol as unicode
*
* @return Card symbol as unicode string
*/
def symbolUnicode: String
/**
* Card rank and symbol as string
* example "Rang: X, Typ: Y"
*
* @return Card rank and symbol as string
*/
override def toString: String
}
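// A minimal illustrative implementation of the trait; the rank/symbol
// encodings below are assumptions, not the project's actual mapping.
case class Card(rank: Int, symbol: Int) extends CardInterface {
  def rankString: String = rank.toString
  def symbolString: String = Vector("Hearts", "Diamonds", "Clubs", "Spades")(symbol)
  def rankUnicode: String = rankString
  def symbolUnicode: String = Vector("\u2665", "\u2666", "\u2663", "\u2660")(symbol)
  override def toString: String = s"Rang: $rankString, Typ: $symbolString"
}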
|
class Admin::ForumsController < Admin::BaseController
before_action :set_forum, only: [:show, :edit, :update]
def index
@forums = Forum.order(id: :desc).page(params[:page])
end
def show
end
def new
@forum = Forum.new
end
def create
@forum = Forum.new forum_params
if @forum.save
redirect_to admin_forum_url(@forum), notice: t('flash.forum_is_successfully_created')
else
render 'update_form'
end
end
def edit
end
def update
if @forum.update forum_params
redirect_to admin_forum_url(@forum), notice: t('flash.forum_is_successfully_updated')
else
render 'update_form'
end
end
private
def forum_params
params.require(:forum).permit(:name, :slug, :description)
end
def set_forum
@forum = Forum.find params[:id]
end
end
|
#Training details
#HRNet_W32_C
python train.py \
--model=HRNet_W32_C \
--batch_size=256 \
--total_images=1281167 \
--class_dim=1000 \
--lr_strategy=piecewise_decay \
--lr=0.1 \
--num_epochs=120 \
--model_save_dir=output/ \
--l2_decay=1e-4
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# Written by <NAME> (<EMAIL>)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import pprint
import torch
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torch.utils.data.distributed
import torchvision.transforms as transforms
import _init_paths
from core.config import config
from core.config import update_config
from core.config import update_dir
from core.loss import JointsMSELoss
from core.function import validate
from utils.utils import create_logger
import dataset
import models
def parse_args():
parser = argparse.ArgumentParser(description='Train keypoints network')
# general
parser.add_argument('--cfg',
help='experiment configure file name',
required=True,
type=str)
args, rest = parser.parse_known_args()
# update config
update_config(args.cfg)
# training
parser.add_argument('--frequent',
help='frequency of logging',
default=config.PRINT_FREQ,
type=int)
parser.add_argument('--gpus',
help='gpus',
type=str)
parser.add_argument('--workers',
help='num of dataloader workers',
type=int)
parser.add_argument('--model-file',
help='model state file',
type=str)
parser.add_argument('--use-detect-bbox',
help='use detect bbox',
action='store_true')
parser.add_argument('--flip-test',
help='use flip test',
action='store_true')
parser.add_argument('--post-process',
help='use post process',
action='store_true')
parser.add_argument('--shift-heatmap',
help='shift heatmap',
action='store_true')
parser.add_argument('--coco-bbox-file',
help='coco detection bbox file',
type=str)
args = parser.parse_args()
return args
def reset_config(config, args):
if args.gpus:
config.GPUS = args.gpus
if args.workers:
config.WORKERS = args.workers
if args.use_detect_bbox:
config.TEST.USE_GT_BBOX = not args.use_detect_bbox
if args.flip_test:
config.TEST.FLIP_TEST = args.flip_test
if args.post_process:
config.TEST.POST_PROCESS = args.post_process
if args.shift_heatmap:
config.TEST.SHIFT_HEATMAP = args.shift_heatmap
if args.model_file:
config.TEST.MODEL_FILE = args.model_file
if args.coco_bbox_file:
config.TEST.COCO_BBOX_FILE = args.coco_bbox_file
def main():
args = parse_args()
reset_config(config, args)
logger, final_output_dir, tb_log_dir = create_logger(
config, args.cfg, 'valid')
logger.info(pprint.pformat(args))
logger.info(pprint.pformat(config))
# cudnn related setting
cudnn.benchmark = config.CUDNN.BENCHMARK
torch.backends.cudnn.deterministic = config.CUDNN.DETERMINISTIC
torch.backends.cudnn.enabled = config.CUDNN.ENABLED
model = eval('models.'+config.MODEL.NAME+'.get_pose_net')(
config, is_train=False
)
if config.TEST.MODEL_FILE:
logger.info('=> loading model from {}'.format(config.TEST.MODEL_FILE))
model.load_state_dict(torch.load(config.TEST.MODEL_FILE))
else:
model_state_file = os.path.join(final_output_dir,
'final_state.pth.tar')
logger.info('=> loading model from {}'.format(model_state_file))
model.load_state_dict(torch.load(model_state_file))
gpus = [int(i) for i in config.GPUS.split(',')]
model = torch.nn.DataParallel(model, device_ids=gpus).cuda()
# define loss function (criterion) and optimizer
criterion = JointsMSELoss(
use_target_weight=config.LOSS.USE_TARGET_WEIGHT
).cuda()
# Data loading code
# normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
# std=[0.229, 0.224, 0.225])
normalize = transforms.Normalize(mean=[0.7137, 0.6628, 0.6519], std=[0.2970, 0.3017, 0.2979])
valid_dataset = eval('dataset.'+config.DATASET.DATASET)(
config,
config.DATASET.ROOT,
config.DATASET.TEST_SET,
False,
transforms.Compose([
transforms.ToTensor(),
normalize,
])
)
valid_loader = torch.utils.data.DataLoader(
valid_dataset,
batch_size=config.TEST.BATCH_SIZE*len(gpus),
shuffle=False,
num_workers=config.WORKERS,
pin_memory=True
)
# evaluate on validation set
validate(config, valid_loader, valid_dataset, model, criterion,
final_output_dir, tb_log_dir)
if __name__ == '__main__':
main()
|
package com.borunovv;
import com.borunovv.http.HTTPSession;
import com.borunovv.ssl.SSLServer;
/**
* Entry point.
* <p>
* Start server on port 9096.
* You can check it via https://localhost:9096
* To stop the server gracefully visit this URL: https://localhost:9096/stop
* <p>
* Note1: ensure the 'keystore.jks' file exists in the ./keystore/ directory relative to the current directory.
* To generate 'self-signed' certificate for SSL into keystore.jks file use following command:
* keytool -genkey -keyalg RSA -alias selfsigned -keystore keystore.jks -storepass password -validity 10000 -keysize 2048
* <p>
* (keytool is a standard java key generation tool from JAVA_HOME/bin directory)
* <p>
* Note2: the browser will warn you about an insecure certificate.
* That is expected because we use a self-signed certificate for development purposes only.
* For production you need to obtain a real certificate.
*/
public class Main {
private static final int PORT = 9096;
public static void main(String[] args) throws Exception {
System.out.println("Server started on port " + PORT);
System.out.println("To check it's work visit URL: https://localhost:9096/hello?world=1");
System.out.println("To stop the server visit URL: https://localhost:9096/stop");
new SSLServer(PORT,
"./keystore/keystore.jks",
"password",
HTTPSession::new)
.start();
System.out.println("Server stopped");
}
}
|
package com.sun.javafx.scene;
import com.sun.javafx.event.BasicEventDispatcher;
import com.sun.javafx.event.CompositeEventDispatcher;
import com.sun.javafx.event.EventHandlerManager;
/**
* An {@code EventDispatcher} for {@code Scene}. It is formed by a chain
* of {@code KeyboardShortcutsHandler}, {@code EnteredExitedHandler} and {@code EventHandlerManager}.
*/
public class SceneEventDispatcher extends CompositeEventDispatcher {
private final KeyboardShortcutsHandler keyboardShortcutsHandler;
private final EnteredExitedHandler enteredExitedHandler;
private final EventHandlerManager eventHandlerManager;
public SceneEventDispatcher(Object eventSource) {
this(new KeyboardShortcutsHandler(), new EnteredExitedHandler(eventSource), new EventHandlerManager(eventSource));
}
public SceneEventDispatcher(
KeyboardShortcutsHandler keyboardShortcutsHandler,
EnteredExitedHandler enteredExitedHandler,
EventHandlerManager eventHandlerManager) {
this.keyboardShortcutsHandler = keyboardShortcutsHandler;
this.enteredExitedHandler = enteredExitedHandler;
this.eventHandlerManager = eventHandlerManager;
keyboardShortcutsHandler.insertNextDispatcher(enteredExitedHandler);
enteredExitedHandler.insertNextDispatcher(eventHandlerManager);
}
public final KeyboardShortcutsHandler getKeyboardShortcutsHandler() {
return keyboardShortcutsHandler;
}
public final EnteredExitedHandler getEnteredExitedHandler() {
return enteredExitedHandler;
}
public final EventHandlerManager getEventHandlerManager() {
return eventHandlerManager;
}
@Override
public BasicEventDispatcher getFirstDispatcher() {
return keyboardShortcutsHandler;
}
@Override
public BasicEventDispatcher getLastDispatcher() {
return eventHandlerManager;
}
}
|
SELECT MIN(price)
FROM products
WHERE category = 'Clothing';
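-- A related variant against the same table: the lowest price in every
-- category rather than just 'Clothing'.
SELECT category, MIN(price) AS min_price
FROM products
GROUP BY category;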
|
#include <iostream>
#include <vector>
#include <memory>
#include <cstdlib>
#include <ctime>
#include <GL/glew.h>
#include <GL/gl.h>
#include <GLFW/glfw3.h>
#include <ft2build.h>
#include "shaders.h"
#include "background.h"
#include "block.h"
#include "text.h"
FT_Library ftlib;
int main()
{
srand(time(NULL));
if(!glfwInit())
{
std::cerr << "Could not initialise GLFW";
return 1;
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
/* this needs to be specified before window creation,
* otherwise the context will have a different GL version
* NOTE: keeping this to GL3.3 for backwards compatibility with older hardware.
*/
int num_modes;
const GLFWvidmode *vid_modes = glfwGetVideoModes(glfwGetPrimaryMonitor(), &num_modes);
GLFWwindow *window =
glfwCreateWindow(vid_modes[num_modes - 1].width,
vid_modes[num_modes - 1].height, // last video mode in the array is the maximum monitor resolution
"OpenTerra",
NULL, // setting this to glfwGetPrimaryMonitor() will make the window fullscreen
NULL /* this parameter can be changed to a second window
* to share contents with. No need for it though. */ );
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE; // must be set before glewInit() to expose core-profile functions
if(glewInit() != GLEW_OK)
{
std::cerr << "Could not initialise GLEW";
return 2;
}
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
bg = compile_shader(vert, frag);
tex_shader = compile_shader(tex_vert, tex_frag);
text_shader = compile_shader(tex_vert, text_frag);
FT_Init_FreeType(&ftlib);
glClearColor(0, .4, .7, 1);
/* in case something fails, the background or the entire screen will be cyan
* in case something fails *really* badly, the screen will be black
*/
int w, h;
Background b;
std::vector< std::unique_ptr<Block> > blocks;
for(int i = -10; i<10; i++)
for(int j = -10; j < 10; j++)
{
if((i > -10 && i < 9) && (j > -10 && j < 9)) continue;
blocks.emplace_back(new Block("block.tga", i*32, j*32, 32, 32,
(i+10)*32/360.f, (j+10)*32/360.f, 1));
}
Text text("S", -5, 0);
glfwShowWindow(window);
resolution = new GLfloat[2];
do
{
glfwGetWindowSize(window, &w, &h);
glViewport(0, 0, w, h);
resolution[0] = w; resolution[1] = h;
glClear(GL_COLOR_BUFFER_BIT);
b.Draw();
for(size_t i = 0; i < blocks.size(); i++)
blocks[i]->Draw();
text.Draw();
glfwSwapBuffers(window);
glfwPollEvents();
}
while( !glfwWindowShouldClose(window) && !glfwGetKey(window, GLFW_KEY_ESCAPE));
glfwDestroyWindow(window);
return 0;
}
|
Agent.create(name: "myMBP", token: "<KEY>")
Task.create(name: "ls root directory", script: "ls -al /")
|
package cmd
import (
"excelc/makers"
"excelc/parser"
"github.com/spf13/cobra"
)
var goCmd = &cobra.Command{
Use: "go <Input> <Output",
Short: "生成Go代码",
Args: cobra.ExactValidArgs(2),
Run: func(cmd *cobra.Command, args []string) {
parser.Build(new(makers.GoMaker), args[0], args[1])
},
}
func init() {
genCmd.AddCommand(goCmd)
goCmd.Flags().StringVarP(&parser.Opts.BinaryPath, "binary-path", "", "", "serialized output path (relative to the output path)")
goCmd.Flags().StringVarP(&parser.Opts.PackageName, "package-name", "", "config", "package name")
goCmd.Flags().StringVarP(&parser.Opts.InstanceName, "instance-name", "", "Config", "instance name")
_ = goCmd.MarkFlagRequired("binary-path")
}
|
#! @BASH@
# Copyright (C) 1999-2011, 2012 Free Software Foundation, Inc.
# This file is part of the GNU C Library.
# Contributed by Ulrich Drepper <drepper@gnu.org>, 1999.
# The GNU C Library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# The GNU C Library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with the GNU C Library; if not, write to the Free
# Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
# 02111-1307 USA.
memusageso='@SLIBDIR@/libmemusage.so'
memusagestat='@BINDIR@/memusagestat'
TEXTDOMAIN=libc
# Print usage message.
do_usage() {
printf >&2 $"Try \`%s --help' or \`%s --usage' for more information.\n" memusage memusage
exit 1
}
# Message for missing argument.
do_missing_arg() {
printf >&2 $"%s: option '%s' requires an argument\n" memusage "$1"
do_usage
}
# Print help message
do_help() {
printf $"Usage: memusage [OPTION]... PROGRAM [PROGRAMOPTION]...
Profile memory usage of PROGRAM.
-n,--progname=NAME Name of the program file to profile
-p,--png=FILE Generate PNG graphic and store it in FILE
-d,--data=FILE Generate binary data file and store it in FILE
-u,--unbuffered Don't buffer output
-b,--buffer=SIZE Collect SIZE entries before writing them out
--no-timer Don't collect additional information through timer
-m,--mmap Also trace mmap & friends
-?,--help Print this help and exit
--usage Give a short usage message
-V,--version Print version information and exit
The following options only apply when generating graphical output:
-t,--time-based Make graph linear in time
-T,--total Also draw graph of total memory use
--title=STRING Use STRING as title of the graph
-x,--x-size=SIZE Make graphic SIZE pixels wide
-y,--y-size=SIZE Make graphic SIZE pixels high
Mandatory arguments to long options are also mandatory for any corresponding
short options.
"
printf $"For bug reporting instructions, please see:
%s.
" "@REPORT_BUGS_TO@"
exit 0
}
do_version() {
echo 'memusage @PKGVERSION@@VERSION@'
printf $"Copyright (C) %s Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
" "2012"
printf $"Written by %s.
" "Ulrich Drepper"
exit 0
}
# These variables are local
buffer=
data=
memusagestat_args=
notimer=
png=
progname=
tracemmap=
# Process arguments. But stop as soon as the program name is found.
while test $# -gt 0; do
case "$1" in
-V | --v | --ve | --ver | --vers | --versi | --versio | --version)
do_version
;;
-\? | --h | --he | --hel | --help)
do_help
;;
--us | --usa | --usag | --usage)
echo $"Syntax: memusage [--data=FILE] [--progname=NAME] [--png=FILE] [--unbuffered]
[--buffer=SIZE] [--no-timer] [--time-based] [--total]
[--title=STRING] [--x-size=SIZE] [--y-size=SIZE]
PROGRAM [PROGRAMOPTION]..."
exit 0
;;
-n | --pr | --pro | --prog | --progn | --progna | --prognam | --progname)
if test $# -eq 1; then
do_missing_arg $1
fi
shift
progname="$1"
;;
--pr=* | --pro=* | --prog=* | --progn=* | --progna=* | --prognam=* | --progname=*)
progname=${1##*=}
;;
-p | --pn | --png)
if test $# -eq 1; then
do_missing_arg $1
fi
shift
png="$1"
;;
--pn=* | --png=*)
png=${1##*=}
;;
-d | --d | --da | --dat | --data)
if test $# -eq 1; then
do_missing_arg $1
fi
shift
data="$1"
;;
--d=* | --da=* | --dat=* | --data=*)
data=${1##*=}
;;
-u | --un | --unb | --unbu | --unbuf | --unbuff | --unbuffe | --unbuffer | --unbuffere | --unbuffered)
buffer=1
;;
-b | --b | --bu | --buf | --buff | --buffe | --buffer)
if test $# -eq 1; then
do_missing_arg $1
fi
shift
buffer="$1"
;;
--b=* | --bu=* | --buf=* | --buff=* | --buffe=* | --buffer=*)
buffer=${1##*=}
;;
--n | --no | --no- | --no-t | --no-ti | --no-tim | --no-time | --no-timer)
notimer=yes
;;
-m | --m | --mm | --mma | --mmap)
tracemmap=yes
;;
-t | --tim | --time | --time- | --time-b | --time-ba | --time-bas | --time-base | --time-based)
memusagestat_args="$memusagestat_args -t"
;;
-T | --to | --tot | --tota | --total)
memusagestat_args="$memusagestat_args -T"
;;
--tit | --titl | --title)
if test $# -eq 1; then
do_missing_arg $1
fi
shift
memusagestat_args="$memusagestat_args -s $1"
;;
--tit=* | --titl=* | --title=*)
memusagestat_args="$memusagestat_args -s ${1##*=}"
;;
-x | --x | --x- | --x-s | --x-si | --x-siz | --x-size)
if test $# -eq 1; then
do_missing_arg $1
fi
shift
memusagestat_args="$memusagestat_args -x $1"
;;
--x=* | --x-=* | --x-s=* | --x-si=* | --x-siz=* | --x-size=*)
memusagestat_args="$memusagestat_args -x ${1##*=}"
;;
-y | --y | --y- | --y-s | --y-si | --y-siz | --y-size)
if test $# -eq 1; then
do_missing_arg $1
fi
shift
memusagestat_args="$memusagestat_args -y $1"
;;
--y=* | --y-=* | --y-s=* | --y-si=* | --y-siz=* | --y-size=*)
memusagestat_args="$memusagestat_args -y ${1##*=}"
;;
--p | --p=* | --t | --t=* | --ti | --ti=* | --u)
echo >&2 $"memusage: option \`${1##*=}' is ambiguous"
do_usage
;;
--)
# Stop processing arguments.
shift
break
;;
--*)
echo >&2 $"memusage: unrecognized option \`$1'"
do_usage
;;
*)
# Unknown option. This means the rest is the program name and parameters.
break
;;
esac
shift
done
# See whether any arguments are left.
if test $# -eq 0; then
echo >&2 $"No program name given"
do_usage
fi
# This will be in the environment.
add_env="LD_PRELOAD=$memusageso"
# Generate data file name.
datafile=
if test -n "$data"; then
datafile="$data"
elif test -n "$png"; then
datafile=$(mktemp -t memusage.XXXXXX) || exit
trap 'rm -f "$datafile"; exit 1' HUP INT QUIT TERM PIPE
fi
if test -n "$datafile"; then
add_env="$add_env MEMUSAGE_OUTPUT=$datafile"
fi
# Set program name.
if test -n "$progname"; then
add_env="$add_env MEMUSAGE_PROG_NAME=$progname"
fi
# Set buffer size.
if test -n "$buffer"; then
add_env="$add_env MEMUSAGE_BUFFER_SIZE=$buffer"
fi
# Disable timers.
if test -n "$notimer"; then
add_env="$add_env MEMUSAGE_NO_TIMER=yes"
fi
# Trace mmap.
if test -n "$tracemmap"; then
add_env="$add_env MEMUSAGE_TRACE_MMAP=yes"
fi
# Execute the program itself.
eval $add_env '"$@"'
result=$?
# Generate the PNG data file if wanted and there is something to generate
# it from.
if test -n "$png" -a -n "$datafile" -a -s "$datafile"; then
# Append extension .png if it isn't already there.
case $png in
*.png) ;;
*) png="$png.png" ;;
esac
$memusagestat $memusagestat_args "$datafile" "$png"
fi
if test -z "$data" -a -n "$datafile"; then
rm -f "$datafile"
fi
exit $result
# Local Variables:
# mode:ksh
# End:
|
#!/bin/bash
# Copyright 2019 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
set -o errexit # exit when a command fails
set -o nounset # error when an undefined variable is referenced
set -o pipefail # error if the input command to a pipe fails
readonly SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
readonly FUCHSIA_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"
# The go repos that fint depends on, given as an array of pairs of the form
# (<repo name>, <path in the tree to that vendored repo>).
# Note: fint can use any packages in //tools without declaring them here,
# because fint is part of the //tools Go module. If fint ever depends on
# packages from other top-level directories from the Fuchsia root, the
# modules corresponding to those top-level directories must be added here.
readonly GO_DEPS=(
"github.com/golang/protobuf"
"${FUCHSIA_ROOT}/third_party/golibs/github.com/golang/protobuf"
"github.com/google/subcommands"
"${FUCHSIA_ROOT}/third_party/golibs/github.com/google/subcommands"
"google.golang.org/protobuf"
"${FUCHSIA_ROOT}/third_party/golibs/github.com/protocolbuffers/protobuf-go"
)
print_usage_and_exit() {
readonly exit_code=$1
# In the case of a non-zero exit, redirect the stdout below to stderr.
if [[ $exit_code -ne 0 ]]; then
exec 1>&2
fi
echo ""
echo "Used to bootstrap the \"fint\" tool, which provides the platform's"
echo "\"integration interface\"."
echo "See //tools/integration/README.md for more details."
echo ""
echo "usage: $(basename "$0") (-o <path>)"
echo ""
echo "options:"
echo "-o: points to a file path at which to output fint; if unsupplied, it"
echo " will be output to the current working directory."
echo ""
exit "$exit_code"
}
###############################################################################
# Returns the host platform, of the form <OS>-<architecture>.
# Globals:
# None
# Arguments:
# None
# Returns:
# The host platform, if successful.
###############################################################################
host_platform() {
readonly uname="$(uname -s -m)"
case "${uname}" in
"Linux x86_64") echo linux-x64 ;;
"Darwin x86_64") echo mac-x64 ;;
*)
echo "unsupported infrastructure platform: ${uname}" 1>&2
exit 1
;;
esac
}
###############################################################################
# Symlinks already vendored go repositories, depended on by fint, to a provided
# go path.
# Globals:
# GO_DEPS
# Arguments:
# $1 - a go path
# Returns:
# None
###############################################################################
symlink_go_deps() {
readonly gopath="$1"
for (( i=0 ; i < ${#GO_DEPS[@]} ; i += 2 )) ; do
host="${GO_DEPS[i]}"
src="${GO_DEPS[i+1]}"
dest="${gopath}/src/${host}"
mkdir -p "$(dirname "${dest}")"
if [[ -d "${dest}" ]]; then
echo "cannot symlink deps: ${dest} already exists"
exit 1
fi
ln -s "${src}" "${dest}"
done
}
# The `realpath` command is not available on all systems, so we reimplement it
# here in pure bash. It converts relative paths to absolute, and leaves
# absolute paths as-is.
realpath() {
[[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"
}
main() {
local output
output="$(pwd)/fint"
while getopts 'ho:' opt; do
case "$opt" in
h) print_usage_and_exit 0 ;;
o) output=$(realpath "${OPTARG}") ;;
?) print_usage_and_exit 1 ;;
esac
done
readonly GOPATH=$(mktemp -d "${TMPDIR:-/tmp}/fint.XXXXX")
rm_gopath() {
rm -r -f "${GOPATH}"
}
trap rm_gopath EXIT
export GOPATH
symlink_go_deps "${GOPATH}"
# Execute `go build` from the fuchsia root, as the package to build must be
# supplied as a relative path.
readonly go_bin="${FUCHSIA_ROOT}/prebuilt/third_party/go/$(host_platform)/bin/go"
# Go commands must be run in //tools, because each top-level directory is its
# own Go module.
cd "${FUCHSIA_ROOT}/tools" && ${go_bin} build -o "${output}" "./integration/cmd/fint"
}
main "$@"
|
import test from 'ava';
import React from 'react';
import {shallow, mount} from 'enzyme';
import {renderJSX, JSX} from 'jsx-test-helpers';
import ButtonContainer from '../components/reuse/ButtonContainer/ButtonContainer';
function FakeComponent() {}
test('renders children when passed in', t => {
  t.is(renderJSX(<ButtonContainer />), JSX(<FakeComponent />));
});
test('renders component when prop is passed', t => {
  const wrapper = mount(
    <ButtonContainer id={1}/>
  );
  t.is(wrapper.props().id, 1);
});
|
require_relative 'concrete_slot_type'
class NormalisedConcreteSlotType < ConcreteSlotType
def cast(value)
Nomis::ApiSlotNormaliser.new(value).slot
end
end
|
. ./_env.sh
parentcontainertag=$containertag
containertag=$parentcontainertag-jgi
|
python transformers/examples/language-modeling/run_language_modeling.py \
--model_name_or_path train-outputs/512+0+512-only-pad/7-model \
--tokenizer_name model-configs/1024-config \
--eval_data_file ../data/wikitext-103-raw/wiki.valid.raw \
--output_dir eval-outputs/512+0+512-only-pad/7-512+0+512-N-VB-IP-first-256 \
--do_eval \
--per_device_eval_batch_size 1 \
--dataloader_drop_last \
--augmented \
--augmentation_function replace_all_but_nouns_and_verbs_first_half_quarter \
--eval_function penultimate_quarter_eval
|
# -*- coding:utf-8 -*-
'''
Transport-layer packet parsing
'''
import dpkt

# Packet parsing class
class TcpAnylast(object):
    '''Transport-layer decomposition of a datagram'''
    def __init__(self, packet):
        '''Initialize with the transport-layer packet'''
        self.packet = packet
    def getSrc(self):
        '''Return the source port'''
        return self.packet.sport
    def getDst(self):
        '''Return the destination port'''
        return self.packet.dport
    def getAckNumber(self):
        '''Return the acknowledgment number'''
        return self.packet.ack
    def getSequence(self):
        '''Return the sequence number'''
        return self.packet.seq
    def getFlags(self):
        '''Return the TCP flag bits'''
        flags = str(bin(self.packet.flags)).split("b")[-1]
        size = 12 - len(flags)
        flags = list("%s%s" % ('0' * size, str(flags)))
        urg = flags[6]
        ack = flags[7]
        psh = flags[8]
        rst = flags[9]
        syn = flags[10]
        fin = flags[11]
        return (urg, ack, psh, rst, syn, fin)
    def getWindows(self):
        '''Return the window size'''
        return self.packet.win
    def getUrgentPointer(self):
        '''Return the urgent pointer'''
        return self.packet.urp
    def getSize(self):
        '''Get the packet size'''
        return self.packet.len
    def getData(self):
        '''Get the transport-layer payload'''
        return self.packet.data
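# A hedged usage sketch: 'sample.pcap' is a hypothetical capture file
# containing Ethernet/IP/TCP traffic.
if __name__ == '__main__':
    with open('sample.pcap', 'rb') as f:
        for ts, buf in dpkt.pcap.Reader(f):
            eth = dpkt.ethernet.Ethernet(buf)
            if isinstance(eth.data, dpkt.ip.IP) and isinstance(eth.data.data, dpkt.tcp.TCP):
                tcp = TcpAnylast(eth.data.data)
                print(tcp.getSrc(), tcp.getDst(), tcp.getFlags())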
|
#!/bin/bash
# This script is executed by Gradle to start the React packager for Debug
# targets.
THIS_DIR=$(cd -P "$(dirname "$(readlink "${BASH_SOURCE[0]}" || echo "${BASH_SOURCE[0]}")")" && pwd)
export RCT_METRO_PORT="${RCT_METRO_PORT:=8081}"
echo "export RCT_METRO_PORT=${RCT_METRO_PORT}" > "${THIS_DIR}/../../node_modules/react-native/scripts/.packager.env"
if nc -w 5 -z localhost ${RCT_METRO_PORT} ; then
if ! curl -s "http://localhost:${RCT_METRO_PORT}/status" | grep -q "packager-status:running" ; then
echo "Port ${RCT_METRO_PORT} already in use, packager is either not running or not running correctly"
exit 2
fi
else
CMD="${THIS_DIR}/../../node_modules/react-native/scripts/launchPackager.command"
if [[ `uname` == "Darwin" ]]; then
open -g "${CMD}" || echo "Can't start packager automatically"
else
xdg-open "${CMD}" || echo "Can't start packager automatically"
fi
fi
|
#!/bin/bash
#
# Prisma Node.JS packages publish script
#
# Build Order
# prisma-client-lib
# prisma-generate-schema
# prisma-db-introspection
# prisma-yml
# prisma-cli-engine
# prisma-cli-core
# prisma-cli
set -e
set -x
#
# Normalize CIRCLE_BRANCH
#
if [[ -z "$CIRCLE_BRANCH" ]]; then
if [[ $CIRCLE_TAG == "*beta" ]]; then
export CIRCLE_BRANCH=beta
fi
if [[ $CIRCLE_TAG == "*alpha" ]]; then
export CIRCLE_BRANCH=alpha
fi
fi
if [[ -z "$CIRCLE_BRANCH" ]]; then
export CIRCLE_BRANCH=master
fi
if [ -z "$CIRCLE_TAG" ] && [ $CIRCLE_BRANCH == "master" ]; then
echo "Builds on master are only executed when a tag is provided"
exit 0
fi
if [ $CIRCLE_TAG ] && [ $CIRCLE_BRANCH != "master" ]; then
echo "The Tag ${CIRCLE_TAG} has been set, but the branch is set to ${CIRCLE_BRANCH}. Tags are only allowed on master"
fi
if [ $CIRCLE_TAG ] && [ $CIRCLE_BRANCH == "master" ]; then
echo "WARNING: CIRCLE_TAG is set to $CIRCLE_TAG. This will publish a new version on the @latest tag."
else
echo "INFO: This will deploy a new version on the @$CIRCLE_BRANCH tag"
fi
if [[ $CIRCLE_COMPARE_URL ]]; then
export lastCommits=`echo $CIRCLE_COMPARE_URL | sed -n 's/.*compare\/\(.*\)/\1/p' | sed 's/\.\.\./ /'`
else
export lastCommits="HEAD"
fi
#
# Detect change
#
export changedFiles=$(git diff-tree --no-commit-id --name-only -r $lastCommits)
ymlChanged=false
introspectionChanged=false
coreChanged=false
engineChanged=false
clientChanged=false
generateSchemaChanged=false
if [[ "$changedFiles" = *"cli/packages/prisma-yml"* ]]; then
ymlChanged=true
fi
if [[ "$changedFiles" = *"cli/packages/prisma-db-introspection"* ]]; then
introspectionChanged=true
fi
if [[ "$changedFiles" = *"cli/packages/prisma-cli-core"* ]]; then
coreChanged=true
fi
if [[ "$changedFiles" = *"cli/packages/prisma-cli-engine"* ]]; then
engineChanged=true
fi
if [[ "$changedFiles" = *"client"* ]]; then
clientChanged=true
fi
if [[ "$changedFiles" = *"cli/packages/prisma-generate-schema"* ]]; then
generateSchemaChanged=true
fi
echo "introspection changed: $introspectionChanged yml changed: $ymlChanged. core changed: $coreChanged. engine changed: $engineChanged"
if [ $introspectionChanged == false ] && [ $ymlChanged == false ] && [ $coreChanged == false ] && [ $engineChanged == false ] && [ $clientChanged == false ] && [ $generateSchemaChanged == false ] && [ -z "$CIRCLE_TAG" ]; then
echo "There are no changes in the CLI."
exit 0;
fi
#
# Get docker tag
#
latestVersion=$(npm info prisma version)
tag=${CIRCLE_TAG:-$latestVersion}
tagElements=(${tag//./ })
nextDockerMinor=${tagElements[1]}
if [[ $CIRCLE_TAG ]] && [[ $CIRCLE_BRANCH == "master" ]]; then
nextDockerTag="${tagElements[0]}.${nextDockerMinor}"
else
step=1
if [ $CIRCLE_BRANCH == "alpha" ]; then
step=2
fi
if [[ -n "$CIRCLE_TAG" ]]; then
echo "Setting Step to 0"
step=0
fi
nextDockerMinor=$((nextDockerMinor + step))
nextDockerTag="${tagElements[0]}.${nextDockerMinor}-${CIRCLE_BRANCH}"
fi
node cli/scripts/waitUntilTagPublished.js $nextDockerTag
#
# Get new version
#
if [ -z "$CIRCLE_TAG" ]; then
latestBetaVersion=$(npm info prisma-client-lib version --tag $CIRCLE_BRANCH)
latestVersionElements=(${latestVersion//./ })
latestBetaVersionElements=(${latestBetaVersion//./ })
betaMinor=${latestBetaVersionElements[1]}
latestMinor=${latestVersionElements[1]}
latestMajor=${latestVersionElements[0]}
betaLastNumber=`echo $latestBetaVersion | sed -n "s/.*$CIRCLE_BRANCH\.\([0-9]\{1,\}\)/\1/p"`
echo "betaLastNumber $betaLastNumber"
# calc next minor
step=1
if [ $CIRCLE_BRANCH == "alpha" ]; then
step=2
fi
nextMinor=$((latestMinor + step))
nextLastNumber=0
echo "beta minor $betaMinor latest minor $latestMinor next minor ${nextMinor}"
# calc next last number
if [ "$betaMinor" -gt "$latestMinor" ]; then
echo "$betaMinor is greater than $latestMinor"
nextLastNumber=$((betaLastNumber + step + 1))
fi
export newVersion="$latestMajor.$nextMinor.0-$CIRCLE_BRANCH.$nextLastNumber"
echo "new version: $newVersion"
else
export newVersion=$CIRCLE_TAG
fi
######################
# Build cli/packages #
######################
cd cli/packages/
#
# Build prisma-generate-schema
#
if [ "$generateSchemaChanged" == true ] || [ "$clientChanged" == true ] || [ "$coreChanged" == true ]; then
cd prisma-generate-schema
sleep 3.0
../../scripts/doubleInstall.sh
yarn build
npm version $newVersion
if [[ $CIRCLE_TAG ]]; then
npm publish
else
npm publish --tag $CIRCLE_BRANCH
fi
cd ..
fi
export generateSchemaVersion=$(cat prisma-generate-schema/package.json | jq -r '.version')
#
# Build prisma-client-lib
#
cd ../../client
export clientVersionBefore=$(cat package.json | jq -r '.version')
if [ "$clientChanged" == true ] || [ -n "$CIRCLE_TAG" ]; then
echo "Going to publish client"
yarn install
yarn build
npm version $newVersion
yarn add prisma-generate-schema@$newVersion
if [[ $CIRCLE_TAG ]]; then
npm publish
else
npm publish --tag $CIRCLE_BRANCH
fi
yarn install
fi
export clientVersion=$(cat package.json | jq -r '.version')
cd ../cli/packages
########################
# Back to cli/packages #
########################
#
# Build prisma-yml
#
export ymlVersionBefore=$(cat prisma-yml/package.json | jq -r '.version')
if [ "$ymlChanged" == true ] || [ -n "$CIRCLE_TAG" ]; then
echo "Going to publish yml"
cd prisma-yml
../../scripts/doubleInstall.sh
yarn build
npm version $newVersion
if [[ $CIRCLE_TAG ]]; then
npm publish
else
npm publish --tag $CIRCLE_BRANCH
fi
../../scripts/doubleInstall.sh
cd ..
fi
export ymlVersion=$(cat prisma-yml/package.json | jq -r '.version')
#
# Build prisma-cli-engine
#
if [ "$ymlVersionBefore" != "$ymlVersion" ] || [ "$engineChanged" == true ]; then
cd prisma-cli-engine
sleep 3.0
yarn add prisma-yml@$ymlVersion
sleep 0.2
../../scripts/doubleInstall.sh
yarn build
npm version $newVersion
if [[ $CIRCLE_TAG ]]; then
npm publish
else
npm publish --tag $CIRCLE_BRANCH
fi
cd ..
fi
export engineVersion=$(cat prisma-cli-engine/package.json | jq -r '.version')
#
# Build prisma-db-introspection
#
export introspectionVersionBefore=$(cat prisma-db-introspection/package.json | jq -r '.version')
if [ "$ymlVersionBefore" != "$ymlVersion" ] || [ "$introspectionChanged" == true ] || [ -n "$CIRCLE_TAG" ]; then
cd prisma-db-introspection
sleep 0.5
yarn add prisma-yml@$ymlVersion
sleep 0.2
../../scripts/doubleInstall.sh
yarn build
npm version $newVersion
if [[ $CIRCLE_TAG ]]; then
npm publish
else
npm publish --tag $CIRCLE_BRANCH
fi
cd ..
fi
export introspectionVersion=$(cat prisma-db-introspection/package.json | jq -r '.version')
#
# Build prisma-cli-core
#
if [ "$ymlVersionBefore" != "$ymlVersion" ] || [ "$coreChanged" == true ] || [ "$introspectionChanged" == true ]; then
cd prisma-cli-core
sleep 3.0
yarn add prisma-yml@$ymlVersion
sleep 0.2
yarn add prisma-db-introspection@$introspectionVersion
sleep 0.5
yarn add prisma-generate-schema@$generateSchemaVersion
sleep 0.2
yarn add prisma-client-lib@$clientVersion
sleep 0.3
../../scripts/doubleInstall.sh
# new docker tag
sed -i.bak "s/image: prismagraphql\/prisma:[0-9]\{1,\}\.[0-9]\{1,\}/image: prismagraphql\/prisma:$nextDockerTag/g" src/util.ts
cat src/util.ts
yarn build
npm version $newVersion
if [[ $CIRCLE_TAG ]]; then
npm publish
else
npm publish --tag $CIRCLE_BRANCH
fi
cd ..
fi
export coreVersion=$(cat prisma-cli-core/package.json | jq -r '.version')
#
# Build prisma
#
cd prisma-cli
cp ../../../README.md ./
sleep 0.5
yarn add prisma-cli-engine@$engineVersion prisma-cli-core@$coreVersion
../../scripts/doubleInstall.sh
yarn build
if [[ -n "$CIRCLE_TAG" ]] && [[ "$CIRCLE_BRANCH" == "master" ]]; then
newVersion=$CIRCLE_TAG
echo "new version: $newVersion"
npm version $newVersion
npm publish
else
npm version $newVersion
npm publish --tag $CIRCLE_BRANCH
fi
|
package mindustry.entities.units;
import arc.struct.Bits;
import arc.struct.*;
import arc.graphics.*;
import arc.util.*;
import arc.util.pooling.*;
import mindustry.content.*;
import mindustry.ctype.ContentType;
import mindustry.entities.traits.*;
import mindustry.entities.type.*;
import mindustry.type.*;
import java.io.*;
import static mindustry.Vars.content;
/** Class for controlling status effects on an entity. */
public class Statuses implements Saveable{
private static final StatusEntry globalResult = new StatusEntry();
private static final Array<StatusEntry> removals = new Array<>();
private Array<StatusEntry> statuses = new Array<>();
private Bits applied = new Bits(content.getBy(ContentType.status).size);
private float speedMultiplier;
private float damageMultiplier;
private float armorMultiplier;
public void handleApply(Unit unit, StatusEffect effect, float duration){
if(effect == StatusEffects.none || effect == null || unit.isImmune(effect)) return; //don't apply empty or immune effects
if(statuses.size > 0){
//check for opposite effects
for(StatusEntry entry : statuses){
//extend effect
if(entry.effect == effect){
entry.time = Math.max(entry.time, duration);
return;
}else if(entry.effect.reactsWith(effect)){ //find opposite
globalResult.effect = entry.effect;
entry.effect.getTransition(unit, effect, entry.time, duration, globalResult);
entry.time = globalResult.time;
if(globalResult.effect != entry.effect){
entry.effect = globalResult.effect;
}
//stop looking when one is found
return;
}
}
}
//otherwise, no opposites found, add direct effect
StatusEntry entry = Pools.obtain(StatusEntry.class, StatusEntry::new);
entry.set(effect, duration);
statuses.add(entry);
}
public Color getStatusColor(){
if(statuses.size == 0){
return Tmp.c1.set(Color.white);
}
float r = 0f, g = 0f, b = 0f;
for(StatusEntry entry : statuses){
r += entry.effect.color.r;
g += entry.effect.color.g;
b += entry.effect.color.b;
}
return Tmp.c1.set(r / statuses.size, g / statuses.size, b / statuses.size, 1f);
}
public void clear(){
statuses.clear();
}
public void update(Unit unit){
applied.clear();
speedMultiplier = damageMultiplier = armorMultiplier = 1f;
if(statuses.size == 0) return;
removals.clear();
for(StatusEntry entry : statuses){
entry.time = Math.max(entry.time - Time.delta(), 0);
applied.set(entry.effect.id);
if(entry.time <= 0){
Pools.free(entry);
removals.add(entry);
}else{
speedMultiplier *= entry.effect.speedMultiplier;
armorMultiplier *= entry.effect.armorMultiplier;
damageMultiplier *= entry.effect.damageMultiplier;
entry.effect.update(unit, entry.time);
}
}
if(removals.size > 0){
statuses.removeAll(removals, true);
}
}
public float getSpeedMultiplier(){
return speedMultiplier;
}
public float getDamageMultiplier(){
return damageMultiplier;
}
public float getArmorMultiplier(){
return armorMultiplier;
}
public boolean hasEffect(StatusEffect effect){
return applied.get(effect.id);
}
@Override
public void writeSave(DataOutput stream) throws IOException{
stream.writeByte(statuses.size);
for(StatusEntry entry : statuses){
stream.writeByte(entry.effect.id);
stream.writeFloat(entry.time);
}
}
@Override
public void readSave(DataInput stream, byte version) throws IOException{
for(StatusEntry effect : statuses){
Pools.free(effect);
}
statuses.clear();
byte amount = stream.readByte();
for(int i = 0; i < amount; i++){
byte id = stream.readByte();
float time = stream.readFloat();
StatusEntry entry = Pools.obtain(StatusEntry.class, StatusEntry::new);
entry.set(content.getByID(ContentType.status, id), time);
statuses.add(entry);
}
}
}
|
import React, { useState, useEffect } from "react";
const App = () => {
const [data, setData] = useState(null);
useEffect(() => {
const fetchData = async () => {
// make a POST request to a JSON API
const response = await fetch('http://example.com/api/fetch-data', {
method: 'POST'
});
// parse response to JSON format and set to state
const data = await response.json();
setData(data);
};
fetchData();
}, []);
return <div>{JSON.stringify(data)}</div>;
};
export default App;
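// A hedged refinement (same placeholder endpoint): errors from fetch are
// unhandled above, so one common pattern wraps the request in try/catch:
//
//   const fetchData = async () => {
//     try {
//       const response = await fetch('http://example.com/api/fetch-data', { method: 'POST' });
//       if (!response.ok) throw new Error(`HTTP ${response.status}`);
//       setData(await response.json());
//     } catch (err) {
//       console.error('Fetching data failed:', err);
//     }
//   };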
|
package org.moskito.control.ui.action;
import net.anotheria.anoprise.mocking.MockFactory;
import net.anotheria.maf.action.ActionMapping;
import net.anotheria.maf.action.CommandRedirect;
import org.junit.Test;
import javax.servlet.http.HttpServletRequest;
import static org.junit.Assert.assertEquals;
/**
* TODO comment this class
*
* @author lrosenberg
* @since 07.07.13 00:00
*/
public class SelectViewActionTest {
@Test public void testSetView(){
SelectViewAction a = new SelectViewAction();
HttpServletRequest request = MockFactory.createMock(HttpServletRequest.class, new org.moskito.control.ui.action.GetParameterMocking("view", "myview"), new org.moskito.control.ui.action.HttpServletRequestMocking());
a.execute(new ActionMapping(null, null, new CommandRedirect("dummy", "dummy")), null, request, null);
assertEquals("myview", a.getCurrentViewName(request));
}
}
|
/**
* @ngdoc function
* @name foodCircle.controller:ErrorpageCtrl
* @description
* # ErrorpageCtrl
* Controller of the foodCircle
*/
/*global
angular
*/
(function () {
'use strict';
angular.module('foodCircle').controller('ErrorpageCtrl', ['$rootScope', '$state', '$stateParams', function ($rootScope, $state, $stateParams) {
var vm = this;
vm.errorMessage = $stateParams.error.message;
vm.reason = $stateParams.reason;
vm.toPrevious = function () {
if (!$rootScope.previousState || $rootScope.previousState === '') {
$rootScope.previousState = 'main.home';
}
$state.go($rootScope.previousState, $rootScope.previousStateParams);
};
}]);
}());
|
#!/bin/bash
# shellcheck disable=SC1091
################################################################################
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# shellcheck source=sbin/common/constants.sh
source "$SCRIPT_DIR/../../sbin/common/constants.sh"
export ANT_HOME=/cygdrive/C/Projects/OpenJDK/apache-ant-1.10.1
export DRAGONWELL8_BOOTSTRAP=/cygdrive/C/openjdk/dragonwell-bootstrap/jdk8u272-ga
export ALLOW_DOWNLOADS=true
export LANG=C
export OPENJ9_NASM_VERSION=2.13.03
export OPENSSL_VERSION=1.1.1m
TOOLCHAIN_VERSION=""
if [ "$ARCHITECTURE" == "aarch64" ]; then
# Windows aarch64 cross compiles requires same version boot jdk
echo "Cross compile of aarch64 on Windows uses same boot jdk as build version, using: ${JAVA_FEATURE_VERSION}"
JDK_BOOT_VERSION="$((JAVA_FEATURE_VERSION))"
fi
BOOT_JDK_VARIABLE="JDK${JDK_BOOT_VERSION}_BOOT_DIR"
if [ ! -d "$(eval echo "\$$BOOT_JDK_VARIABLE")" ]; then
bootDir="$PWD/jdk-$JDK_BOOT_VERSION"
# Note we export $BOOT_JDK_VARIABLE (i.e. JDKXX_BOOT_DIR) here
# instead of BOOT_JDK_VARIABLE (no '$').
export "${BOOT_JDK_VARIABLE}"="$bootDir"
if [ ! -x "$bootDir/bin/javac.exe" ]; then
# Set to a default location as linked in the ansible playbooks
if [ -x "/cygdrive/c/openjdk/jdk-${JDK_BOOT_VERSION}/bin/javac" ]; then
echo "Could not use ${BOOT_JDK_VARIABLE} - using /cygdrive/c/openjdk/jdk-${JDK_BOOT_VERSION}"
# shellcheck disable=SC2140
export "${BOOT_JDK_VARIABLE}"="/cygdrive/c/openjdk/jdk-${JDK_BOOT_VERSION}"
elif [ "$JDK_BOOT_VERSION" -ge 8 ]; then # Adopt has no build pre-8
# This is needed to convert x86-32 to x32 which is what the API uses
export downloadArch
case "$ARCHITECTURE" in
"x86-32") downloadArch="x32";;
"aarch64") downloadArch="x64";;
*) downloadArch="$ARCHITECTURE";;
esac
releaseType="ga"
vendor="adoptium"
apiUrlTemplate="https://api.\${vendor}.net/v3/binary/latest/\${JDK_BOOT_VERSION}/\${releaseType}/windows/\${downloadArch}/jdk/hotspot/normal/\${vendor}"
apiURL=$(eval echo ${apiUrlTemplate})
echo "Downloading GA release of boot JDK version ${JDK_BOOT_VERSION} from ${apiURL}"
# make-adopt-build-farm.sh has 'set -e'. We need to disable that for
# the fallback mechanism, as downloading of the GA binary might fail
set +e
wget -q "${apiURL}" -O openjdk.zip
retVal=$?
set -e
if [ $retVal -ne 0 ]; then
# We may be a JDK HEAD build for which no boot JDK exists other than nightly builds.
echo "Downloading GA release of boot JDK version ${JDK_BOOT_VERSION} failed."
# shellcheck disable=SC2034
releaseType="ea"
# shellcheck disable=SC2034
vendor="adoptium"
apiURL=$(eval echo ${apiUrlTemplate})
echo "Attempting to download EA release of boot JDK version ${JDK_BOOT_VERSION} from ${apiURL}"
set +e
wget -q "${apiURL}" -O openjdk.zip
retVal=$?
set -e
if [ $retVal -ne 0 ]; then
# If no binaries are available then try from adoptopenjdk
echo "Downloading Temurin release of boot JDK version ${JDK_BOOT_VERSION} failed."
# shellcheck disable=SC2034
releaseType="ga"
# shellcheck disable=SC2034
vendor="adoptopenjdk"
apiURL=$(eval echo ${apiUrlTemplate})
echo "Attempting to download GA release of boot JDK version ${JDK_BOOT_VERSION} from ${apiURL}"
wget -q "${apiURL}" -O openjdk.zip
fi
fi
unzip -q openjdk.zip
mv "$(ls -d jdk-"${JDK_BOOT_VERSION}"*)" "$bootDir"
fi
fi
fi
# shellcheck disable=SC2155
export JDK_BOOT_DIR="$(eval echo "\$$BOOT_JDK_VARIABLE")"
"$JDK_BOOT_DIR/bin/java" -version 2>&1 | sed 's/^/BOOT JDK: /'
"$JDK_BOOT_DIR/bin/java" -version > /dev/null 2>&1
executedJavaVersion=$?
if [ $executedJavaVersion -ne 0 ]; then
echo "Failed to obtain or find a valid boot jdk"
exit 1
fi
"$JDK_BOOT_DIR/bin/java" -version 2>&1 | sed 's/^/BOOT JDK: /'
if [ "${ARCHITECTURE}" == "x86-32" ]
then
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache --with-target-bits=32 --target=x86"
if [ "${VARIANT}" == "${BUILD_VARIANT_OPENJ9}" ]
then
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-openssl=/cygdrive/c/openjdk/OpenSSL-${OPENSSL_VERSION}-x86_32-VS2013 --enable-openssl-bundling"
if [ "${JAVA_TO_BUILD}" == "${JDK8_VERSION}" ]
then
export BUILD_ARGS="${BUILD_ARGS} --freetype-version 2.5.3"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-freemarker-jar=/cygdrive/c/openjdk/freemarker.jar"
# https://github.com/adoptium/temurin-build/issues/243
export INCLUDE="C:\Program Files\Debugging Tools for Windows (x64)\sdk\inc;$INCLUDE"
export PATH="/c/cygwin64/bin:/usr/bin:$PATH"
TOOLCHAIN_VERSION="2013"
elif [ "${JAVA_TO_BUILD}" == "${JDK11_VERSION}" ]
then
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-freemarker-jar=/cygdrive/c/openjdk/freemarker.jar"
# Next line a potentially tactical fix for https://github.com/adoptium/temurin-build/issues/267
export PATH="/usr/bin:$PATH"
fi
# LLVM needs to be before cygwin as at least one machine has 64-bit clang in cygwin #813
# NASM required for OpenSSL support as per #604
export PATH="/cygdrive/c/Program Files (x86)/LLVM/bin:/cygdrive/c/openjdk/nasm-$OPENJ9_NASM_VERSION:$PATH"
else
if [ "${JAVA_TO_BUILD}" == "${JDK8_VERSION}" ]
then
TOOLCHAIN_VERSION="2013"
export BUILD_ARGS="${BUILD_ARGS} --freetype-version 2.5.3"
export PATH="/cygdrive/c/openjdk/make-3.82/:$PATH"
elif [ "${JAVA_TO_BUILD}" == "${JDK11_VERSION}" ]
then
TOOLCHAIN_VERSION="2017"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache"
elif [ "$JAVA_FEATURE_VERSION" -gt 11 ]
then
TOOLCHAIN_VERSION="2019"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache"
fi
fi
fi
if [ "${ARCHITECTURE}" == "x64" ]
then
if [ "${VARIANT}" == "${BUILD_VARIANT_OPENJ9}" ]
then
export HAS_AUTOCONF=1
export BUILD_ARGS="${BUILD_ARGS} --freetype-version 2.5.3"
if [ "${JAVA_TO_BUILD}" == "${JDK8_VERSION}" ]
then
export INCLUDE="C:\Program Files\Debugging Tools for Windows (x64)\sdk\inc;$INCLUDE"
export PATH="$PATH:/c/cygwin64/bin"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-freemarker-jar=/cygdrive/c/openjdk/freemarker.jar --disable-ccache"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-openssl=/cygdrive/c/openjdk/OpenSSL-${OPENSSL_VERSION}-x86_64-VS2013 --enable-openssl-bundling"
TOOLCHAIN_VERSION="2013"
elif [ "${JAVA_TO_BUILD}" == "${JDK9_VERSION}" ]
then
TOOLCHAIN_VERSION="2013"
export BUILD_ARGS="${BUILD_ARGS} --freetype-version 2.5.3"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-freemarker-jar=/cygdrive/c/openjdk/freemarker.jar"
elif [ "${JAVA_TO_BUILD}" == "${JDK10_VERSION}" ]
then
export BUILD_ARGS="${BUILD_ARGS} --freetype-version 2.5.3"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-freemarker-jar=/cygdrive/c/openjdk/freemarker.jar"
elif [ "${JAVA_TO_BUILD}" == "${JDK11_VERSION}" ]
then
TOOLCHAIN_VERSION="2017"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-freemarker-jar=/cygdrive/c/openjdk/freemarker.jar --with-openssl=/cygdrive/c/openjdk/OpenSSL-${OPENSSL_VERSION}-x86_64-VS2017 --enable-openssl-bundling"
elif [ "$JAVA_FEATURE_VERSION" -gt 11 ]
then
TOOLCHAIN_VERSION="2019"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-freemarker-jar=/cygdrive/c/openjdk/freemarker.jar --with-openssl=/cygdrive/c/openjdk/OpenSSL-${OPENSSL_VERSION}-x86_64-VS2019 --enable-openssl-bundling"
fi
CUDA_VERSION=9.0
CUDA_HOME_FULL="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v$CUDA_VERSION"
# use cygpath to map to 'short' names (without spaces)
CUDA_HOME=$(cygpath -ms "$CUDA_HOME_FULL")
if [[ $CUDA_HOME == *" "* ]]; then
echo "[ERROR] All CUDA_HOME path folders must have either (a) no spaces, or (b) a shortened version configured in the environment."
echo "CUDA_HOME unshortened: ${CUDA_HOME_FULL}"
echo "CUDA_HOME shortened: ${CUDA_HOME}"
exit 1
fi
if [ -f "$(cygpath -u "$CUDA_HOME"/include/cuda.h)" ]
then
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --enable-cuda --with-cuda=$CUDA_HOME"
else
echo "[ERROR] The following file could not be found: $CUDA_HOME/include/cuda.h"
echo "Please check that CUDA is correctly installed."
exit 1
fi
# LLVM needs to be before cygwin as at least one machine has clang in cygwin #813
# NASM required for OpenSSL support as per #604
export PATH="/cygdrive/c/Program Files/LLVM/bin:/usr/bin:/cygdrive/c/openjdk/nasm-$OPENJ9_NASM_VERSION:$PATH"
else
TOOLCHAIN_VERSION="2013"
if [ "${JAVA_TO_BUILD}" == "${JDK8_VERSION}" ]
then
export BUILD_ARGS="${BUILD_ARGS} --freetype-version 2.5.3"
export PATH="/cygdrive/c/openjdk/make-3.82/:$PATH"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache"
elif [ "${JAVA_TO_BUILD}" == "${JDK9_VERSION}" ]
then
export BUILD_ARGS="${BUILD_ARGS} --freetype-version 2.5.3"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache"
elif [ "${JAVA_TO_BUILD}" == "${JDK10_VERSION}" ]
then
export BUILD_ARGS="${BUILD_ARGS} --freetype-version 2.5.3"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache"
elif [ "${JAVA_TO_BUILD}" == "${JDK11_VERSION}" ]
then
TOOLCHAIN_VERSION="2017"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache"
elif [ "$JAVA_FEATURE_VERSION" -gt 11 ]
then
TOOLCHAIN_VERSION="2019"
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache"
fi
fi
if [ "${VARIANT}" == "${BUILD_VARIANT_DRAGONWELL}" ] && [ "${JAVA_TO_BUILD}" == "${JDK8_VERSION}" ]
then
if [[ -d "${DRAGONWELL8_BOOTSTRAP}" ]]; then
export JDK_BOOT_DIR="${DRAGONWELL8_BOOTSTRAP}"
fi
fi
fi
if [ "${ARCHITECTURE}" == "aarch64" ]; then
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --disable-ccache --openjdk-target=aarch64-unknown-cygwin --with-build-jdk=$JDK_BOOT_DIR"
fi
if [ -n "${TOOLCHAIN_VERSION}" ]; then
export CONFIGURE_ARGS_FOR_ANY_PLATFORM="${CONFIGURE_ARGS_FOR_ANY_PLATFORM} --with-toolchain-version=${TOOLCHAIN_VERSION}"
fi
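# A minimal debugging sketch (not part of the original script): echo the flags this
# fragment has accumulated before configure/make consume them. The variable names
# below are the ones assigned above.
#   echo "configure args: ${CONFIGURE_ARGS_FOR_ANY_PLATFORM}"
#   echo "build args:     ${BUILD_ARGS}"
#   echo "toolchain:      ${TOOLCHAIN_VERSION:-<platform default>}"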
|
// repo: JielingWang/ENGINE-backend
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = unpackXlsxFile;
var _fs = require('fs');
var _fs2 = _interopRequireDefault(_fs);
var _stream = require('stream');
var _stream2 = _interopRequireDefault(_stream);
var _unzipper = require('unzipper');
var _unzipper2 = _interopRequireDefault(_unzipper);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Reads XLSX file in Node.js.
* @param {(string|Stream)} input - A Node.js readable stream or a path to a file.
* @return {Promise} Resolves to an object holding XLSX file entries.
*/
function unpackXlsxFile(input) {
// XLSX file is a zip archive.
// The `entries` object stores the files
// and their contents from this XLSX zip archive.
var entries = {};
var stream = input instanceof _stream2.default ? input : _fs2.default.createReadStream(input);
return new Promise(function (resolve, reject) {
var entryPromises = [];
stream
// This first "error" listener is for the original stream errors.
.on('error', reject).pipe(_unzipper2.default.Parse())
// This second "error" listener is for the unzip stream errors.
.on('error', reject).on('close', function () {
return Promise.all(entryPromises).then(function () {
return resolve(entries);
});
}).on('entry', function (entry) {
var contents = '';
// To ignore an entry: `entry.autodrain()`.
entryPromises.push(new Promise(function (resolve) {
entry.on('data', function (data) {
return contents += data.toString();
}).on('end', function () {
return resolve(entries[entry.path] = contents);
});
}));
});
});
}
//# sourceMappingURL=unpackXlsxFileNode.js.map
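// A minimal usage sketch (the file path is hypothetical): the returned promise
// resolves to a map from zip entry paths to their string contents.
//   var unpack = require('./unpackXlsxFileNode').default;
//   unpack('./spreadsheet.xlsx').then(function (entries) {
//     console.log(Object.keys(entries)); // e.g. ['xl/workbook.xml', 'xl/worksheets/sheet1.xml', ...]
//   });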
|
#!/bin/bash
#SBATCH --gres=gpu:v100l:1
#SBATCH --cpus-per-task=4
#SBATCH --mem=32G
#SBATCH --time=2-12:00:00
#SBATCH --job-name=giraffe.carlaCars256.train
#SBATCH --output=/scratch/cchen795/slurm/%x-%j.out
#SBATCH --error=/scratch/cchen795/slurm/%x-%j.out
echo "load modules and Python environment"
source $HOME/scratch/py38giraffe.sh
#python test_train.py configs/64res/cars_64.yaml
python train.py configs/256res/carlacars_256.yaml
|
// public/assets/js/as/auth-frontend.js
function isValidEmail(mail) {
return /^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,4})+$/.test(mail);
}
$(document).ready(
function() {
$('body').on('keydown','.solo-numero',function (event){
if (event.keyCode == 13 || event.keyCode == 9) {
return true;
}
if(event.shiftKey)
{
event.preventDefault();
}
if (event.keyCode == 46 || event.keyCode == 8) {
// allow delete (46) and backspace (8)
}
else {
if (event.keyCode < 95) {
// top-row keys: allow only digits 0-9 (keyCodes 48-57)
if (event.keyCode < 48 || event.keyCode > 57) {
event.preventDefault();
}
}
else {
// numpad keys: allow only digits (keyCodes 96-105)
if (event.keyCode < 96 || event.keyCode > 105) {
event.preventDefault();
}
}
}
});
setTimeout(function(){
if($('.alert-login').attr('class')!=undefined){
$('.alert-login').fadeOut('slow');
}
}, 3000);
//INICIALIZAR NICESCROLL
try{
$("html").niceScroll({cursorwidth:"12px" , zindex:"9" , cursorborder:"0px", cursorborderradius:"0px"});
$(".modalText, .historial-status").niceScroll({cursorwidth:"12px" , zindex:"9999" , cursorborder:"0px", cursorborderradius:"0px"});
$(".alerts-div, .team, .message-body, .message-main-contact ").niceScroll({cursorwidth:"7px" , zindex:"3" , cursorborder:"0px", cursorborderradius:"0px"});
//Iniciar selectores
$('select').material_select();
//ANIMACION SCROLL
$(".bloque").smoove({
offset:'10%'
});
//RSLIDER
$(".rslides").responsiveSlides({
speed: 600,
timeout: 10000
});
//INICIANDO SLIDER MATERIALICE
$('.slider').slider({full_width: true});
//INICIADO ACORDEON EN CALIFICACIONES
$('.collapsible').collapsible();
//TABS
$('ul.tabs').tabs();
//MAPA DE CONTACTO
$("#btn-contact").click(function(){
$(".contact").fadeToggle('medium');
$("html, body").animate({ scrollTop: $('#contacto').offset().top-70 }, 1000);
})
//INICIALIZAR MODALES
$('.modal-trigger').leanModal();
try {
$('.modal').modal();
}
catch(err) {
console.log(err);
}
//MENU HAMBURGUESA
$("#btn-menuHamburguer").click(function(){
$("nav ul , .hamburguer-1 , .hamburguer-2").fadeToggle('medium');
})
//ACTIVAR RECOVER PASSWORD
$('#btn-recover').click(function(){
$("#login-container").hide("slow");
$("#recover-container").show("slow");
})
//ACTIVAR LOGIN
$('#btn-returnContainer').click(function(){
$("#recover-container").hide("slow");
$("#login-container").show("slow");
})
} catch(e) {
console.log(e);
}
//ACTIVAR SUB-OPCIONES 1 en confirmacion de registro
$(".subopciones-tag, .subopciones-tag2").click(function(){
$(this).parent().find('.search').slideToggle('medium')
$(this).parent().find('.subopciones-select').slideToggle('medium')
})
$('.search').keyup(function() {
var texto = $(this).val().toLowerCase();
$(this).parent().next().find('li').show();
$(this).parent().next().find('li').each(function () {
var data = $(this).find('p').html().toLowerCase();
if(texto.length>2 && data.indexOf(texto) == -1){
$(this).hide();
}
});
});
//ACTIVAR SUB-OPCIONES 2 en confirmacion de registro
$(".subopciones-select li a").click(function(){
$(this).toggleClass('active-option')
})
//NUEVA CERTIFICACION en confirmacion de registro
$("#btn-certificado").click(function(){
$("#select-certificado").clone().appendTo("#new-certify");
})
//CONFIRMACION DE REGISTRO
//PASO 1-1 a 1-2
$("#btn-next-confirm1").click(function(){
$("#paso1-3, #btn-return-confirm1, #btn-next-confirm3").show("slow");
$("#paso1-1 , #btn-next-confirm1").hide("slow");
})
//REGRESAR
$("#btn-return-confirm1").click(function(){
$("#paso1-1, #btn-next-confirm1").show("slow");
$("#paso1-3 , #btn-return-confirm1, #btn-next-confirm3").hide("slow");
})
//PASO 1-2 a 1-3
$("#btn-next-confirm2").click(function(){
$("#paso1-3, #btn-return-confirm2, #btn-next-confirm3").show("slow");
$("#paso1-2, #btn-return-confirm1, #btn-next-confirm2").hide("slow");
})
//REGRESAR
$("#btn-return-confirm2").click(function(){
$("#paso1-2, #btn-return-confirm1, #btn-next-confirm2").show("slow");
$("#paso1-3, #btn-return-confirm2, #btn-next-confirm3").hide("slow");
})
//PASO 1-3 a 2-1
$("#btn-next-confirm3").click(function(){
$("#paso2-2, #btn-return-confirm3, #btn-next-confirm5").show("slow");
$("#paso1-3, #btn-return-confirm1, #btn-next-confirm3").hide("slow");
//CAMBIAR LEYENDA
$("#btn-antecentesConfirm").addClass("active");
$("#btn-infoContactoConfirm,btn-preferenciasConfirm, btn-infoLegalConfirm").removeClass("active");
})
//REGRESAR
$("#btn-return-confirm3").click(function(){
$("#paso1-3, #btn-return-confirm1, #btn-next-confirm3").show("slow");
$("#paso2-2, #btn-return-confirm3, #btn-next-confirm5").hide("slow");
//CAMBIAR LEYENDA
$("#btn-infoContactoConfirm").addClass("active");
$("#btn-antecentesConfirm, btn-preferenciasConfirm, btn-infoLegalConfirm").removeClass("active");
})
//PASO 2-1 a 2-2
$("#btn-next-confirm4").click(function(){
$("#paso2-2, #btn-return-confirm4, #btn-next-confirm5").show("slow");
$("#paso2-1, #btn-return-confirm3, #btn-next-confirm4").hide("slow");
})
//REGRESAR
$("#btn-return-confirm4").click(function(){
$("#paso2-1, #btn-return-confirm3, #btn-next-confirm4").show("slow");
$("#paso2-2, #btn-return-confirm4, #btn-next-confirm5").hide("slow");
})
//PASO 2-2 a 3
$("#btn-next-confirm5").click(function(){
$("#paso3, #btn-return-confirm5, #btn-next-confirm6").show("slow");
$("#paso2-2, #btn-return-confirm3, #btn-next-confirm5").hide("slow");
//CAMBIAR LEYENDA
$("#btn-preferenciasConfirm").addClass("active");
$("#btn-antecentesConfirm, #btn-infoContactoConfirm, #btn-infoLegalConfirm").removeClass("active");
})
//REGRESAR
$("#btn-return-confirm5").click(function(){
$("#paso2-2, #btn-return-confirm3, #btn-next-confirm5").show("slow");
$("#paso3, #btn-return-confirm5, #btn-next-confirm6").hide("slow");
//CAMBIAR LEYENDA
$("#btn-antecentesConfirm").addClass("active");
$("#btn-infoContactoConfirm, #btn-preferenciasConfirm, #btn-infoLegalConfirm").removeClass("active");
})
//PASO 3 a 4-1
$("#btn-next-confirm6").click(function(){
$("#paso4-2, #btn-return-confirm6, #btn-next-confirm8").show("slow");
$("#paso3, #btn-return-confirm5, #btn-next-confirm6").hide("slow");
//CAMBIAR LEYENDA
$("#btn-preferenciasConfirm").addClass("active");
$("#btn-infoContactoConfirm, #btn-antecentesConfirm, #btn-infoLegalConfirm").removeClass("active");
})
//REGRESAR
$("#btn-return-confirm6").click(function(){
$("#paso3, #btn-return-confirm5, #btn-next-confirm6").show("slow");
$("#paso4-2, #btn-return-confirm6, #btn-next-confirm8").hide("slow");
//CAMBIAR LEYENDA
$("#btn-preferenciasConfirm").addClass("active");
$("#btn-infoContactoConfirm, #btn-antecentesConfirm, #btn-infoLegalConfirm").removeClass("active");
})
//PASO 4-1 a 4-2
$("#btn-next-confirm7").click(function(){
$("#paso4-2, #btn-return-confirm7, #btn-next-confirm8").show("slow");
$("#paso4-1, #btn-return-confirm6, #btn-next-confirm7").hide("slow");
})
//REGRESAR
$("#btn-return-confirm7").click(function(){
$("#paso4-1, #btn-return-confirm6, #btn-next-confirm7").show("slow");
$("#paso4-2, #btn-return-confirm7, #btn-next-confirm8").hide("slow");
})
//PASO 4-2 a 4-3
$("#btn-next-confirm8").click(function(){
$("#paso4-3").show("slow");
$("#paso4-2, #btn-return-confirm6, #btn-next-confirm8").hide("slow");
})
//EFECTO HEADER
$(window).scroll(function() {
var alto_rslides = $('.header-index').height()
if ($(document).scrollTop() > alto_rslides) {
$('.header-index header').addClass('sticky');
}
else {
$('.header-index header').removeClass('sticky')
}
});
//ANIMACION ENTRE ANCLAS
/*$(function() {
$('a[href*="#"]:not([href="#"])').click(function() {
if (location.pathname.replace(/^\//,'') == this.pathname.replace(/^\//,'') && location.hostname == this.hostname) {
var target = $(this.hash);
target = target.length ? target : $('[name=' + this.hash.slice(1) +']');
if (target.length) {
$('html, body').animate({
scrollTop: target.offset().top
}, 1000);
return false;
}
}
});
});*/
//INDEX-REGISTRADO
//DESPLEGAR TODA LA LISTA
//ELIMINAR UNA ALERTA
$(".close-alert").click(function(){
$(this).parent().fadeOut();
})
//ACTIVAR BUSCADOR DE CANDIDATOS
$(".btn-search").click(function(){
$(".active-one").hide();
$(".active-two").fadeToggle('medium');
})
$(".btn-closeInput").click(function(){
$(".active-two").hide();
$(".active-one").fadeToggle('medium');
})
//TEAM - CANDIDATOS MOBILE
$("#btn-teamMobile").click(function(){
$("#teamMobile-container").fadeToggle('medium');
})
//MENSAJE CONFIRMACION MODAL REGISTRADO
$("#btn-modalMain").click(function(){
$(".formLogin").hide();
$(".messageModal").fadeToggle('medium');
})
//CREAR POST
//PASO 1 A PASO 2
$("#next-newPost1").click(function(){
$("#crear-container").hide();
$("#detail-container").toggleClass("pagination-on");
$("#indi-create , #indi-detail").toggleClass("active");
})
//PASO 2 A PASO 1
$("#return-newPost1").click(function(){
$("#detail-container").removeClass("pagination-on");
$("#crear-container").addClass("pagination-on");
$("#indi-create , #indi-detail").toggleClass("active");
})
//PASO 2 A PASO 3
$("#next-newPost2").click(function(){
$("#detail-container").hide();
$("#go-container, #detail-container").toggleClass("pagination-on");
$("#indi-go , #indi-detail").toggleClass("active");
})
//ALERTAS EN EL NAV
$("#btn-alertNav").click(function(){
$("#alertNavContain").toggleClass("pagination-on");
})
}
);
try{
$(".phone").intlTelInput({/*
allowDropdown: false,
autoHideDialCode: false,
autoPlaceholder: "off",
dropdownContainer: "body",
excludeCountries: ["us"],
formatOnDisplay: false,
geoIpLookup: function(callback) {
$.get("http://ipinfo.io", function() {}, "jsonp").always(function(resp) {
var countryCode = (resp && resp.country) ? resp.country : "";
callback(countryCode);
});
},
initialCountry: "auto",
nationalMode: false,
onlyCountries: ['us', 'gb', 'ch', 'ca', 'do'],
placeholderNumberType: "MOBILE",
preferredCountries: ['cn', 'jp'],
separateDialCode: true,*/
utilsScript: "/assets/js/utils.js"
});
} catch($e){
console.log($e);
}
|
package io.github.vampirestudios.obsidian.api.obsidian.block;
import io.github.vampirestudios.obsidian.api.obsidian.BlockProperty;
import io.github.vampirestudios.obsidian.api.obsidian.DisplayInformation;
import io.github.vampirestudios.obsidian.api.obsidian.NameInformation;
import io.github.vampirestudios.obsidian.api.obsidian.item.FoodInformation;
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class Block {
public BlockType block_type = BlockType.BLOCK;
public BlockInformation information;
public DisplayInformation display;
public AdditionalBlockInformation additional_information;
public Functions functions;
public OreInformation ore_information;
public FoodInformation food_information;
public CampfireProperties campfire_properties;
public List<Identifier> can_plant_on = new ArrayList<>();
public Identifier particle_type;
public Growable growable;
public OxidizableProperties oxidizable_properties;
public Map<String, BlockProperty> events;
public DropInformation dropInformation;
public boolean isMultiBlock = false;
public MultiBlockInformation multiBlockInformation;
public List<net.minecraft.block.Block> getSupportableBlocks() {
List<net.minecraft.block.Block> blocks2 = new ArrayList<>();
can_plant_on.forEach(identifier -> blocks2.add(Registry.BLOCK.get(identifier)));
return blocks2;
}
public enum BlockType {
BLOCK,
HORIZONTAL_FACING_BLOCK,
ROTATABLE_BLOCK,
CAMPFIRE,
STAIRS,
SLAB,
WALL,
FENCE,
FENCE_GATE,
CAKE,
BED,
TRAPDOOR,
METAL_DOOR,
WOODEN_DOOR,
LOG,
STEM,
WOOD,
OXIDIZING_BLOCK,
PLANT,
PILLAR,
HORIZONTAL_FACING_PLANT,
SAPLING,
TORCH,
BEEHIVE,
LEAVES,
LADDER,
PATH,
WOODEN_BUTTON,
STONE_BUTTON,
DOUBLE_PLANT,
HORIZONTAL_FACING_DOUBLE_PLANT,
HANGING_DOUBLE_LEAVES,
EIGHT_DIRECTIONAL_BLOCK,
LANTERN,
CHAIN,
PANE,
DYEABLE,
LOOM,
GRINDSTONE,
CRAFTING_TABLE,
PISTON,
NOTEBLOCK,
JUKEBOX,
SMOKER,
FURNACE,
BLAST_FURNACE,
LECTERN,
FLETCHING_TABLE,
BARREL,
COMPOSTER,
RAILS,
CARTOGRAPHY_TABLE
}
public static class OxidizableProperties {
public List<OxidationStage> stages;
public List<String> cycle;
public static class OxidationStage {
public boolean can_be_waxed;
public List<VariantBlock> blocks;
public boolean stairs;
public boolean slab;
public static class VariantBlock {
public NameInformation name;
public DisplayInformation display;
}
}
}
public static class CampfireProperties {
public boolean emits_particles;
public int fire_damage;
public int luminance;
}
public static class MultiBlockInformation {
public int width;
public int height;
}
}
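// Illustrative usage only (the identifiers are examples, not part of this API):
// resolving can_plant_on entries against the block registry.
//   Block block = new Block();
//   block.can_plant_on.add(new Identifier("minecraft", "dirt"));
//   block.can_plant_on.add(new Identifier("minecraft", "grass_block"));
//   List<net.minecraft.block.Block> supported = block.getSupportableBlocks();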
|
// repo: JamesParkinSonos/okta-auth-js
/*!
* Copyright (c) 2019-present, Okta, Inc. and/or its affiliates. All rights reserved.
* The Okta software accompanied by this notice is provided pursuant to the Apache License, Version 2.0 (the "License.")
*
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and limitations under the License.
*
*/
/* eslint-disable complexity, max-statements */
import { stringToBase64Url } from '../../crypto';
import { MIN_VERIFIER_LENGTH, MAX_VERIFIER_LENGTH, DEFAULT_CODE_CHALLENGE_METHOD } from '../../constants';
import { webcrypto } from '../../crypto';
function dec2hex (dec) {
return ('0' + dec.toString(16)).substr(-2);
}
function getRandomString(length) {
var a = new Uint8Array(Math.ceil(length / 2));
webcrypto.getRandomValues(a);
var str = Array.from(a, dec2hex).join('');
return str.slice(0, length);
}
function generateVerifier(prefix?: string): string {
var verifier = prefix || '';
if (verifier.length < MIN_VERIFIER_LENGTH) {
verifier = verifier + getRandomString(MIN_VERIFIER_LENGTH - verifier.length);
}
return encodeURIComponent(verifier).slice(0, MAX_VERIFIER_LENGTH);
}
function computeChallenge(str: string): PromiseLike<any> {
var buffer = new TextEncoder().encode(str);
return webcrypto.subtle.digest('SHA-256', buffer).then(function(arrayBuffer) {
var hash = String.fromCharCode.apply(null, new Uint8Array(arrayBuffer));
var b64u = stringToBase64Url(hash); // url-safe base64 variant
return b64u;
});
}
export default {
DEFAULT_CODE_CHALLENGE_METHOD,
generateVerifier,
computeChallenge
};
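// A minimal usage sketch (module path assumed; illustrative only): generate a PKCE
// code verifier, then derive its S256 code challenge.
//   import pkce from './pkce';
//   const verifier = pkce.generateVerifier();
//   pkce.computeChallenge(verifier).then((challenge) => {
//     // send `challenge` with code_challenge_method=S256 in the authorize request;
//     // keep `verifier` secret for the later token exchange
//   });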
|
#!/bin/bash
version=$(<../../version.txt)
brew update
brew cask upgrade
brew cask install packages
cp -a ../../SnipInsight.Forms.GTK/bin/Release/. "Release/Snip Insights.app/Contents/MacOS/"
rm -rf build
mkdir build
/usr/local/bin/packagesbuild --package-version "$version" -v snipInsightInstaller.pkgproj > log.txt
mv "build/Snip Insights.pkg" "build/Snip Insights-$version.pkg"
|
#ifndef ZDBFS_INODE_H
#define ZDBFS_INODE_H
int zdbfs_inode_init(zdbfs_t *fs);
size_t zdbfs_inode_dirlist_id(const char *name);
void zdbfs_inode_dump(zdb_inode_t *inode);
size_t zdbfs_offset_to_block(off_t off);
size_t zdbfs_inode_dir_size(zdb_dir_t *dir);
size_t zdbfs_inode_file_size(zdb_inode_t *inode);
zdb_dir_t *zdbfs_dir_new(void);
zdb_inode_t *zdbfs_inode_deserialize_dir(zdb_t *backend, zdb_inode_t *inode, uint8_t *buffer, size_t length);
zdb_inode_t *zdbfs_inode_deserialize_file(zdb_inode_t *inode, uint8_t *buffer, size_t length);
zdb_inode_t *zdbfs_inode_deserialize(zdb_t *backend, uint8_t *buffer, size_t length);
buffer_t zdbfs_inode_serialize_file(zdb_inode_t *inode);
buffer_t zdbfs_inode_serialize_dir(zdb_t *backend, zdb_inode_t *inode);
size_t zdbfs_direntry_size(zdb_direntry_t *entry);
zdb_direntry_t *zdbfs_direntry_new(uint64_t ino, const char *name);
zdb_dir_t *zdbfs_dir_append(zdb_dir_t *dir, zdb_direntry_t *entry);
buffer_t zdbfs_inode_serialize(zdb_t *backend, zdb_inode_t *inode);
void zdbfs_inode_free(zdb_inode_t *inode);
zdb_dir_t *zdbfs_inode_dir_get(zdb_inode_t *inode, const char *name);
zdb_dir_root_t *zdbfs_inode_dir_root_get(zdb_inode_t *inode);
zdb_dir_root_t *zdbfs_inode_dir_root_set(zdb_inode_t *inode, zdb_dir_root_t *root);
zdb_dir_t *zdbfs_inode_dir_append(zdb_inode_t *inode, uint64_t ino, const char *name);
void zdbfs_inode_block_set(zdb_inode_t *inode, size_t block, uint32_t blockid);
uint32_t zdbfs_inode_block_get(zdb_inode_t *inode, size_t block);
zdb_reply_t *zdbfs_inode_block_fetch(fuse_req_t req, zdb_inode_t *file, uint64_t ino, uint32_t block);
uint32_t zdbfs_inode_block_store(fuse_req_t req, zdb_inode_t *inode, uint64_t ino, uint32_t blockid, const char *buffer, size_t buflen);
zdb_blocks_t *zdbfs_inode_blocks_get(zdb_inode_t *inode);
int zdbfs_inode_blocks_remove(fuse_req_t req, zdb_inode_t *inode);
int zdbfs_inode_remove_entry(zdb_inode_t *inode, const char *name);
int zdbfs_inode_unlink(fuse_req_t req, zdb_inode_t *file, uint64_t ino);
zdb_direntry_t *zdbfs_inode_lookup_direntry(zdb_inode_t *inode, const char *name);
void zdbfs_inode_to_stat(struct stat *st, zdb_inode_t *inode, uint64_t ino);
void zdbfs_inode_to_fuse_param(struct fuse_entry_param *param, zdb_inode_t *inode, uint64_t ino);
zdb_inode_t *zdbfs_inode_new_file(fuse_req_t req, uint32_t mode);
zdb_inode_t *zdbfs_inode_new_symlink(fuse_req_t req, const char *link);
zdb_inode_t *zdbfs_inode_new_dir(uint64_t parent, uint32_t mode);
const char *zdbfs_inode_symlink_get(zdb_inode_t *inode);
zdb_inode_t *zdbfs_inode_fetch(fuse_req_t req, fuse_ino_t ino);
zdb_inode_t *zdbfs_directory_fetch(fuse_req_t req, fuse_ino_t ino);
uint64_t zdbfs_inode_store_backend(zdb_t *backend, zdb_inode_t *inode, uint64_t ino);
uint64_t zdbfs_inode_store_metadata(fuse_req_t req, zdb_inode_t *inode, uint64_t ino);
uint64_t zdbfs_inode_store_data(fuse_req_t req, zdb_inode_t *inode, uint64_t ino);
int zdbfs_inode_init_release(zdbfs_t *fs);
char *zdbfs_inode_resolv(fuse_req_t req, fuse_ino_t target, const char *name);
#endif
|
// App/app/src/main/java/com/crossover/mobiliza/app/data/local/AppDatabase.java
package com.crossover.mobiliza.app.data.local;
import android.content.Context;
import android.util.Log;
import androidx.room.Database;
import androidx.room.Room;
import androidx.room.RoomDatabase;
import androidx.room.TypeConverters;
import com.crossover.mobiliza.app.data.local.converters.Converters;
import com.crossover.mobiliza.app.data.local.dao.EventoDao;
import com.crossover.mobiliza.app.data.local.dao.OngDao;
import com.crossover.mobiliza.app.data.local.dao.UserDao;
import com.crossover.mobiliza.app.data.local.dao.VoluntarioDao;
import com.crossover.mobiliza.app.data.local.entity.Evento;
import com.crossover.mobiliza.app.data.local.entity.Ong;
import com.crossover.mobiliza.app.data.local.entity.User;
import com.crossover.mobiliza.app.data.local.entity.Voluntario;
@Database(entities = {
Evento.class,
Ong.class,
Voluntario.class,
User.class}, version = 11, exportSchema = false)
@TypeConverters(Converters.class)
public abstract class AppDatabase extends RoomDatabase {
private static final String TAG = AppDatabase.class.getSimpleName();
private static final Object LOCK = new Object();
private static final String NAME = "mobiliza_db";
private static volatile AppDatabase sInstance;
public static AppDatabase getInstance(Context context) {
if (sInstance == null) {
synchronized (LOCK) {
// re-check inside the lock so two racing threads don't both build a database
if (sInstance == null) {
Log.d(TAG, "Creating database instance");
sInstance = Room
.databaseBuilder(context.getApplicationContext(), AppDatabase.class, NAME)
.fallbackToDestructiveMigration()
.build();
}
}
}
return sInstance;
}
public abstract OngDao ongDao();
public abstract VoluntarioDao voluntarioDao();
public abstract EventoDao eventoDao();
public abstract UserDao userDao();
}
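// A brief usage sketch (illustrative; `context` is any android.content.Context):
//   AppDatabase db = AppDatabase.getInstance(context);
//   OngDao ongDao = db.ongDao();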
|
/*
*
*/
package net.community.chest.lang.math;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import net.community.chest.lang.StringUtil;
import net.community.chest.util.collection.CollectionsUtils;
import net.community.chest.util.map.BooleansMap;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* <P>Holds enumeration values for the size units "hierarchy"</P>
* @author <NAME>.
* @since Sep 22, 2008 8:47:50 AM
*/
public enum SizeUnits {
B("B", 1L),
KB("KB", B),
MB("MB", KB),
GB("GB", MB),
TB("TB", GB),
PB("PB", TB),
HB("HB", PB); // 62 bits representation - max. possible with long
private final String _sizeName;
/**
* @return Suffix used to denote a unit
*/
public final String getSizeName ()
{
return _sizeName;
}
private final long _mulFactor;
/**
* @return Number of bytes representing <U>one</U> unit
*/
public final long getMultiplicationFactor ()
{
return _mulFactor;
}
public long getSizeValue (long numUnits)
{
return numUnits * getMultiplicationFactor();
}
/**
* Converts from another unit into this one
* @param numUnits Number of {@link SizeUnits} of the "other"
* unit
* @param unit The "other" unit
* @return The number of units required to represent the "other"
* unit into "this" one
* @throws IllegalArgumentException if no "other" unit instance
* provided
*/
public double convertToThisUnit (final double numUnits, final SizeUnits unit) throws IllegalArgumentException
{
if (null == unit)
throw new IllegalArgumentException("convertToThisUnit(" + numUnits + ") no other unit specified");
final long thisFactor=getMultiplicationFactor(), otherFactor=unit.getMultiplicationFactor();
if (thisFactor == otherFactor)
return numUnits;
final double unitFactor=(double) otherFactor / (double) thisFactor;
return unitFactor * numUnits;
}
/**
* Converts a size in <U>bytes</U> into the best unit/value "pair"
* @param sz Size in <U>bytes</U> - may not be negative
* @return The closest unit/value "pairs" represented as a {@link Map}
* whose key={@link SizeUnits} value, value=number of such units required
* to reach/add the specified size
* @throws NumberFormatException if negative value provided
*/
public static final Map<SizeUnits,Long> fromSize (final long sz) throws NumberFormatException
{
if (sz < 0L)
throw new NumberFormatException("fromSize(" + sz + ") negative values N/A");
final Map<SizeUnits,Long> szMap=new EnumMap<SizeUnits,Long>(SizeUnits.class);
// shortcut (covers ZERO as well)
if (sz < KB.getMultiplicationFactor())
{
szMap.put(SizeUnits.B, Long.valueOf(sz));
return szMap;
}
// we go "downwards" in the sizes
final Collection<SizeUnits> ua=SizeUnits.getValues(Boolean.FALSE);
long remSize=sz;
for (final SizeUnits u : ua)
{
final long mulFactor=(null == u) ? 0L : u.getMultiplicationFactor();
if ((mulFactor <= 0L) // should not happen
|| (mulFactor > remSize)) // means have less than 1 unit
continue;
final long mulUnit=remSize / mulFactor;
szMap.put(u, Long.valueOf(mulUnit));
if (0 == (remSize %= mulFactor))
break;
}
if (remSize > 0L) // should not happen
throw new NumberFormatException("fromSize(" + sz + ") incomplete conversion");
return szMap;
}
SizeUnits (final String sizeName, final long mulFactor)
{
_sizeName = sizeName;
_mulFactor = mulFactor;
}
SizeUnits (final String sizeName, final SizeUnits subUnit)
{
this(sizeName, subUnit.getMultiplicationFactor() * 1024L);
}
@SuppressWarnings({ "cast", "unchecked", "rawtypes" })
private static final BooleansMap<List<SizeUnits>> _unitsMap=(BooleansMap<List<SizeUnits>>) new BooleansMap(List.class, true);
/**
* Returns a (cached) array of {@link SizeUnits} sorted by multiplication
* factor according to the provided parameter
* @param ascending Sort direction (<code>null</code> means un-sorted)
* @return A {@link List} of {@link SizeUnits} sorted by multiplication factor
* (if non-<code>null</code> sort direction specified)
*/
public static final List<SizeUnits> getValues (final Boolean ascending /* null == unsorted */)
{
synchronized(_unitsMap) {
List<SizeUnits> vl=_unitsMap.get(ascending);
if (null == vl)
{
final SizeUnits[] va=values();
if (ascending != null)
{
final Comparator<SizeUnits> c=ascending.booleanValue()
? ByFactorSizeUnitsComparator.ASCENDING
: ByFactorSizeUnitsComparator.DESCENDING;
Arrays.sort(va, c);
}
vl = Collections.unmodifiableList(Arrays.asList(va));
_unitsMap.put(ascending, vl);
}
return vl;
}
}
public static final List<SizeUnits> getValues ()
{
return getValues(null);
}
public static final SizeUnits fromString (final String s)
{
return CollectionsUtils.fromString(getValues(), s, false);
}
public static final SizeUnits fromSizeName (final String n)
{
if ((null == n) || (n.length() <= 0))
return null;
final Collection<SizeUnits> vals=getValues();
if ((null == vals) || (vals.size() <= 0))
return null; // should not happen
for (final SizeUnits v : vals)
{
final String vn=(null == v) ? null : v.getSizeName();
if (0 == StringUtil.compareDataStrings(n, vn, false))
return v;
}
return null;
}
public static final SizeUnits fromSizeChar (final char c)
{
final Collection<SizeUnits> vals=getValues();
if ((null == vals) || (vals.size() <= 0))
return null; // should not happen
final char cc=((c >= 'a') && (c <= 'z')) ? Character.toUpperCase(c) : c;
for (final SizeUnits v : vals)
{
final String vn=(null == v) ? null : v.getSizeName();
if ((vn != null) && (vn.length() > 0) && (vn.charAt(0) == cc))
return v;
}
return null;
}
// format: 1G2M3K4B
public static final Map<SizeUnits,Long> fromSizeString (final CharSequence cs) throws NumberFormatException
{
final int csLen=(null == cs) ? 0 : cs.length();
int lastPos=0;
Map<SizeUnits,Long> ret=null;
for (int curPos=0; curPos < csLen; curPos++)
{
final char c=cs.charAt(curPos);
if ((c >= '0') && (c <= '9'))
continue;
if (curPos <= lastPos)
throw new NumberFormatException("fromSizeString(" + cs + ") no number before '" + String.valueOf(c) + "' unit specifier");
final SizeUnits u=fromSizeChar(c);
if (null == u)
throw new NumberFormatException("fromSizeString(" + cs + ") unknown unit: " + String.valueOf(c));
final CharSequence vs=cs.subSequence(lastPos, curPos);
final Long v=Long.valueOf(vs.toString()),
p=(null == ret) ? null : ret.get(u);
if (p != null) // if have a previous value for the same unit then add them
{
ret.put(u, Long.valueOf(v.longValue() + p.longValue()));
continue;
}
if (null == ret)
ret = new EnumMap<SizeUnits,Long>(SizeUnits.class);
ret.put(u, v);
}
return ret;
}
// NOTE: overflow may occur for high memory sizes
public static final double fromSizeUnits (final Collection<? extends Map.Entry<SizeUnits,? extends Number>> sl, final SizeUnits targetUnit)
{
if ((null == sl) || (sl.size() <= 0) || (null == targetUnit))
return 0.0d;
double ret=0.0d;
for (final Map.Entry<SizeUnits,? extends Number> se : sl)
{
final SizeUnits u=(null == se) ? null : se.getKey();
final Number n=(null == se) ? null : se.getValue();
if ((null == u) || (null == n) || (0L == n.longValue()))
continue;
final double v=targetUnit.convertToThisUnit(n.doubleValue(), u);
if (v <= 0.0d)
continue;
ret += v;
}
return ret;
}
// NOTE: overflow may occur for high memory sizes
public static final double fromSizeUnits (final Map<SizeUnits,? extends Number> m, final SizeUnits targetUnit)
{
return fromSizeUnits(((null == m) || (m.size() <= 0)) ? null : m.entrySet(), targetUnit);
}
// format: 1G2M3K4B
public static final double fromSizeString (final CharSequence cs, final SizeUnits targetUnit) throws NumberFormatException
{
final Map<SizeUnits,? extends Number> m=(null == targetUnit) ? null : fromSizeString(cs);
return fromSizeUnits(m, targetUnit);
}
}
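// A brief usage sketch of the "1G2M3K4B" format (values are illustrative):
//   Map<SizeUnits,Long> parts = SizeUnits.fromSizeString("1G512M"); // {GB=1, MB=512}
//   double inMb = SizeUnits.fromSizeString("1G512M", SizeUnits.MB); // 1536.0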
|
#include <proc.h>
#include <scheduler.h>
void schedule(void) {
if(_current_proc->state == READY) { //current process ready to run
_current_proc->state = RUNNING;
proc_start(_current_proc);
return;
}
//current process is running, switch to the next one.
process_t* head_proc = _current_proc;
process_t* proc = _current_proc->next;
while(head_proc != proc) {
proc_sleep_check(proc);
if(proc->state == READY) {
if(head_proc->state == RUNNING) //current process running.
head_proc->state = READY;
proc->state = RUNNING; //run next one.
proc_start(proc);
return;
}
else if(proc->state == TERMINATED) {
process_t* tmp = proc;
proc = proc->next;
proc_free(tmp);
}
else
proc = proc->next;
}
}
|
package kr.co.gardener.admin.service.user;
import java.util.List;
import kr.co.gardener.admin.model.other.Notice;
import kr.co.gardener.admin.model.other.list.NoticeList;
import kr.co.gardener.util.Pager;
public interface NoticeService {
List<Notice> list();
void add(Notice item);
Notice item(int noticeId);
void update(Notice item);
void delete(int noticeId);
NoticeList list_pager(Pager pager);
void insert_list(NoticeList list);
void delete_list(NoticeList list);
void update_list(NoticeList list);
}
|
// repo: cybertoothca/ember-cli-text-field-mixins
import { later } from '@ember/runloop';
import Mixin from '@ember/object/mixin';
/**
* When focus is placed in an `input[:text]` or `textarea` the text within is selected.
*/
export default Mixin.create({
/**
* If you override make sure to `this._super(...arguments)` to preserve this behaviour.
*/
focusIn(/*event*/) {
this._super(...arguments);
if (this['focusSelectsText?']) {
// using a runloop to make sure textarea text can be selected in webkit/safari
// @see https://stackoverflow.com/a/6201757/545137
later(this, () => {
this.element.select();
}, 1);
}
},
/**
* By default, focus on `this` input/textarea will select the text within. Set this to `false` if you do not
* want this behaviour.
*/
'focusSelectsText?': true
});
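// A minimal usage sketch (assumes the mixin lives at app/mixins/focus-selects-text):
//   import TextField from '@ember/component/text-field';
//   import FocusSelectsText from '../mixins/focus-selects-text';
//   export default TextField.extend(FocusSelectsText);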
|
#!/bin/bash
set -e
if [[ ! -f "/etc/os-release" ]]; then
echo "ERROR: can't determine OS type"
exit 1
fi
# read os-release info
set +e; . /etc/os-release 2>/dev/null; set -e
if [[ "x$NAME" = "xDebian GNU/Linux" ]] && [[ -e "/etc/chip_build_info.txt" ]]; then
echo "INFO: OS Debian on C.H.I.P. computer detected."
else
echo "ERROR: wrong OS type '$NAME'"
exit 1
fi
# part 1:
# install Docker default settings for overlay fs
mkdir -p /etc/systemd/system
rm -fr /etc/systemd/system/docker.service*
curl -sSL https://raw.githubusercontent.com/docker/docker/master/contrib/init/systemd/docker.service > /etc/systemd/system/docker.service
sed -i 's|ExecStart=/usr/bin/dockerd -H|ExecStart=/usr/bin/dockerd --storage-driver overlay -H|' /etc/systemd/system/docker.service
rm -fr /etc/systemd/system/docker.service.d
systemctl daemon-reload
echo "SUCCESS: fix part 1 applied, created '/etc/systemd/system/docker.service'!"
# part 2:
# ensure we have APT https support installed
if [[ ! -e /usr/lib/apt/methods/https ]]; then
apt-get update
apt-get install -y apt-transport-https ca-certificates
fi
# install APT keys for Docker APT repo
gpg_fingerprint="58118E89F3A912897C070ADBF76221572C52609D"
key_servers="
ha.pool.sks-keyservers.net
pgp.mit.edu
keyserver.ubuntu.com
"
for key_server in $key_servers ; do
apt-key adv --keyserver "hkp://${key_server}:80" --recv-keys ${gpg_fingerprint} && break
done
apt-key adv -k ${gpg_fingerprint} >/dev/null
# create repo list for Docker APT repo
#TODO: change to 'debian-jessie' as soon as the .deb is available on Docker APT repo
echo "deb [arch=armhf] https://apt.dockerproject.org/repo raspbian-jessie main" > /etc/apt/sources.list.d/docker.list
echo "SUCCESS: fix part 2 applied, created '/etc/apt/sources.list.d/docker.list'!"
# part 3:
# install Docker Engine
apt-get update
apt-get install -y docker-engine
echo "SUCCESS: fix part 3 applied, Docker Engine installed!"
docker -v
docker version
### done - SUCCESS
echo -e "\nSUCCESS: Fix 002 successfully applied."
echo " don't forget to send me a tweet to @Quintus23M, thanks. ;-)"
|
package cyclops.pure.instances.control;
import static cyclops.container.control.Ior.narrowK;
import cyclops.function.higherkinded.DataWitness.ior;
import cyclops.function.higherkinded.Higher;
import cyclops.function.higherkinded.Higher2;
import cyclops.pure.arrow.Cokleisli;
import cyclops.pure.arrow.Kleisli;
import cyclops.pure.arrow.MonoidK;
import cyclops.container.control.Either;
import cyclops.container.control.Ior;
import cyclops.container.control.Option;
import cyclops.function.combiner.Monoid;
import cyclops.pure.container.functional.Active;
import cyclops.pure.container.functional.Coproduct;
import cyclops.pure.container.functional.Nested;
import cyclops.pure.container.functional.Product;
import cyclops.pure.typeclasses.InstanceDefinitions;
import cyclops.pure.typeclasses.Pure;
import cyclops.pure.typeclasses.comonad.Comonad;
import cyclops.pure.typeclasses.foldable.Foldable;
import cyclops.pure.typeclasses.foldable.Unfoldable;
import cyclops.pure.typeclasses.functor.BiFunctor;
import cyclops.pure.typeclasses.functor.Functor;
import cyclops.pure.typeclasses.monad.Applicative;
import cyclops.pure.typeclasses.monad.ApplicativeError;
import cyclops.pure.typeclasses.monad.Monad;
import cyclops.pure.typeclasses.monad.MonadPlus;
import cyclops.pure.typeclasses.monad.MonadRec;
import cyclops.pure.typeclasses.monad.MonadZero;
import cyclops.pure.typeclasses.monad.Traverse;
import cyclops.pure.typeclasses.monad.TraverseByTraverse;
import java.util.function.Function;
import lombok.AllArgsConstructor;
public class IorInstances {
private final static IorTypeclasses INSTANCE = new IorTypeclasses<>();
public static <L, T> Kleisli<Higher<ior, L>, Ior<L, T>, T> kindKleisli() {
return Kleisli.of(IorInstances.monad(),
Ior::widen);
}
public static <L, T> Cokleisli<Higher<ior, L>, T, Ior<L, T>> kindCokleisli() {
return Cokleisli.of(Ior::narrowK);
}
public static <W1, ST, PT> Nested<Higher<ior, ST>, W1, PT> nested(Ior<ST, Higher<W1, PT>> nested,
InstanceDefinitions<W1> def2) {
return Nested.of(nested,
IorInstances.definitions(),
def2);
}
public static <W1, LT, RT> Product<Higher<ior, LT>, W1, RT> product(Ior<LT, RT> ior,
Active<W1, RT> active) {
return Product.of(allTypeclasses(ior),
active);
}
public static <W1, LT, RT> Coproduct<W1, Higher<ior, LT>, RT> coproduct(Ior<LT, RT> ior,
InstanceDefinitions<W1> def2) {
return Coproduct.right(ior,
def2,
IorInstances.definitions());
}
public static <LT, RT> Active<Higher<ior, LT>, RT> allTypeclasses(Ior<LT, RT> ior) {
return Active.of(ior,
IorInstances.definitions());
}
public static <W2, R, LT, RT> Nested<Higher<ior, LT>, W2, R> mapM(Ior<LT, RT> ior,
Function<? super RT, ? extends Higher<W2, R>> fn,
InstanceDefinitions<W2> defs) {
return Nested.of(ior.map(fn),
IorInstances.definitions(),
defs);
}
public static <L> InstanceDefinitions<Higher<ior, L>> definitions() {
return new InstanceDefinitions<Higher<ior, L>>() {
@Override
public <T, R> Functor<Higher<ior, L>> functor() {
return IorInstances.functor();
}
@Override
public <T> Pure<Higher<ior, L>> unit() {
return IorInstances.unit();
}
@Override
public <T, R> Applicative<Higher<ior, L>> applicative() {
return IorInstances.applicative();
}
@Override
public <T, R> Monad<Higher<ior, L>> monad() {
return IorInstances.monad();
}
@Override
public <T, R> Option<MonadZero<Higher<ior, L>>> monadZero() {
return Option.none();
}
@Override
public <T> Option<MonadPlus<Higher<ior, L>>> monadPlus() {
return Option.none();
}
@Override
public <T> MonadRec<Higher<ior, L>> monadRec() {
return IorInstances.monadRec();
}
@Override
public <T> Option<MonadPlus<Higher<ior, L>>> monadPlus(MonoidK<Higher<ior, L>> m) {
return Option.none();
}
@Override
public <C2, T> Traverse<Higher<ior, L>> traverse() {
return IorInstances.traverse();
}
@Override
public <T> Foldable<Higher<ior, L>> foldable() {
return IorInstances.foldable();
}
@Override
public <T> Option<Comonad<Higher<ior, L>>> comonad() {
return Option.none();
}
@Override
public <T> Option<Unfoldable<Higher<ior, L>>> unfoldable() {
return Option.none();
}
};
}
public static final <L> IorTypeclasses<L> getInstance() {
return INSTANCE;
}
public static <L> Functor<Higher<ior, L>> functor() {
return INSTANCE;
}
public static <L> Pure<Higher<ior, L>> unit() {
return INSTANCE;
}
public static <L> Applicative<Higher<ior, L>> applicative() {
return INSTANCE;
}
public static BiFunctor<ior> bifunctor() {
return INSTANCE;
}
public static <L> Monad<Higher<ior, L>> monad() {
return INSTANCE;
}
public static <L> Traverse<Higher<ior, L>> traverse() {
return INSTANCE;
}
public static <L> Foldable<Higher<ior, L>> foldable() {
return INSTANCE;
}
public static <X, T, R> MonadRec<Higher<ior, X>> monadRec() {
return INSTANCE;
}
@AllArgsConstructor
public static class IorTypeclasses<L> implements Monad<Higher<ior, L>>, MonadRec<Higher<ior, L>>,
TraverseByTraverse<Higher<ior, L>>, Foldable<Higher<ior, L>>,
ApplicativeError<Higher<ior, L>, L>, BiFunctor<ior> {
@Override
public <T> T foldRight(Monoid<T> monoid,
Higher<Higher<ior, L>, T> ds) {
Ior<L, T> ior = narrowK(ds);
return ior.fold(monoid);
}
@Override
public <T> T foldLeft(Monoid<T> monoid,
Higher<Higher<ior, L>, T> ds) {
Ior<L, T> ior = narrowK(ds);
return ior.fold(monoid);
}
@Override
public <T, R> R foldMap(Monoid<R> mb,
Function<? super T, ? extends R> fn,
Higher<Higher<ior, L>, T> nestedA) {
return narrowK(nestedA).<R>map(fn).fold(mb);
}
@Override
public <T, R, T2, R2> Higher2<ior, R, R2> bimap(Function<? super T, ? extends R> fn,
Function<? super T2, ? extends R2> fn2,
Higher2<ior, T, T2> ds) {
return narrowK(ds).bimap(fn,
fn2);
}
@Override
public <T> Higher<Higher<ior, L>, T> raiseError(L l) {
return Ior.left(l);
}
@Override
public <T> Higher<Higher<ior, L>, T> handleErrorWith(Function<? super L, ? extends Higher<Higher<ior, L>, ? extends T>> fn,
Higher<Higher<ior, L>, T> ds) {
Ior<L, T> ior = Ior.narrowK(ds);
if (ior.isLeft()) {
Ior<L, ? extends T> res = narrowK(fn.apply(ior.getLeft()
.orElse(null)));
return (Ior<L, T>) res;
}
return ior;
}
@Override
public <T, R> Higher<Higher<ior, L>, R> flatMap(Function<? super T, ? extends Higher<Higher<ior, L>, R>> fn,
Higher<Higher<ior, L>, T> ds) {
Ior<L, T> ior = narrowK(ds);
return ior.flatMap(fn.andThen(Ior::narrowK));
}
@Override
public <C2, T, R> Higher<C2, Higher<Higher<ior, L>, R>> traverseA(Applicative<C2> applicative,
Function<? super T, ? extends Higher<C2, R>> fn,
Higher<Higher<ior, L>, T> ds) {
Ior<L, T> maybe = narrowK(ds);
return maybe.fold(left -> applicative.unit(Ior.left(left)),
right -> applicative.map(m -> Ior.right(m),
fn.apply(right)),
(l, r) -> applicative.map(m -> Ior.both(l,
m),
fn.apply(r)));
}
@Override
public <T, R> Higher<Higher<ior, L>, R> ap(Higher<Higher<ior, L>, ? extends Function<T, R>> fn,
Higher<Higher<ior, L>, T> apply) {
Ior<L, T> ior = narrowK(apply);
Ior<L, ? extends Function<T, R>> iorFn = narrowK(fn);
return iorFn.zip(ior,
(a, b) -> a.apply(b));
}
@Override
public <T> Higher<Higher<ior, L>, T> unit(T value) {
return Ior.right(value);
}
@Override
public <T, R> Higher<Higher<ior, L>, R> map(Function<? super T, ? extends R> fn,
Higher<Higher<ior, L>, T> ds) {
Ior<L, T> ior = narrowK(ds);
return ior.map(fn);
}
@Override
public <T, R> Higher<Higher<ior, L>, R> tailRec(T initial,
Function<? super T, ? extends Higher<Higher<ior, L>, ? extends Either<T, R>>> fn) {
Ior<L, ? extends Either<T, R>>[] next = new Ior[1];
next[0] = Ior.right(Either.left(initial));
boolean cont = true;
do {
cont = next[0].fold(p -> p.fold(s -> {
next[0] = narrowK(fn.apply(s));
return true;
},
pr -> false),
() -> false);
} while (cont);
return next[0].map(x -> x.orElse(null));
}
}
}
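// Illustrative only: driving the Monad instance through the higher-kinded encoding.
//   Higher<Higher<ior, String>, Integer> h = IorInstances.<String>monad().unit(10);
//   Ior<String, Integer> ior = Ior.narrowK(h); // Ior.right(10)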
|
// repo: AlexRogalskiy/serendipity
/*
const ModuleFederationPlugin = require("webpack/lib/container/ModuleFederationPlugin");
const mf = require("@angular-architects/module-federation/webpack");
const path = require("path");
const share = mf.share;
const sharedMappings = new mf.SharedMappings();
sharedMappings.register(
path.join(__dirname, '../../tsconfig.json'),
[]);
module.exports = {
output: {
uniqueName: "serendipityWebApp",
publicPath: "auto"
},
optimization: {
runtimeChunk: false
},
resolve: {
alias: {
...sharedMappings.getAliases(),
}
},
plugins: [
new ModuleFederationPlugin({
remotes: {
// "party": "party@http://localhost:4201/remoteEntry.js"
},
shared: share({
"@angular/cdk": { singleton: true, strictVersion: true, requiredVersion: 'auto' },
"@angular/common": { singleton: true, strictVersion: true, requiredVersion: 'auto' },
"@angular/common/http": { singleton: true, strictVersion: true, requiredVersion: 'auto' },
"@angular/core": { singleton: true, strictVersion: true, requiredVersion: 'auto' },
"@angular/flex-layout": { singleton: true, strictVersion: true, requiredVersion: 'auto' },
"@angular/forms": { singleton: true, strictVersion: true, requiredVersion: 'auto' },
"@angular/material": { singleton: true, strictVersion: true, requiredVersion: 'auto' },
"@angular/router": { singleton: true, strictVersion: true, requiredVersion: 'auto' },
...sharedMappings.getDescriptors()
})
}),
sharedMappings.getPlugin()
],
};
// https://github.com/angular-architects/module-federation-plugin/blob/main/libs/mf/tutorial/tutorial.md
*/
|
import random
import time
from dagster import Field, In, Out, Output, graph, op
@op(
ins={"chase_duration": In(int)},
out=Out(int),
config_schema={
"chase_size": Field(
int,
default_value=100000,
is_required=False,
description="How big should the pointer chase array be?",
)
},
)
def hammer_op(context, chase_duration):
"""what better way to do a lot of gnarly work than to pointer chase?"""
ptr_length = context.op_config["chase_size"]
data = list(range(0, ptr_length))
random.shuffle(data)
curr = random.randint(0, ptr_length - 1)
# and away we go
start_time = time.time()
while (time.time() - start_time) < chase_duration:
curr = data[curr]
context.log.info("Hammered - start %d end %d" % (start_time, time.time()))
return chase_duration
@op(
config_schema=Field(int, is_required=False, default_value=1),
out={"out_1": Out(int), "out_2": Out(int), "out_3": Out(int), "out_4": Out(int)},
)
def chase_giver(context):
chase_duration = context.op_config
yield Output(chase_duration, "out_1")
yield Output(chase_duration, "out_2")
yield Output(chase_duration, "out_3")
yield Output(chase_duration, "out_4")
@op(
ins={"in_1": In(int), "in_2": In(int), "in_3": In(int), "in_4": In(int)},
out=Out(int),
)
def reducer(_, in_1, in_2, in_3, in_4):
return in_1 + in_2 + in_3 + in_4
@graph
def hammer():
out_1, out_2, out_3, out_4 = chase_giver()
reducer(
in_1=hammer_op(chase_duration=out_1),
in_2=hammer_op(chase_duration=out_2),
in_3=hammer_op(chase_duration=out_3),
in_4=hammer_op(chase_duration=out_4),
)
hammer_default_executor_job = hammer.to_job()
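# A minimal sketch of executing the job in-process (assumes dagster is installed):
#   if __name__ == "__main__":
#       result = hammer_default_executor_job.execute_in_process()
#       assert result.success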
|
// src/js/components/old_todos.js
import React from "react"
import PropTypes from 'prop-types'
import muiThemeable from 'material-ui/styles/muiThemeable'
import {Link} from 'react-router'
import FlatButton from 'material-ui/FlatButton'
import ToDo from "../containers/todo"
const Timeline = ({muiTheme, params, toDos, addNewToDo, removeToDo}) => (
<div>
{
// Test for params
// Can't declare a variable inside a JSX expression, hence the commented-out line below.
// const {user} = params
params.user !== "MerinEREN" &&
<Link
style={{color: muiTheme.palette.textColor}}
to="MerinEREN"
activeStyle={{
textDecoration: 'none',
color: 'black'
}}
>
Navigate to Merin
</Link>
}
<FlatButton
label="Add New"
onTouchTap={() => addNewToDo()}
/>
{/*
toDos.map((toDo, i) =>
<ToDo
key={i}
index={i}
{...toDo}
remove={() => removeToDo(i)}
/>
)
*/}
{/*
for(const todo of toDos) {
return (
<ToDo
key={todo.id}
index={todo.id}
{...todo}
remove={() => removeToDo(todo.id)}
/>
)
}
*/}
{
Object.keys(toDos).map(function (key) {
let todo = toDos[key];
return (
<ToDo
key={todo.id}
{...todo}
remove={() => removeToDo(todo.id)}
/>
)
})
}
</div>
)
Timeline.propTypes = {
muiTheme: PropTypes.object.isRequired,
params: PropTypes.object.isRequired,
toDos: PropTypes.objectOf(PropTypes.shape({
id: PropTypes.number.isRequired,
children: PropTypes.node.isRequired,
editable: PropTypes.bool.isRequired
}).isRequired).isRequired,
addNewToDo: PropTypes.func.isRequired,
removeToDo: PropTypes.func.isRequired
}
export default muiThemeable()(Timeline)
|
#!/usr/bin/env bash
SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
cd $SCRIPTPATH
cd ../../../
. config.profile
# check the environment info
nvidia-smi
export PYTHONPATH="$PWD":$PYTHONPATH
${PYTHON} -m pip install yacs
${PYTHON} -m pip install torchcontrib
${PYTHON} -m pip install git+https://github.com/lucasb-eyer/pydensecrf.git
DATA_DIR="${DATA_ROOT}/cityscapes"
SAVE_DIR="${DATA_ROOT}/seg_result/cityscapes/"
BACKBONE="deepbase_resnest101_dilated8"
CONFIGS="configs/cityscapes/R_101_D_8.json"
CONFIGS_TEST="configs/cityscapes/R_101_D_8_TEST.json"
MODEL_NAME="fcnet"
LOSS_TYPE="fs_auxce_loss"
CHECKPOINTS_NAME="${MODEL_NAME}_${BACKBONE}_"$2
LOG_FILE="./log/cityscapes/${CHECKPOINTS_NAME}.log"
echo "Logging to $LOG_FILE"
mkdir -p `dirname $LOG_FILE`
PRETRAINED_MODEL="./pretrained_model/resnest101-966fb78c.pth"
MAX_ITERS=40000
if [ "$1"x == "train"x ]; then
${PYTHON} -u main.py --configs ${CONFIGS} \
--drop_last y \
--phase train \
--gathered n \
--loss_balance y \
--log_to_file n \
--backbone ${BACKBONE} \
--model_name ${MODEL_NAME} \
--gpu 0 1 2 3 \
--data_dir ${DATA_DIR} \
--loss_type ${LOSS_TYPE} \
--max_iters ${MAX_ITERS} \
--checkpoints_name ${CHECKPOINTS_NAME} \
--pretrained ${PRETRAINED_MODEL} \
2>&1 | tee ${LOG_FILE}
elif [ "$1"x == "resume"x ]; then
${PYTHON} -u main.py --configs ${CONFIGS} \
--drop_last y \
--gathered n \
--loss_balance y \
--log_to_file n \
--backbone ${BACKBONE} \
--model_name ${MODEL_NAME} \
--max_iters ${MAX_ITERS} \
--data_dir ${DATA_DIR} \
--loss_type ${LOSS_TYPE} \
--gpu 0 1 2 3 \
--resume_continue y \
--resume ./checkpoints/cityscapes/${CHECKPOINTS_NAME}_latest.pth \
--checkpoints_name ${CHECKPOINTS_NAME} \
2>&1 | tee -a ${LOG_FILE}
elif [ "$1"x == "val"x ]; then
# ${PYTHON} -u main.py --configs ${CONFIGS} --drop_last y \
# --backbone ${BACKBONE} --model_name ${MODEL_NAME} --checkpoints_name ${CHECKPOINTS_NAME} \
# --phase test --gpu 0 1 2 3 --resume ./checkpoints/cityscapes/${CHECKPOINTS_NAME}_latest.pth \
# --loss_type ${LOSS_TYPE} --test_dir ${DATA_DIR}/val/image \
# --out_dir ${SAVE_DIR}${CHECKPOINTS_NAME}_val
cd lib/metrics
# evaluate the mIoU
# ${PYTHON} -u cityscapes_evaluator.py --pred_dir ${SAVE_DIR}${CHECKPOINTS_NAME}_val/label \
# --gt_dir ${DATA_DIR}/val/label
# evaluate the boundary F-score
${PYTHON} -u cityscapes_fscore_evaluator.py --pred_dir ${SAVE_DIR}${CHECKPOINTS_NAME}_val/label \
--gt_dir ${DATA_DIR}/val/label
elif [ "$1"x == "test"x ]; then
if [ "$3"x == "ss"x ]; then
echo "[single scale] test"
${PYTHON} -u main.py --configs ${CONFIGS} --drop_last y \
--backbone ${BACKBONE} --model_name ${MODEL_NAME} --checkpoints_name ${CHECKPOINTS_NAME} \
--phase test --gpu 0 1 2 3 --resume ./checkpoints/cityscapes/${CHECKPOINTS_NAME}_latest.pth \
--test_dir ${DATA_DIR}/test --log_to_file n \
--out_dir ${SAVE_DIR}${CHECKPOINTS_NAME}_test_ss
else
echo "[multiple scale + flip] test"
${PYTHON} -u main.py --configs ${CONFIGS_TEST} --drop_last y \
--backbone ${BACKBONE} --model_name ${MODEL_NAME} --checkpoints_name ${CHECKPOINTS_NAME} \
--phase test --gpu 0 1 2 3 --resume ./checkpoints/cityscapes/${CHECKPOINTS_NAME}_latest.pth \
--test_dir ${DATA_DIR}/test --log_to_file n \
--out_dir ${SAVE_DIR}${CHECKPOINTS_NAME}_test_ms
fi
else
echo "$1"x" is invalid..."
fi
|
// repo: useflyyer/robots
export { Extension, Group, Rule } from "./types";
export { PARSE, ParsedResult } from "./parse";
export { Pattern } from "./robots-txt-guard/patterns";
export { makeGuard as GUARD, GuardRule } from "./robots-txt-guard/guard";
|
const _ = require('underscore');
const BaseStep = require('./basestep.js');
class BaseAbilityWindow extends BaseStep {
constructor(game, properties) {
super(game);
this.abilityChoices = [];
this.events = _.flatten([properties.event]);
this.abilityType = properties.abilityType;
}
canTriggerAbility(ability) {
return ability.eventType === this.abilityType && _.any(this.events, event => ability.isTriggeredByEvent(event));
}
emitEvents() {
_.each(this.events, event => {
this.game.emit(event.name + ':' + this.abilityType, ...event.params);
});
}
registerAbilityForEachEvent(ability) {
let matchingEvents = _.filter(this.events, event => ability.isTriggeredByEvent(event));
_.each(matchingEvents, event => {
this.registerAbility(ability, event);
});
}
}
module.exports = BaseAbilityWindow;
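// Illustrative subclass sketch (the class name is hypothetical): registerAbility()
// is called by registerAbilityForEachEvent() above but left to subclasses to define.
//   class PromptedAbilityWindow extends BaseAbilityWindow {
//       registerAbility(ability, event) {
//           this.abilityChoices.push({ ability, event });
//       }
//   }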
|
#!/bin/bash
# Copyright 2017 Pegah Ghahremani
# 2017-18 Vimal Manohar
# 2018 Hossein Hadian
# Apache 2.0
# This script generates examples for multilingual training of neural network
# using separate input egs dir per language as input.
# This scripts produces 3 sets of files --
# egs.*.scp, egs.output.*.ark, egs.weight.*.ark
#
# egs.*.scp are the SCP files of the training examples.
# egs.weight.*.ark map from the key of the example to the language-specific
# weight of that example.
# egs.output.*.ark map from the key of the example to the name of
# the output-node in the neural net for that specific language, e.g.
# 'output-2'.
#
# Begin configuration section.
cmd=run.pl
block_size=256 # This is the number of consecutive egs that we take from
# each source, and it only affects the locality of disk
# access.
lang2weight= # array of weights, one per input language, used to scale each example's
             # output w.r.t. its input language during training.
stage=0
echo "$0 $@" # Print the command line for logging
if [ -f path.sh ]; then . ./path.sh; fi
. parse_options.sh || exit 1;
if [ $# -lt 3 ]; then
cat <<EOF
This script generates examples for multilingual training of neural network
using separate input egs dir per language as input.
See top of the script for details.
Usage: $0 [opts] <num-input-langs,N> <lang1-egs-dir> ...<langN-egs-dir> <multilingual-egs-dir>
e.g.: $0 [opts] 2 exp/lang1/egs exp/lang2/egs exp/multi/egs
Options:
--cmd (utils/run.pl|utils/queue.pl <queue opts>) # how to run jobs.
--block-size <int|512> # it is the number of consecutive egs that we take from
# each source, and it only affects the locality of disk
# access. This does not have to be the actual minibatch size
EOF
exit 1;
fi
num_langs=$1
shift 1
args=("$@")
megs_dir=${args[-1]} # multilingual directory
mkdir -p $megs_dir
mkdir -p $megs_dir/info
if [ ${#args[@]} != $[$num_langs+1] ]; then
echo "$0: num of input example dirs provided is not compatible with num_langs $num_langs."
echo "Usage:$0 [opts] <num-input-langs,N> <lang1-egs-dir> ...<langN-egs-dir> <multilingual-egs-dir>"
echo "Usage:$0 [opts] 2 exp/lang1/egs exp/lang2/egs exp/multi/egs"
exit 1;
fi
required="egs.scp combine.scp train_diagnostic.scp valid_diagnostic.scp"
train_scp_list=
train_diagnostic_scp_list=
valid_diagnostic_scp_list=
combine_scp_list=
# read parameters from ${args[0]}/info and cmvn_opts
# to write into the multilingual egs_dir.
check_params="info/feat_dim info/ivector_dim info/left_context info/right_context cmvn_opts"
ivec_dim=`cat ${args[0]}/info/ivector_dim`
if [ $ivec_dim -ne 0 ];then check_params="$check_params info/final.ie.id"; fi
for param in $check_params info/frames_per_eg; do
cat ${args[0]}/$param > $megs_dir/$param || exit 1;
done
tot_num_archives=0
for lang in $(seq 0 $[$num_langs-1]);do
multi_egs_dir[$lang]=${args[$lang]}
for f in $required; do
if [ ! -f ${multi_egs_dir[$lang]}/$f ]; then
echo "$0: no such file ${multi_egs_dir[$lang]}/$f." && exit 1;
fi
done
num_archives=$(cat ${multi_egs_dir[$lang]}/info/num_archives)
tot_num_archives=$[tot_num_archives+num_archives]
train_scp_list="$train_scp_list ${args[$lang]}/egs.scp"
train_diagnostic_scp_list="$train_diagnostic_scp_list ${args[$lang]}/train_diagnostic.scp"
valid_diagnostic_scp_list="$valid_diagnostic_scp_list ${args[$lang]}/valid_diagnostic.scp"
combine_scp_list="$combine_scp_list ${args[$lang]}/combine.scp"
# check parameter dimension to be the same in all egs dirs
for f in $check_params; do
if [ -f $megs_dir/$f ] && [ -f ${multi_egs_dir[$lang]}/$f ]; then
f1=$(cat $megs_dir/$f)
f2=$(cat ${multi_egs_dir[$lang]}/$f)
if [ "$f1" != "$f2" ] ; then
echo "$0: mismatch for $f in $megs_dir vs. ${multi_egs_dir[$lang]}($f1 vs. $f2)."
exit 1;
fi
else
echo "$0: file $f does not exits in $megs_dir or ${multi_egs_dir[$lang]}/$f ."
fi
done
done
if [ ! -z "$lang2weight" ]; then
egs_opt="--lang2weight '$lang2weight'"
fi
if [ $stage -le 0 ]; then
echo "$0: allocating multilingual examples for training."
# Generate egs.*.scp for multilingual setup.
$cmd $megs_dir/log/allocate_multilingual_examples_train.log \
steps/nnet3/multilingual/allocate_multilingual_examples.py $egs_opt \
$train_scp_list $megs_dir || exit 1;
fi
if [ $stage -le 1 ]; then
echo "$0: combine combine.scp examples from all langs in $megs_dir/combine.scp."
# Generate combine.scp for multilingual setup.
$cmd $megs_dir/log/allocate_multilingual_examples_combine.log \
steps/nnet3/multilingual/allocate_multilingual_examples.py $egs_opt \
--egs-prefix "combine." \
$combine_scp_list $megs_dir || exit 1;
echo "$0: combine train_diagnostic.scp examples from all langs in $megs_dir/train_diagnostic.scp."
# Generate train_diagnostic.scp for multilingual setup.
$cmd $megs_dir/log/allocate_multilingual_examples_train_diagnostic.log \
steps/nnet3/multilingual/allocate_multilingual_examples.py $egs_opt \
--egs-prefix "train_diagnostic." \
$train_diagnostic_scp_list $megs_dir || exit 1;
echo "$0: combine valid_diagnostic.scp examples from all langs in $megs_dir/valid_diagnostic.scp."
# Generate valid_diagnostic.scp for multilingual setup.
$cmd $megs_dir/log/allocate_multilingual_examples_valid_diagnostic.log \
steps/nnet3/multilingual/allocate_multilingual_examples.py $egs_opt \
--egs-prefix "valid_diagnostic." \
$valid_diagnostic_scp_list $megs_dir || exit 1;
fi
for egs_type in combine train_diagnostic valid_diagnostic; do
mv $megs_dir/${egs_type}.output.1.ark $megs_dir/${egs_type}.output.ark || exit 1;
mv $megs_dir/${egs_type}.weight.1.ark $megs_dir/${egs_type}.weight.ark || exit 1;
mv $megs_dir/${egs_type}.1.scp $megs_dir/${egs_type}.scp || exit 1;
done
mv $megs_dir/info/egs.num_archives $megs_dir/info/num_archives || exit 1;
mv $megs_dir/info/egs.num_tasks $megs_dir/info/num_tasks || exit 1;
echo "$0: Finished preparing multilingual training example."
|
curl "http://localhost:8080/url/Proxy" \
-H "Content-Type: application/json" \
-H "Authorization: Bearer $MICRO_API_TOKEN" \
-d '{
"shortURL": "https://m3o.one/u/ck6SGVkYp"
}'
|
import statistics
data = [25, 28, 28, 27, 28, 25, 29]
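# stdev() uses the sample (n-1) denominator; for this data it prints ~1.5736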
std_dev = statistics.stdev(data)
print(std_dev)
|
/**
* Copyright 2017 iovation, Inc.
* <p>
* Licensed under the MIT License.
* You may not use this file except in compliance with the License.
* A copy of the License is located in the "LICENSE.txt" file accompanying
* this file. This file is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iovation.launchkey.sdk.transport.domain;
import java.util.List;
public class ServicesGetResponse {
private final List<ServicesGetResponseService> services;
public ServicesGetResponse(List<ServicesGetResponseService> services) {
this.services = services;
}
public List<ServicesGetResponseService> getServices() {
return services;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ServicesGetResponse)) return false;
ServicesGetResponse response = (ServicesGetResponse) o;
return services != null ? services.equals(response.services) : response.services == null;
}
    @Override
    public int hashCode() {
        return services != null ? services.hashCode() : 0;
    }
@Override
public String toString() {
return "ServicesGetResponse{" +
"services=" + services +
'}';
}
}
|
import { K8sClientFactory } from './K8sClientFactory';
import { K8sServiceInfo } from '../types/K8sServiceInfo';
import { WebSocketLogger } from '../services/WebSocketLogger';
const fs = require('fs');
class Utils {
    private static instance: Utils;
private wsLogger: WebSocketLogger;
private constructor() {
}
static getInstance(): Utils {
if (Utils.instance === undefined) {
Utils.instance = new Utils();
}
return Utils.instance;
}
readFileContent(filePath: string) {
return String(fs.readFileSync(filePath));
}
    setWsLogger(wsLogger: WebSocketLogger) {
        this.wsLogger = wsLogger;
    }
logInfoMessage(keptnContext: string, message: string, terminate: boolean = false) {
try {
const msg = JSON.stringify({
keptnContext,
message,
keptnService: 'github-service',
logLevel: 'INFO',
});
console.log(msg);
if (this.wsLogger !== undefined) {
this.wsLogger.logMessage(keptnContext, message, 'INFO', terminate);
}
} catch (e) {}
}
logErrorMessage(keptnContext: string, message: string, terminate: boolean = false) {
try {
const msg = JSON.stringify({
keptnContext,
message,
keptnService: 'github-service',
logLevel: 'ERROR',
});
console.log(msg);
if (this.wsLogger !== undefined) {
                this.wsLogger.logMessage(keptnContext, message, 'ERROR', terminate);
}
} catch (e) {}
}
    async getK8sServiceUrl(serviceName: string, namespace: string): Promise<K8sServiceInfo> {
const k8sClient = new K8sClientFactory().createK8sClient();
const service =
await k8sClient.api.v1.namespace(namespace).service(serviceName).get();
return service as K8sServiceInfo;
}
}
export { Utils };
|
<gh_stars>0
/*
* Created Date: Thu, 6th May 2021, 16:33:41 pm
* Author: <NAME>
* Email: <EMAIL>
* Copyright (c) 2021 The Distance
*/
import gql from 'graphql-tag';
export default gql`
mutation($input: CompleteOnDemandWorkoutInput!) {
completeOnDemandWorkout(input: $input) {
success
}
}
`;
|
<filename>api/project/modules/geoprocessing/utils.py<gh_stars>0
import osmnx as ox
import networkx as nx
def gdf_to_nx(gdf_network):
# generate graph from GeoDataFrame of LineStrings
net = nx.Graph()
net.graph['crs'] = gdf_network.crs
fields = list(gdf_network.columns)
for _, row in gdf_network.iterrows():
first = row.geometry.coords[0]
last = row.geometry.coords[-1]
data = [row[f] for f in fields]
attributes = dict(zip(fields, data))
net.add_edge(first, last, **attributes)
return net
def add_traveltime_colors(G, center_node, mode):
travel_speed = 0
useRoadSpeed = False
if mode == 'drive':
useRoadSpeed = True
elif mode == 'bike':
travel_speed = 15
elif mode == 'walk':
travel_speed = 5
else:
return (None, 'Invalid mode. Try one of "bike", "drive", or "walk".')
trip_times = [5, 10, 15, 20, 25] # minutes
# add an edge attribute for travel time in minutes required to traverse each edge
if useRoadSpeed:
G = ox.add_edge_speeds(G)
else:
# set constant travel speed for all edges
nx.set_edge_attributes(G, travel_speed, 'speed_kph')
G = ox.add_edge_travel_times(G) # computes travel time in seconds
travel_times = nx.get_edge_attributes(G, "travel_time")
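    # osmnx graphs are MultiDiGraphs, so each edge is identified by (u, v, key)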
for u, v, k, data in G.edges(data=True, keys=True):
data['time'] = travel_times[(u, v, k)] / 60 # convert to min
# get one color for each isochrone
iso_colors = ox.plot.get_colors(n=len(trip_times), cmap='plasma', start=0, return_hex=True)
# color the edges based on subgraph
edge_colors = {}
for trip_time, color in zip(sorted(trip_times, reverse=True), iso_colors):
subgraph = nx.ego_graph(G, center_node, radius=trip_time, distance='time')
for edge in subgraph.edges:
edge_colors[edge] = color
nx.set_edge_attributes(G, edge_colors, 'color')
# project graph back to the standard crs
G = ox.project_graph(G, 'WGS84')
return (G, '')
|
public class DigitalClock {
private int hour;
private int minute;
private int second;
public DigitalClock(int h, int m, int s) {
this.hour = h;
this.minute = m;
this.second = s;
}
public int getHour(){
return this.hour;
}
public int getMinute(){
return this.minute;
}
public int getSecond(){
return this.second;
}
public void setHour(int h){
this.hour = h;
}
public void setMinute(int m){
this.minute = m;
}
public void setSecond(int s){
this.second = s;
}
public String toString(){
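        // Note: values are not zero-padded, e.g. 9:5:3 rather than 09:05:03.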
return this.hour + ":" + this.minute + ":" + this.second;
}
}
|
public function one($db = null)
{
    if ($db !== null) {
        return parent::one($db);
    }
    // Passing null lets the parent implementation fall back to the
    // default database connection for this query class.
    return parent::one(null);
}
|
<reponame>alamin-mahamud/e-commerce-go-api
package main
import (
"log"
"net/http"
)
func main() {
	db, err := CreateConnection()
	if err != nil {
		log.Fatalf("Could not connect to DB: %v", err)
	}
	// Only defer Close() after a successful connect, so a nil handle
	// is never dereferenced.
	defer db.Close()
userRepo := &UserRepository{db}
tokenService := &TokenService{userRepo}
authService := &Service{userRepo, tokenService}
r := NewRouter(authService)
http.Handle("/", r)
	log.Fatal(http.ListenAndServe(":40000", nil))
}
|
#!/bin/bash
source "$(dirname "${BASH_SOURCE}")/../../hack/lib/init.sh"
trap os::test::junit::reconcile_output EXIT
os::test::junit::declare_suite_start "cmd/quota"
os::test::junit::declare_suite_start "cmd/quota/clusterquota"
os::cmd::expect_success 'oc new-project foo --as=deads'
os::cmd::expect_success 'oc label namespace/foo owner=deads'
os::cmd::expect_success 'oc create clusterquota for-deads --project-label-selector=owner=deads --hard=secrets=10'
os::cmd::try_until_text 'oc get appliedclusterresourcequota -n foo --as deads -o name' "for-deads"
os::cmd::try_until_text 'oc describe appliedclusterresourcequota/for-deads -n foo --as deads' "secrets.*9"
os::cmd::expect_success 'oc create clusterquota for-deads-by-annotation --project-annotation-selector=openshift.io/requester=deads --hard=secrets=50'
os::cmd::expect_success 'oc new-project bar --as=deads'
os::cmd::try_until_text 'oc get appliedclusterresourcequota -n bar --as deads -o name' "for-deads-by-annotation"
os::cmd::try_until_text 'oc get appliedclusterresourcequota -n foo --as deads -o name' "for-deads-by-annotation"
os::cmd::try_until_text 'oc describe appliedclusterresourcequota/for-deads-by-annotation -n bar --as deads' "secrets.*18"
os::cmd::expect_success 'oc delete project foo'
os::cmd::expect_success 'oc delete project bar'
echo "clusterquota: ok"
os::test::junit::declare_suite_end
os::test::junit::declare_suite_start "cmd/quota/imagestreams"
os::cmd::expect_success 'oc new-project foo-2 --as=deads'
os::cmd::expect_success 'oc create quota -n foo-2 is-quota --hard openshift.io/imagestreams=1'
os::cmd::try_until_success 'oc tag -n foo-2 openshift/hello-openshift myis2:v2'
os::cmd::expect_failure_and_text 'oc tag -n foo-2 busybox mybox:v1' "Exceeded quota"
echo "imagestreams: ok"
os::test::junit::declare_suite_end
os::test::junit::declare_suite_end
|
#!/bin/bash
#
# Copyright (C) 2016 The CyanogenMod Project
# Copyright (C) 2017 The LineageOS Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
DEVICE=tulip
VENDOR=xiaomi
# Load extract_utils and do some sanity checks
MY_DIR="${BASH_SOURCE%/*}"
if [[ ! -d "$MY_DIR" ]]; then MY_DIR="$PWD"; fi
MK_ROOT="$MY_DIR"/../../..
HELPER="$MK_ROOT"/vendor/bootleggers/build/tools/extract_utils.sh
if [ ! -f "$HELPER" ]; then
echo "Unable to find helper script at $HELPER"
exit 1
fi
. "$HELPER"
while [ "$1" != "" ]; do
case $1 in
-n | --no-cleanup ) CLEAN_VENDOR=false
;;
-s | --section ) shift
SECTION=$1
CLEAN_VENDOR=false
;;
* ) SRC=$1
;;
esac
shift
done
if [ -z "$SRC" ]; then
SRC=adb
fi
# Initialize the helper
setup_vendor "$DEVICE" "$VENDOR" "$MK_ROOT" false "$CLEAN_VENDOR"
extract "$MY_DIR"/proprietary-files.txt "$SRC" "$SECTION"
GOODIX="$MK_ROOT"/vendor/"$VENDOR"/"$DEVICE"/proprietary/vendor/lib64/libgf_ca.so
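# Point the Goodix blob at the vendor firmware path; the trailing \x0 bytes
# pad the replacement to the original string's length so binary offsets stay intact.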
sed -i "s|/system/etc/firmware|/vendor/firmware\x0\x0\x0\x0|g" "$GOODIX"
BLOB_ROOT="$MK_ROOT"/vendor/"$VENDOR"/"$DEVICE"/proprietary
patchelf --remove-needed vendor.xiaomi.hardware.mtdservice@1.0.so "$BLOB_ROOT"/vendor/bin/mlipayd
patchelf --remove-needed vendor.xiaomi.hardware.mtdservice@1.0.so "$BLOB_ROOT"/vendor/lib64/libmlipay.so
"$MY_DIR"/setup-makefiles.sh
|
<reponame>groomsy/custom-font-loading-demo
//
// AppDelegate.h
// FontTest
//
// Created by <NAME> on 10/30/14.
// Copyright (c) 2014 GroomsyDev. All rights reserved.
//
@import UIKit;
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@property (strong, nonatomic) UIWindow *window;
@end
|
package org.opentele.server.dgks.monitoringdataset.version1_0_1.generated;
import java.net.URL;
import javax.xml.namespace.QName;
import javax.xml.ws.WebEndpoint;
import javax.xml.ws.WebServiceClient;
import javax.xml.ws.WebServiceFeature;
import javax.xml.ws.Service;
/**
* This class was generated by Apache CXF 2.6.8
* 2013-06-10T21:57:19.188+02:00
* Generated source version: 2.6.8
*
*/
@WebServiceClient(name = "MonitoringDatasetService",
wsdlLocation = "/Users/henrik/projects/opentele-server/src/wsdl/dgks/wsdl/1.0.1/MonitoringDatasetService.wsdl",
targetNamespace = "urn:oio:medcom:monitoringdataset:1.0.1")
public class MonitoringDatasetService extends Service {
public final static URL WSDL_LOCATION;
public final static QName SERVICE = new QName("urn:oio:medcom:monitoringdataset:1.0.1", "MonitoringDatasetService");
public final static QName MonitoringDatasetPort = new QName("urn:oio:medcom:monitoringdataset:1.0.1", "MonitoringDatasetPort");
static {
URL url = MonitoringDatasetService.class.getResource("/Users/henrik/projects/opentele-server/src/wsdl/dgks/wsdl/1.0.1/MonitoringDatasetService.wsdl");
if (url == null) {
url = MonitoringDatasetService.class.getClassLoader().getResource("/Users/henrik/projects/opentele-server/src/wsdl/dgks/wsdl/1.0.1/MonitoringDatasetService.wsdl");
}
if (url == null) {
java.util.logging.Logger.getLogger(MonitoringDatasetService.class.getName())
.log(java.util.logging.Level.INFO,
"Can not initialize the default wsdl from {0}", "/Users/henrik/projects/opentele-server/src/wsdl/dgks/wsdl/1.0.1/MonitoringDatasetService.wsdl");
}
WSDL_LOCATION = url;
}
public MonitoringDatasetService(URL wsdlLocation) {
super(wsdlLocation, SERVICE);
}
public MonitoringDatasetService(URL wsdlLocation, QName serviceName) {
super(wsdlLocation, serviceName);
}
public MonitoringDatasetService() {
super(WSDL_LOCATION, SERVICE);
}
/**
*
* @return
* returns MonitoringDatasetPortType
*/
@WebEndpoint(name = "MonitoringDatasetPort")
public MonitoringDatasetPortType getMonitoringDatasetPort() {
return super.getPort(MonitoringDatasetPort, MonitoringDatasetPortType.class);
}
/**
*
* @param features
* A list of {@link javax.xml.ws.WebServiceFeature} to configure on the proxy. Supported features not in the <code>features</code> parameter will have their default values.
* @return
* returns MonitoringDatasetPortType
*/
@WebEndpoint(name = "MonitoringDatasetPort")
public MonitoringDatasetPortType getMonitoringDatasetPort(WebServiceFeature... features) {
return super.getPort(MonitoringDatasetPort, MonitoringDatasetPortType.class, features);
}
}
|
<gh_stars>0
package com.java110.things.sip;
import com.java110.things.sip.codec.Frame;
import com.java110.things.sip.handler.UDPHandler;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.util.concurrent.Future;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ConcurrentLinkedDeque;
/*
 * Over public networks (especially 4G), UDP suffers badly from packet
 * reordering and loss, so handling differs from TCP: instead of deciding
 * I-frame/P-frame/fragment directly in the handler, each packet is first
 * buffered by the seq field of its RTP header, then reassembled, parsed,
 * and pushed to the stream.
 */
public class UDPServer extends Server {
private String TAG = this.getClass().getSimpleName();
private Logger log = LoggerFactory.getLogger(getClass());
private volatile boolean isRunning = false;
private Bootstrap bootstrap = null;
private EventLoopGroup workerGroup = null;
private void bind(int port, int ssrc, boolean checkSsrc) throws Exception {
workerGroup = new NioEventLoopGroup();
try {
bootstrap = new Bootstrap();
bootstrap.group(workerGroup)//
.channel(NioDatagramChannel.class) //
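                    // 1 MiB receive buffer to better absorb bursty RTP traffic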
.option(ChannelOption.SO_RCVBUF, 1024 * 1024)
.handler(new ChannelInitializer<NioDatagramChannel>() { //
@Override
public void initChannel(NioDatagramChannel ch) throws Exception {
ch.pipeline().addLast(new UDPHandler(ssrc, checkSsrc, UDPServer.this,port));
}
});
this.log.info("UDP服务启动成功port:{}", port);
bootstrap.bind(port).sync().channel().closeFuture().sync();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
workerGroup.shutdownGracefully();
}
}
@Override
public void startServer(ConcurrentLinkedDeque<Frame> frameDeque, int ssrc, int port, boolean checkSsrc) {
if (this.isRunning) {
throw new IllegalStateException(TAG + " is already started .");
}
this.isRunning = true;
new Thread(() -> {
try {
this.bind(port, ssrc, checkSsrc);
} catch (Exception e) {
this.log.info("{}服务启动出错:{}", TAG, e.getMessage());
e.printStackTrace();
}
}, TAG).start();
}
@Override
public void stopServer() {
if (!this.isRunning) {
throw new IllegalStateException(TAG + " is not yet started .");
}
this.isRunning = false;
try {
Future<?> future = this.workerGroup.shutdownGracefully().await();
if (!future.isSuccess()) {
log.error("workerGroup 无法正常停止:{}", future.cause());
}
} catch (InterruptedException e) {
e.printStackTrace();
}
this.log.info("UDPServer服务已经停止...");
}
}
|
<reponame>AkashBalani/AWS_CICD_Serverless
package com.csye6225.noteapp.Dao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.csye6225.noteapp.model.UserEntity;
import com.csye6225.noteapp.repository.UserRepository;
@Service
public class UserDao implements UserDaoInterface {
@Autowired
UserRepository userRepository;
public UserEntity findUser(String email) {
return userRepository.findUser(email);
}
public UserEntity save(UserEntity users) {
return userRepository.save(users);
}
}
|
#!/bin/bash
source "/vagrant/scripts/common.sh"
echo "setup metrics"
cp -r /vagrant/metrics /usr/local
mkdir -p /vagrant/metrics/data
|
package com.woolta.blog.repository;
import com.woolta.blog.domain.PostFile;
import org.springframework.data.repository.CrudRepository;
public interface PostFileRepository extends CrudRepository<PostFile, Integer> {
}
|
# Join the words with single spaces and terminate the sentence with a period.
sentence = ' '.join(words) + '.'
print(sentence)
|
TERMUX_PKG_HOMEPAGE=http://www.cityinthesky.co.uk/opensource/pdf2svg/
TERMUX_PKG_DESCRIPTION="A PDF to SVG converter"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=0.2.3
TERMUX_PKG_REVISION=3
TERMUX_PKG_SRCURL=https://github.com/db9052/pdf2svg/archive/v$TERMUX_PKG_VERSION.tar.gz
TERMUX_PKG_SHA256=4fb186070b3e7d33a51821e3307dce57300a062570d028feccd4e628d50dea8a
TERMUX_PKG_AUTO_UPDATE=true
TERMUX_PKG_DEPENDS="glib, libcairo, poppler"
|
<gh_stars>1-10
import {
AbstractGrantType,
InvalidArgumentError,
InvalidRequestError,
InvalidTokenError,
} from 'oauth2-server';
import axios from 'axios';
const url = 'https://oauth2.googleapis.com/tokeninfo';
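// Google's tokeninfo endpoint validates an ID token and returns its claims.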
class GoogleGrantType extends AbstractGrantType {
constructor(options = {}) {
super(options);
if (!options.model) {
throw new InvalidArgumentError('Missing parameter: `model`');
}
if (!options.model.getUserWithGoogle) {
throw new InvalidArgumentError(
'Invalid argument: model does not implement `getUserWithGoogle()`'
);
}
this.validateClientId = this.model.googleGrantType?.validateClientId ?? true;
    const clientId = this.model.googleGrantType?.clientId;
    // Normalize to an array; [clientId] (rather than Array(clientId)) avoids
    // Array(n) creating n empty slots when a single numeric id is passed.
    this.clientIds = clientId
      ? (Array.isArray(clientId) ? clientId : [clientId])
      : [];
    if (this.validateClientId && this.clientIds.length === 0) {
throw new InvalidArgumentError(
'Invalid argument: Google valid clientId must be provided in options'
);
}
if (!options.model.saveToken) {
throw new InvalidArgumentError(
'Invalid argument: model does not implement `saveToken()`'
);
}
this.handle = this.handle.bind(this);
this.getUser = this.getUser.bind(this);
this.saveToken = this.saveToken.bind(this);
}
async handle(request, client) {
if (!request) {
throw new InvalidArgumentError('Missing parameter: `request`');
}
if (!client) {
throw new InvalidArgumentError('Missing parameter: `client`');
}
const scope = this.getScope(request);
const user = await this.getUser(request);
return await this.saveToken(user, client, scope);
}
async getUser(request) {
const token = request.body.google_id_token;
if (!token) {
throw new InvalidRequestError('Missing parameter: `google_id_token`');
}
let data;
try {
const response = await axios.get(url, {
params: { id_token: token },
});
data = response.data;
} catch (err) {
throw new InvalidTokenError('Google id token is invalid or expired');
}
if (this.validateClientId && !this.clientIds.includes(data.aud)) {
throw new InvalidTokenError(
'You cannot use this Google ID Token with this grant type'
);
}
return await this.model.getUserWithGoogle(data);
}
async saveToken(user, client, scope) {
const scopeData = await this.validateScope(user, client, scope);
const accessToken = await this.generateAccessToken(client, user, scope);
const refreshToken = await this.generateRefreshToken(client, user, scope);
const accessTokenExpiresAt = this.getAccessTokenExpiresAt();
const refreshTokenExpiresAt = await this.getRefreshTokenExpiresAt();
const token = {
accessToken,
accessTokenExpiresAt,
refreshToken,
refreshTokenExpiresAt,
scope: scopeData,
};
return await this.model.saveToken(token, client, user);
}
}
export default GoogleGrantType;
|
<filename>pkg/scheduler/job.go
// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE.txt file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package scheduler
import (
"sync"
"time"
"github.com/elastic/elasticsearch-adapter/pkg/client"
"github.com/elastic/elasticsearch-adapter/pkg/config"
"k8s.io/klog/v2"
"sigs.k8s.io/custom-metrics-apiserver/pkg/provider"
)
type Job interface {
start()
GetClient() client.Interface
WithMetricListeners(listeners ...MetricListener) Job
WithErrorListeners(listeners ...ErrorListener) Job
}
var _ Job = &metricJob{}
func newMetricJob(c client.Interface, wg *sync.WaitGroup) Job {
return &metricJob{
c: c,
wg: wg,
}
}
type metricJob struct {
c client.Interface
wg *sync.WaitGroup
syncDone sync.Once
listeners []MetricListener
errorListeners []ErrorListener
previousCustomMetrics map[provider.CustomMetricInfo]struct{}
previousExternalMetrics map[provider.ExternalMetricInfo]struct{}
}
func (m *metricJob) start() {
go func() {
// Attempt to get a first set of metrics
m.refreshMetrics()
dateTicker := time.NewTicker(1 * time.Minute)
for range dateTicker.C {
m.refreshMetrics()
}
}()
}
func (m *metricJob) refreshMetrics() {
if m.GetClient().GetConfiguration().MetricTypes.HasType(config.CustomMetricType) {
customMetrics, err := m.c.ListCustomMetricInfos()
if err != nil {
klog.Errorf(
"Failed to update custom metric list from %s / %s : %v",
m.GetClient().GetConfiguration().Name,
m.GetClient().GetConfiguration().ClientConfig.Host,
err,
)
m.publishError(config.CustomMetricType, err)
return
}
klog.V(1).Infof(
"%d custom metrics from %s / %s",
len(customMetrics),
m.GetClient().GetConfiguration().Name,
m.GetClient().GetConfiguration().ClientConfig.Host,
)
for _, listener := range m.listeners {
listener.UpdateCustomMetrics(m.c, customMetrics)
}
}
if m.GetClient().GetConfiguration().MetricTypes.HasType(config.ExternalMetricType) {
externalMetrics, err := m.c.ListExternalMetrics()
if err != nil {
klog.Errorf(
"Failed to update external metric list from %s / %s : %v",
m.GetClient().GetConfiguration().Name,
m.GetClient().GetConfiguration().ClientConfig.Host,
err,
)
m.publishError(config.ExternalMetricType, err)
return
}
klog.V(1).Infof(
"%d external metrics from %s / %s",
len(externalMetrics),
m.GetClient().GetConfiguration().Name,
m.GetClient().GetConfiguration().ClientConfig.Host,
)
for _, listener := range m.listeners {
listener.UpdateExternalMetrics(m.c, externalMetrics)
}
}
m.syncDone.Do(func() {
klog.V(1).Infof(
"First sync successful from %s / %s",
m.GetClient().GetConfiguration().Name,
m.GetClient().GetConfiguration().ClientConfig.Host,
)
m.wg.Done()
})
}
func (m *metricJob) publishError(metricType config.MetricType, err error) {
for _, listener := range m.errorListeners {
listener.OnError(m.c, metricType, err)
}
}
func (m *metricJob) GetClient() client.Interface {
return m.c
}
func (m *metricJob) WithMetricListeners(listeners ...MetricListener) Job {
m.listeners = append(m.listeners, listeners...)
return m
}
func (m *metricJob) WithErrorListeners(listeners ...ErrorListener) Job {
m.errorListeners = append(m.errorListeners, listeners...)
return m
}
|
<reponame>webmaeistro/vipps-developers
package vippsKeys;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
public class VIPPS_InitiatePaymentResponseJSON {
@SerializedName("orderId")
@Expose
private String orderId;
@SerializedName("url")
@Expose
private String url;
public String getOrderId() {
return orderId;
}
public void setOrderId(String orderId) {
this.orderId = orderId;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
}
|
def compute(a, b):
    return a + b

def func1():
    result = compute(2, 3)
    print(result)  # prints 5
|
from classes import Query, BaseQueryResolver
META = {
'__version__':"0.0.1",
'author':"Dev",
'author_email':"<EMAIL>",
'description':"This is an example for a QueryResolver"
}
class ExampleResolver(BaseQueryResolver):
def __init__(self):
super().__init__() # Does nothing atm
from settings import EXAMPLE
_ = EXAMPLE['setting']
# Instantiate the class and export it in the EXPORTS dict
EXPORTS = {
'resolver':ExampleResolver(),
'metadata': META,
'enabled': False
}
|
#!/bin/bash
# prereq
python -m build --wheel src
name=$(cd src/dist; ls databrickscicd*.whl)
databricks fs mkdirs ${DATABRICKS_DBFS_PATH}
# setup
export DATABRICKS_LIBRARY_PATH=${DATABRICKS_DBFS_PATH}/${name}
databricks fs cp --overwrite src/dist/${name} ${DATABRICKS_DBFS_PATH}
databricks workspace import --overwrite src/main_notebook.py --language PYTHON ${DATABRICKS_WORKSPACE_PATH}/main_notebook.py
# execute
pytest --cache-clear test/test_main_notebook.py
# tear down
databricks fs rm ${DATABRICKS_DBFS_PATH}/${name}
databricks workspace delete ${DATABRICKS_WORKSPACE_PATH}/main_notebook.py
|
#!/bin/bash
cd ..
python3 webserv.py config.cfg &
PID=$!
cd - > /dev/null
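# Give the server a moment to bind before probing it.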
sleep 1
curl -I 127.0.0.1:8070/ | grep '200 OK' | diff - index_status_expected.out
kill $PID
|
package com.sohu.tv.mq.cloud.service;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.sohu.tv.mq.cloud.bo.Cluster;
import com.sohu.tv.mq.cloud.bo.Topic;
import com.sohu.tv.mq.cloud.bo.TopicTraffic;
import com.sohu.tv.mq.cloud.bo.TopicTrafficCheckResult;
import com.sohu.tv.mq.cloud.bo.TopicTrafficStat;
import com.sohu.tv.mq.cloud.bo.TopicTrafficWarnConfig;
import com.sohu.tv.mq.cloud.bo.User;
import com.sohu.tv.mq.cloud.util.Jointer;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import com.sohu.tv.mq.cloud.dao.TopicTrafficStatDao;
import com.sohu.tv.mq.cloud.util.DateUtil;
import com.sohu.tv.mq.cloud.util.MQCloudConfigHelper;
import com.sohu.tv.mq.cloud.util.Result;
/**
* @author yongweizhao
* @create 2020/8/3 17:06
*/
@Service
public class TopicTrafficStatService {
private Logger logger = LoggerFactory.getLogger(this.getClass());
private static final int ONE_MIN = 1 * 60 * 1000;
private static final int FIVE_MIN_BEFORE = 5;
@Autowired
private ClusterService clusterService;
@Autowired
private TopicService topicService;
@Autowired
private MQCloudConfigHelper mqCloudConfigHelper;
@Autowired
private AlertService alertService;
@Autowired
private TopicTrafficService topicTrafficService;
@Autowired
private TopicTrafficWarnConfigService topicTrafficWarnConfigService;
@Autowired
private UserService userService;
@Autowired
private TopicTrafficStatDao topicTrafficStatDao;
    /**
     * Save topic traffic statistics.
     * @param topicTrafficStat
     * @return
     */
public Result<TopicTrafficStat> save(TopicTrafficStat topicTrafficStat) {
try {
topicTrafficStatDao.insertAndUpdate(topicTrafficStat);
} catch (Exception e) {
logger.error("insert err, topicTrafficStat:{}", topicTrafficStat, e);
return Result.getDBErrorResult(e);
}
return Result.getResult(topicTrafficStat);
}
    /**
     * Query statistics by tid.
     * @param tid
     * @return
     */
public Result<TopicTrafficStat> query(long tid) {
TopicTrafficStat topicTrafficStat = null;
try {
topicTrafficStat = topicTrafficStatDao.select(tid);
} catch (Exception e) {
logger.error("queryTopicTrafficStat err, tid:{}", tid, e);
return Result.getDBErrorResult(e);
}
return Result.getResult(topicTrafficStat);
}
    /**
     * Query the list of all tids.
     */
public List<Long> queryAllTid() {
List<Long> list = null;
try {
list = topicTrafficStatDao.selectAllTid();
} catch (Exception e) {
logger.error("queryAllTid err,", e);
}
return list;
}
    /**
     * Delete statistics.
     */
public Result<Integer> delete(List<Long> tidList) {
Integer count;
try {
count = topicTrafficStatDao.delete(tidList);
} catch (Exception e) {
logger.error("del topicTrafficStat err, tidList:{}", StringUtils.join(tidList, ","), e);
return Result.getDBErrorResult(e);
}
return Result.getResult(count);
}
    /**
     * Get all topics that have traffic warning enabled.
     */
public List<Topic> queryTrafficWarnEnabledTopicList() {
if (clusterService.getAllMQCluster() == null) {
logger.warn("mqcluster is null");
return null;
}
List<Topic> topicList = new ArrayList<>();
for (Cluster mqCluster : clusterService.getAllMQCluster()) {
Result<List<Topic>> topicListResult = topicService.queryTrafficWarnEnabledTopicList(mqCluster);
if (topicListResult.isNotEmpty()) {
topicList.addAll(topicListResult.getResult());
}
}
return topicList;
}
    /**
     * Traffic statistics analysis.
     */
public void trafficStatAll() {
List<Topic> topicList = queryTrafficWarnEnabledTopicList();
if (CollectionUtils.isEmpty(topicList)) {
return;
}
        // Fetch the traffic threshold configuration for all topics
Result<List<TopicTrafficWarnConfig>> configResult = topicTrafficWarnConfigService.queryAll();
if (configResult.isEmpty()) {
return;
}
Map<String, TopicTrafficWarnConfig> configMap = new HashMap<>();
TopicTrafficWarnConfig defaultConfig = null;
Iterator<TopicTrafficWarnConfig> iterator = configResult.getResult().iterator();
while (iterator.hasNext()) {
TopicTrafficWarnConfig config = iterator.next();
if (StringUtils.isBlank(config.getTopic())) {
defaultConfig = config;
} else {
configMap.put(config.getTopic(), config);
}
}
        // Get today's date
String date = DateUtil.getFormatNow(DateUtil.YMD_DASH);
for (Topic topic : topicList) {
Result<List<TopicTraffic>> topicTrafficResult = topicTrafficService.queryRangeTraffic(topic.getId(), date);
if (topicTrafficResult.isNotEmpty()) {
TrafficStatCheckStrategy strategy = null;
TopicTrafficWarnConfig strategyConfig = null;
if (configMap.containsKey(topic.getName())) {
strategyConfig = configMap.get(topic.getName());
strategyConfig.copyProperties(defaultConfig);
strategy = new TrafficSimpleStatStrategy(topicTrafficResult.getResult(), strategyConfig);
} else {
strategy = new TrafficSimpleStatStrategy(topicTrafficResult.getResult(), defaultConfig);
}
stat(strategy);
}
}
}
    // Aggregate one day of traffic for a tid
private void stat(TrafficStatCheckStrategy strategy) {
TopicTrafficStat topicTrafficStat = strategy.stat();
if (topicTrafficStat != null) {
logger.info("topic traffic stat:{}", topicTrafficStat.toString());
save(topicTrafficStat);
}
}
    /**
     * Traffic monitoring.
     */
public void check(List<Topic> topicList) {
        // 1. Fetch the topic traffic threshold configuration
Result<List<TopicTrafficWarnConfig>> configResult = topicTrafficWarnConfigService.queryAll();
if (configResult.isEmpty()) {
return;
}
Map<String, TopicTrafficWarnConfig> configMap = new HashMap<>();
TopicTrafficWarnConfig defaultConfig = null;
Iterator<TopicTrafficWarnConfig> iterator = configResult.getResult().iterator();
while (iterator.hasNext()) {
TopicTrafficWarnConfig config = iterator.next();
if (StringUtils.isBlank(config.getTopic())) {
defaultConfig = config;
} else {
configMap.put(config.getTopic(), config);
}
}
        // Today's date
        String date = DateUtil.getFormatNow(DateUtil.YMD_DASH);
        // Time points within the last five minutes
List<String> timeList = getBeforeTimes(FIVE_MIN_BEFORE);
for (Topic topic : topicList) {
            // 2. Fetch the topic traffic list for the last five minutes
long tid = topic.getId();
Result<List<TopicTraffic>> topicTrafficResult = topicTrafficService.queryRangeTraffic(tid, date, timeList);
if (topicTrafficResult.isEmpty()) {
continue;
}
            // 3. Fetch the statistics result
Result<TopicTrafficStat> topicTrafficStatResult = query(tid);
if (topicTrafficStatResult.isNotOK()) {
continue;
}
            // 4. Build the strategy
TopicTrafficWarnConfig config = null;
if (configMap.containsKey(topic.getName())) {
config = configMap.get(topic.getName());
config.copyProperties(defaultConfig);
} else {
config = defaultConfig;
}
TrafficStatCheckStrategy strategy = new TrafficSimpleStatStrategy(topicTrafficStatResult.getResult(), config);
            // 5. Check
if (config.isAlert()) {
List<TopicTrafficCheckResult> checkResult = strategy.check(topicTrafficResult.getResult());
                // 6. Alert
sendAlert(checkResult, config, topic);
}
}
}
    /**
     * Get the time points between beforeTime minutes ago and now, formatted HHMM.
     */
private List<String> getBeforeTimes(int beforeTime) {
Date now = new Date();
        // Compute the interval covering the previous beforeTime minutes
List<String> timeList = new ArrayList<String>();
Date begin = new Date(now.getTime() - beforeTime * ONE_MIN + 30);
while (begin.before(now)) {
String time = DateUtil.getFormat(DateUtil.HHMM).format(begin);
timeList.add(time);
begin.setTime(begin.getTime() + ONE_MIN);
}
return timeList;
}
    // Send the alert
public void sendAlert(List<TopicTrafficCheckResult> checkResultList, TopicTrafficWarnConfig config, Topic topic) {
if (CollectionUtils.isEmpty(checkResultList) || !config.isAlert()) {
return;
}
        StringBuilder content = new StringBuilder("Details as follows:<br><br>");
        content.append("topic: <b>");
        content.append(mqCloudConfigHelper.getTopicProduceLink(topic.getId(), topic.getName()));
        content.append("</b> traffic anomaly detected: <br>");
content.append("<table border=1>");
content.append("<thead>");
content.append("<tr>");
content.append("<th>时间</th>");
content.append("<th>详情</th>");
content.append("</tr>");
content.append("</thead>");
content.append("<tbody>");
for (TopicTrafficCheckResult checkResult : checkResultList) {
String warnTime = checkResult.getTime();
String warnInfo = checkResult.getWarnInfo();
content.append("<tr>");
content.append("<td>");
content.append(warnTime);
content.append("</td>");
content.append("<td>");
content.append(warnInfo);
content.append("</td>");
content.append("</tr>");
}
content.append("</tbody>");
content.append("</table>");
        // Send to the configured alert receivers
String email = getAlarmReceiverEmails(config.getAlarmReceiver(), topic.getId());
        alertService.sendWarnMail(email, "topic traffic", content.toString());
}
    /**
     * Get the alert receivers.
     */
private String getAlarmReceiverEmails(int alarmType, long topicId) {
Set<User> userSet = new HashSet<>();
Result<List<User>> producerUserListResult = null;
Result<List<User>> consumerUserListResult = null;
switch (alarmType) {
case 0:
producerUserListResult = userService.queryProducerUserList(topicId);
consumerUserListResult = userService.queryConsumerUserList(topicId);
break;
case 1:
producerUserListResult = userService.queryProducerUserList(topicId);
break;
case 2:
consumerUserListResult = userService.queryConsumerUserList(topicId);
break;
case 3:
break;
default:
}
if (producerUserListResult != null && producerUserListResult.isNotEmpty()) {
userSet.addAll(producerUserListResult.getResult());
}
if (consumerUserListResult != null && consumerUserListResult.isNotEmpty()) {
userSet.addAll(consumerUserListResult.getResult());
}
String email = Jointer.BY_COMMA.join(userSet, u -> u.getEmail());
return email;
}
}
|
'''This job updates the minute-by-minute trading data for the whole stock universe.
'''
'''
Copyright (c) 2017, WinQuant Information and Technology Co. Ltd.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
# built-in modules
import datetime as dt
import logging
# third-party modules
# customized modules
import data.api.stocks as stockApi
import data.config as config
import data.driver.mongodb as mongodb
import data.instrument.trading.stocks as stockTrading
# customize logging configure
logging.basicConfig( format='[%(levelname)s] %(message)s', level=logging.INFO )
def main():
'''Entry point of the job.
'''
# runtime
asOfDate = dt.date.today()
logging.info( 'Updating minute bin data for stocks on date {d:s}...'.format( d=str( asOfDate ) ) )
# get all stocks in the universe
universe = stockApi.getExchangeStockNames( asOfDate )
# initialize MongoDB connection
username, password = config.MONGODB_CRED
db = mongodb.getAuthenticatedConnection( config.MONGODB_URL,
config.MONGODB_PORT, username, password, 'binData' )
nStocks = len( universe )
logging.info( 'Minute bin volume for {ns:d} stocks in total to be updated...'.format( ns=nStocks ) )
# for bin data, stocks are updated one-by-one
for i, stock in enumerate( universe ):
logging.info( 'Updating minute bin data for {s:s} ({idx:d}/{n:d})...'.format( s=stock, idx=i + 1, n=nStocks ) )
data = stockTrading.getBinData( stock, dataDate=asOfDate )
mongoDate = dt.datetime.combine( asOfDate, dt.datetime.min.time() )
record = { 'SecID': stock,
'Date': mongoDate,
'Data': data.to_json(),
'Country': 'CN' }
db.stocks.update( { 'SecID': stock, 'Date': mongoDate, 'Country': 'CN' }, record, upsert=True )
logging.info( 'All stocks updated.' )
if __name__ == '__main__':
# let's kick off the job
main()
|
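# Compile the sketch first; upload to the board only if compilation succeeded.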
~/arduino-cli/bin/arduino-cli compile --fqbn arduino:avr:mega motor_controller
OUT=$?
if [ $OUT -eq 0 ]; then
~/arduino-cli/bin/arduino-cli upload -p /dev/ttyACM1 --fqbn arduino:avr:mega motor_controller -v
else
echo "****ERROR*****"
fi
|
import { Component } from '@angular/core';
@Component({
selector: 'my-app',
templateUrl: './app.component.html',
styleUrls: [ './app.component.css' ]
})
export class AppComponent {
name = 'Angular';
strings = ["Hello", "World"];
concatenatedString = '';
constructor() {
this.concatenatedString = this.strings.join(' ');
}
}
|
#!/usr/bin/env sh
# generated from catkin/python/catkin/environment_cache.py
# based on a snapshot of the environment before and after calling the setup script
# it emulates the modifications of the setup script without recurring computations
# new environment variables
# modified environment variables
export CMAKE_PREFIX_PATH="/home/kalyco/mfp_workspace/devel/.private/pcl_ros:$CMAKE_PREFIX_PATH"
export PWD="/home/kalyco/mfp_workspace/build/pcl_ros"
export ROSLISP_PACKAGE_DIRECTORIES="/home/kalyco/mfp_workspace/devel/.private/pcl_ros/share/common-lisp:$ROSLISP_PACKAGE_DIRECTORIES"
export ROS_PACKAGE_PATH="/home/kalyco/mfp_workspace/src/perception_pcl/pcl_ros:$ROS_PACKAGE_PATH"
|
def top_words(text):
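    """Return the five most frequent words in the given text."""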
words = text.split()
word_counts = {}
# count the words in the given text
for word in words:
        if word in word_counts:
word_counts[word] += 1
else:
word_counts[word] = 1
# sort the words by count in descending order
sorted_words = sorted(word_counts.items(), key=lambda x: x[1], reverse=True)
# get the top 5 most frequent words
top_5_words = []
for word in sorted_words[:5]:
top_5_words.append(word[0])
return top_5_words
|
const noop = function () {}
const config = {
max: Infinity,
directionKey: 'direction',
isSingleMode: true,
isDebugger: false,
getHistoryStack: noop,
setHistoryStack: noop,
}
export default config
|
<gh_stars>0
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { MatFormFieldModule } from '@angular/material/form-field';
import { MatInputModule } from '@angular/material/input';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { RouterTestingModule } from '@angular/router/testing';
import { stubCoreServiceProvider } from '../../../../../../core/services/core.service.stub';
import { stubServiceLogsServiceProvider } from '../../services/service-logs.service.stub';
import { ServiceLogsComponent } from './service-logs.component';
describe('ServiceLogsComponent', () => {
let component: ServiceLogsComponent;
let fixture: ComponentFixture<ServiceLogsComponent>;
beforeEach(async () => {
await TestBed.configureTestingModule({
imports: [RouterTestingModule, MatFormFieldModule, MatInputModule, NoopAnimationsModule],
declarations: [ServiceLogsComponent],
providers: [stubServiceLogsServiceProvider, stubCoreServiceProvider],
}).compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(ServiceLogsComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});
|
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
public class DBHandler {
    Connection dbConnection;
    // Placeholder connection settings (assumed values) -- replace with your own.
    private static final String url = "jdbc:mysql://localhost:3306/mydb";
    private static final String userName = "user";
    private static final String password = "password";
public DBHandler() {
try {
Class.forName("com.mysql.jdbc.Driver");
dbConnection = DriverManager.getConnection(url, userName, password);
} catch (Exception e) {
e.printStackTrace();
}
}
public void insertData(String sqlQuery) {
try {
Statement statementObject = dbConnection.createStatement();
statementObject.executeUpdate(sqlQuery);
} catch (Exception e) {
e.printStackTrace();
}
}
public void deleteData(String sqlQuery) {
try {
Statement statementObject = dbConnection.createStatement();
statementObject.executeUpdate(sqlQuery);
} catch (Exception e) {
e.printStackTrace();
}
}
public void updateData(String sqlQuery) {
try {
Statement statementObject = dbConnection.createStatement();
statementObject.executeUpdate(sqlQuery);
} catch (Exception e) {
e.printStackTrace();
}
}
public String selectData(String sqlQuery) {
try {
Statement statementObject = dbConnection.createStatement();
ResultSet resultSetObject = statementObject.executeQuery(sqlQuery);
String resultString = "";
while (resultSetObject.next()) {
// Get the column data
int Id = resultSetObject.getInt("Id");
String Name = resultSetObject.getString("Name");
// Append the data to the string
                resultString += "Id: " + Id + " Name: " + Name + "\n";
}
return resultString;
} catch (Exception e) {
e.printStackTrace();
        }
        // Return an empty result when the query fails so every path returns a value.
        return "";
    }
}
|
'use strict';
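// Toggles the recognizer's EYE_BAGS feature on and off via the bnb scene API.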
class EyeBagsRemoval {
enable() {
bnb.scene.enableRecognizerFeature(bnb.FeatureID.EYE_BAGS);
        return this;
}
disable() {
bnb.scene.disableRecognizerFeature(bnb.FeatureID.EYE_BAGS);
        return this;
}
}
exports.EyeBagsRemoval = EyeBagsRemoval;
|
require 'rubygems'
begin
require 'pryx'
rescue Exception => e
# it would be cool but-:)
end
require 'fileutils'
require 'test/unit'
require 'tempfile'
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 'linux/lxc'
class LinuxLxcTest < Test::Unit::TestCase
def setup
@temp_dir = Dir.mktmpdir
@lxc_config = File.join(@temp_dir, 'lxc.config')
File.write(@lxc_config, <<SAMPLE)
# Template used to create this container: /usr/share/lxc/templates/lxc-ubuntu
# Parameters passed to the template:
# For additional config options, please look at lxc.container.conf(5)
# Common configuration
lxc.include = #{@temp_dir}/ubuntu.common.conf
# Container specific configuration
lxc.rootfs = /var/lib/lxc/border-eth0/rootfs
lxc.mount = /var/lib/lxc/border-eth0/fstab
lxc.utsname = border-eth0
lxc.arch = amd64
# Network configuration
lxc.network.type = veth
lxc.network.flags = up
lxc.network.link = lxcbr0
lxc.network.hwaddr = 00:16:3e:67:03:4a
SAMPLE
@lxc_ubuntu_common_conf = File.join(@temp_dir, 'ubuntu.common.conf')
File.write(@lxc_ubuntu_common_conf, <<SAMPLE)
# Default pivot location
lxc.pivotdir = lxc_putold
# Default mount entries
lxc.mount.entry = proc proc proc nodev,noexec,nosuid 0 0
lxc.mount.entry = sysfs sys sysfs defaults 0 0
lxc.mount.entry = /sys/fs/fuse/connections sys/fs/fuse/connections none bind,optional 0 0
lxc.mount.entry = /sys/kernel/debug sys/kernel/debug none bind,optional 0 0
lxc.mount.entry = /sys/kernel/security sys/kernel/security none bind,optional 0 0
lxc.mount.entry = /sys/fs/pstore sys/fs/pstore none bind,optional 0 0
# Default console settings
lxc.devttydir = lxc
lxc.tty = 4
lxc.pts = 1024
# Default capabilities
lxc.cap.drop = sys_module mac_admin mac_override sys_time
# When using LXC with apparmor, the container will be confined by default.
# If you wish for it to instead run unconfined, copy the following line
# (uncommented) to the container's configuration file.
#lxc.aa_profile = unconfined
# To support container nesting on an Ubuntu host while retaining most of
# apparmor's added security, use the following two lines instead.
#lxc.aa_profile = lxc-container-default-with-nesting
#lxc.mount.auto = cgroup:mixed
# Uncomment the following line to autodetect squid-deb-proxy configuration on the
# host and forward it to the guest at start time.
#lxc.hook.pre-start = /usr/share/lxc/hooks/squid-deb-proxy-client
# If you wish to allow mounting block filesystems, then use the following
# line instead, and make sure to grant access to the block device and/or loop
# devices below in lxc.cgroup.devices.allow.
#lxc.aa_profile = lxc-container-default-with-mounting
# Default cgroup limits
lxc.cgroup.devices.deny = a
## Allow any mknod (but not using the node)
lxc.cgroup.devices.allow = c *:* m
lxc.cgroup.devices.allow = b *:* m
## /dev/null and zero
lxc.cgroup.devices.allow = c 1:3 rwm
lxc.cgroup.devices.allow = c 1:5 rwm
## consoles
lxc.cgroup.devices.allow = c 5:0 rwm
lxc.cgroup.devices.allow = c 5:1 rwm
## /dev/{,u}random
lxc.cgroup.devices.allow = c 1:8 rwm
lxc.cgroup.devices.allow = c 1:9 rwm
## /dev/pts/*
lxc.cgroup.devices.allow = c 5:2 rwm
lxc.cgroup.devices.allow = c 136:* rwm
## rtc
lxc.cgroup.devices.allow = c 254:0 rm
## fuse
lxc.cgroup.devices.allow = c 10:229 rwm
## tun
lxc.cgroup.devices.allow = c 10:200 rwm
## full
lxc.cgroup.devices.allow = c 1:7 rwm
## hpet
lxc.cgroup.devices.allow = c 10:228 rwm
## kvm
lxc.cgroup.devices.allow = c 10:232 rwm
## To use loop devices, copy the following line to the container's
## configuration file (uncommented).
#lxc.cgroup.devices.allow = b 7:* rwm
lxc.net.0.type = veth
lxc.net.0.flags = up
lxc.net.0.link = br-int
lxc.net.0.hwaddr = 00:16:4e:80:72:ab
lxc.net.0.name = br-int
lxc.net.1.type = phys
lxc.net.1.flags = up
lxc.net.1.link = eth1
lxc.net.1.name = lte
# Blacklist some syscalls which are not safe in privileged
# containers
lxc.seccomp = /usr/share/lxc/config/common.seccomp
lxc.include = #{File.join(@temp_dir, 'empty.conf.d')}
lxc.include = #{File.join(@temp_dir, 'common.conf.d')}
SAMPLE
FileUtils.mkdir_p File.join(@temp_dir, 'empty.conf.d')
FileUtils.mkdir_p File.join(@temp_dir, 'common.conf.d')
@lxc_common_conf_d_wildcard = File.join(@temp_dir, 'common.conf.d', 'wildcard.conf')
File.write(@lxc_common_conf_d_wildcard, <<SAMPLE)
lxc.wildcard.loaded = true
lxc.hook.mount = /usr/share/lxcfs/lxc.mount.hook
lxc.hook.post-stop = /usr/share/lxcfs/lxc.reboot.hook
SAMPLE
end
def teardown
FileUtils.remove_entry_secure @temp_dir
end
def test_reader
lxc = Linux::Lxc.parse(@lxc_config)
assert_equal lxc.get('lxc').length, 52
assert_equal lxc.get('lxc.network').length, 4
assert_equal lxc.get('lxc.network.hwaddr').length, 1
assert_equal lxc.get('lxc.network.murks'), nil
assert_equal lxc.get('lxc.wildcard.loaded').values[0], 'true'
assert_equal lxc.get('lxc.wildcard.loaded')[0].file, @lxc_common_conf_d_wildcard
assert_equal lxc.get('lxc.wildcard.loaded')[0].line, 1
assert_equal lxc.get('lxc.cgroup.devices.allow').values[4], 'c 5:0 rwm'
assert_equal lxc.get('lxc.cgroup.devices.allow')[4].file, @lxc_ubuntu_common_conf
assert_equal lxc.get('lxc.cgroup.devices.allow')[4].line, 48
assert_equal lxc.get('lxc.network.hwaddr').values, ['00:16:3e:67:03:4a']
assert_equal lxc.get('lxc.network.hwaddr').first.file, @lxc_config
assert_equal lxc.get('lxc.network.hwaddr').first.line, 18
end
def test_from_scratch
lxc = Linux::Lxc.file(File.join(@temp_dir, 'base.f'))
lxc.add('# base meno')
lxc.add('lxc.cgroup.devices.allow', 'meno')
incl = Linux::Lxc.file(File.join(@temp_dir, 'incl.f.conf'))
lxc.add('lxc.include', incl)
incl.add('# include meno')
incl.add('lxc.network.hwaddr', '00:16:3e:67:03:4a')
empty_d = Linux::Lxc.directory(File.join(@temp_dir, 'scratch.empty.d'))
lxc.add('lxc.include', empty_d)
scratch_d = Linux::Lxc.directory(File.join(@temp_dir, 'scratch.d'))
lxc.add('lxc.include', scratch_d)
scratch_file = scratch_d.add_file(File.join(@temp_dir, 'scratch.d', 'file.conf'))
scratch_file.add('# include scratch')
scratch_file.add('lxc.scratch_file', 'it_is_scratch_file')
lxc.write
lxc_read = Linux::Lxc.parse(lxc.file)
assert_equal lxc_read.get('#').length, 3
assert_equal lxc_read.get('lxc.cgroup.devices.allow').values, ['meno']
assert_equal lxc_read.get('lxc.cgroup.devices.allow').first.file, lxc.file
assert_equal lxc_read.get('lxc.cgroup.devices.allow').first.line, 2
assert_equal lxc_read.get('lxc.network.hwaddr').values, ['00:16:3e:67:03:4a']
assert_equal lxc_read.get('lxc.network.hwaddr').first.file, incl.file
assert_equal lxc_read.get('lxc.network.hwaddr').first.line, 2
assert_equal lxc_read.get('lxc.scratch_file').values, ['it_is_scratch_file']
assert_equal lxc_read.get('lxc.scratch_file').first.file, scratch_file.file
assert_equal lxc_read.get('lxc.scratch_file').first.line, 2
assert_equal lxc_read.index.files.length, 3
end
def test_comment
lxc = Linux::Lxc.parse(@lxc_config)
assert_equal lxc.get('#').length, 42
assert_equal lxc.get('lxc.cgroup.devices.allow').length, 16
lxc.get('lxc.cgroup.devices.allow')[0].comment!
assert_equal lxc.get('lxc.cgroup.devices.allow').length, 15
assert_equal lxc.get('#').length, 43
lxc.get('lxc.network').comment!
assert_equal lxc.get('#').length, 47
assert_equal lxc.get('#')[45].to_s, '# lxc.network.link = lxcbr0'
assert_equal lxc.get('lxc.network'), nil
lxc.index.files.values.each do |file|
file.real_fname = ::File.join(::File.dirname(file.file), "-.#{::File.basename(file.file)}")
end
lxc.write
l2 = Linux::Lxc.parse(::File.join(::File.dirname(@lxc_config), "-.#{::File.basename(@lxc_config)}"))
assert_equal l2.lines.first.key, "#"
assert_equal l2.lines.first.value, "# Template used to create this container: /usr/share/lxc/templates/lxc-ubuntu"
lxc.index.files.values.each do |file|
file.file = ::File.join(::File.dirname(file.file), "+.#{::File.basename(file.file)}")
end
lxc.write
l3 = Linux::Lxc.parse(::File.join(::File.dirname(@lxc_config), "+.#{::File.basename(@lxc_config)}"))
assert_equal l3.lines.first.key, "#"
assert_equal l3.lines.first.value, "# Template used to create this container: /usr/share/lxc/templates/lxc-ubuntu"
assert_equal ::File.basename(l3.index.files.values[1].file), "+.ubuntu.common.conf"
assert_equal l3.index.files.values[1].lines.first.key, "#"
assert_equal l3.index.files.values[1].lines.first.value, "# Default pivot location"
end
def test_real_fname
lxc = Linux::Lxc.file(File.join(@temp_dir, 'real_name'))
lxc.add('# base meno')
lxc.add('lxc.cgroup.devices.allow', 'meno')
lxc.write
lxc.real_fname = File.join(@temp_dir, 'test_name')
incl = Linux::Lxc.file(File.join(@temp_dir, 'test_incl'))
incl.real_fname = File.join(@temp_dir, 'real_incl')
lxc.add('lxc.include', incl)
incl.add('# include meno')
incl.add('lxc.network.hwaddr', '00:16:3e:67:03:4a')
lxc.write
assert_equal File.exist?(File.join(@temp_dir, 'test_name')), true
assert_equal File.exist?(File.join(@temp_dir, 'real_name')), true
assert_equal File.exist?(File.join(@temp_dir, 'real_incl')), true
assert_equal File.exist?(File.join(@temp_dir, 'test_incl')), false
# assert_raise do #Fails, no Exceptions are raised
begin
lxc = Linux::Lxc.parse(File.join(@temp_dir, 'test_name'))
assert_equal 'Doof', 'Darf nie passieren'
rescue Exception => e
assert_equal e.instance_of?(Errno::ENOENT), true
assert_equal File.basename(e.message), 'test_incl'
end
# end
end
def test_lines
lxc = Linux::Lxc.parse(@lxc_config)
cnt = 0
lxc.all_lines { |_line| cnt += 1 }
assert_equal cnt, 109
end
def test_files
lxc = Linux::Lxc.parse(@lxc_config)
files = lxc.index.files.keys
assert_equal files[0], @lxc_config
assert_equal files[1], @lxc_ubuntu_common_conf
assert_equal files[2], @lxc_common_conf_d_wildcard
assert_equal files.length, 3
end
def test_write
lxc = Linux::Lxc.parse(@lxc_config)
inc_file = "#{lxc.get('lxc.cgroup.devices.allow').first.lxc.file}.new"
lxc.get('lxc.cgroup.devices.allow').first.lxc.file = inc_file
lxc.get('lxc.cgroup.devices.allow')[5].value = 'meno'
assert_equal lxc.get('lxc.cgroup.devices.allow').values[5], 'meno'
lxc.get('lxc.network.hwaddr').first.value = 'construqt'
assert_equal lxc.get('lxc.network.hwaddr').values, ['construqt']
assert_equal lxc.get('lxc.network.hwaddr').find{|i| i.value == 'construqt'}.value, 'construqt'
lxc.write
lxc_read = Linux::Lxc.parse(lxc.file)
assert_equal lxc_read.get('lxc.cgroup.devices.allow').values[5], 'meno'
assert_equal lxc_read.get('lxc.cgroup.devices.allow')[5].file, inc_file
assert_equal lxc_read.get('lxc.cgroup.devices.allow')[5].line, 49
assert_equal lxc_read.get('lxc.network.hwaddr').values, ['construqt']
assert_equal lxc_read.get('lxc.network.hwaddr').first.file, lxc.file
assert_equal lxc_read.get('lxc.network.hwaddr').first.line, 18
end
def test_numeric_prefix_order
assert_equal Linux::Lxc.numeric_prefix_order(["100_a", "1_b", "34d"]), ["1_b","34d","100_a"]
assert_equal Linux::Lxc.numeric_prefix_order(["1_c", "1_a", "1a"]), ["1_a","1_c","1a"]
assert_equal Linux::Lxc.numeric_prefix_order(["000100_a", "000001_b", "034d"]), ["000001_b","034d","000100_a"]
assert_equal Linux::Lxc.numeric_prefix_order(["foo","100_a", "000001_b", "bar", "34d"]), ["000001_b","34d","100_a","bar", "foo"]
assert_equal Linux::Lxc.numeric_prefix_order(["foo","yyy", "bar"]), ["bar", "foo", "yyy"]
end
end
|
<reponame>planetsolutions/pa-front<filename>src/app/objects-list/setup/setup-dialog.component.ts
import {Component, EventEmitter, Input, OnInit, ViewChild} from '@angular/core';
import {BsModalRef} from 'ngx-bootstrap';
import {ResultMasterPanelTabColumn} from '../../index';
import {DisplayTypes} from '../objects-list.component';
@Component({
selector: 'app-objects-list-setup-dialog',
templateUrl: './setup-dialog.component.html'
})
export class ObjectsListSetupDialogComponent implements OnInit {
public columns: ResultMasterPanelTabColumn[];
public hiddenList: {};
  public onSubmit = new EventEmitter<{newHiddenList: any, pageSize: any, autoRefresh: any, displayType: any}>();
public pageSize = 0;
public autoRefresh = 0;
public hidePages = false;
public displayType = '';
constructor(public bsModalRef: BsModalRef) { }
ngOnInit() {
}
public toggleColumn(index) {
const c = this.columns[index];
if (this.hiddenList[c.name]) {
delete this.hiddenList[c.name];
} else {
this.hiddenList[c.name] = true;
}
}
public submit(): void {
this.onSubmit.emit({newHiddenList: this.hiddenList, pageSize: this.pageSize, autoRefresh: this.autoRefresh, displayType: this.displayType});
this.bsModalRef.hide();
}
public setDisplayType(type: string) {
if (type === DisplayTypes.TILES) {
this.displayType = DisplayTypes.TILES;
} else if (type === DisplayTypes.TABLE) {
this.displayType = DisplayTypes.TABLE;
} else {
      this.displayType = '';
}
}
}
|
<filename>src/renderer/global.d.ts
declare module '*.scss' {
const content: { [className: string]: string };
export default content;
}
// Static-assets path injected by electron-webpack at build time.
declare const __static: string;
// Globals provided by the Live2D runtime, loaded via script tags.
declare const loadlive2d: any;
declare const Live2D: any;
|
<filename>options.js
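// Options page for the extension: saves the retry delay and attempt limit
// to chrome.storage.sync and restores them when the page loads.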
function save_options(){
// Parse as numbers so the stored values match the numeric defaults below.
var delay = parseInt(document.getElementById("delay").value, 10);
var limit = parseInt(document.getElementById("limit").value, 10);
chrome.storage.sync.set({
attemptDelay: delay,
attemptLimit: limit
}, function(){
var status = document.getElementById("status");
status.textContent = "Options Saved";
setTimeout(function(){
status.textContent = "";
}, 750);
});
}
function restore_options(){
chrome.storage.sync.get({
attemptDelay: 3,
attemptLimit: -1
}, function(items){
document.getElementById("delay").value = items.attemptDelay;
// The default limit of -1 leaves the field blank.
if(items.attemptLimit > 0){
document.getElementById("limit").value = items.attemptLimit;
}
});
}
document.addEventListener("DOMContentLoaded", restore_options);
document.getElementById("save").addEventListener("click", save_options);
|
#!/bin/bash
echo "Starting to exec"
VERSION=1.0.0.M1-`date +%Y%m%d_%H%M%S`-VERSION
MESSAGE="[Concourse CI] Bump to Next Version ($VERSION)"
cd out || exit 1
ls -al
cp -r ../version/. ./
echo "Bump to ${VERSION}"
echo "${VERSION}" > version
git config --global user.email "${GIT_EMAIL}"
git config --global user.name "${GIT_NAME}"
git add version
git commit -m "${MESSAGE}"
|
import {getTasksRelationalDataDictionary, getTasks} from '../selectors/tasks';
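// Tasks form parent -> child chains: each task stores its parentId, and the
// selectors expose derived childId/isLeaf/isRoot flags. The actions below
// keep those chains consistent when tasks are added, deleted or moved.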
export const UPDATE_TASK = 'UPDATE_TASK';
function _updateTask(id, diff) {
return (dispatch) => {
dispatch({type: UPDATE_TASK, id, diff});
}
}
export const DELETE_TASK = 'DELETE_TASK';
function _deleteTask(id) {
return (dispatch) => {
dispatch({type: DELETE_TASK, id});
};
}
export const ADD_NEW_TASK = 'ADD_NEW_TASK';
export function addNewTask(task, onIdReceived) {
return (dispatch, getState) => {
const state = getState();
const dictionary = getTasksRelationalDataDictionary(state);
const parent = dictionary[task.parentId];
const id = getTasks(state).length + 1 + "";
dispatch({
type: ADD_NEW_TASK,
task: { ...task, id }
});
if (parent && !parent.isLeaf) {
dispatch(setTaskParent(parent.childId, id));
}
onIdReceived(id);
};
}
function setTaskParent(id, parentId) {
return _updateTask(id, {parentId});
}
export function deleteTask(id) {
return (dispatch, getState) => {
const state = getState();
const task = getTasksRelationalDataDictionary(state)[id];
if (!task.isLeaf) {
const parentId = task.isRoot ? null : task.parentId;
dispatch(setTaskParent(task.childId, parentId));
}
dispatch(_deleteTask(id));
};
}
export function moveTaskLeft(id) {
return (dispatch, getState) => {
const state = getState();
const dictionary = getTasksRelationalDataDictionary(state);
const task = dictionary[id];
if (!task.isRoot) {
const parentId = task.parentId;
dispatch(setTaskParent(id, dictionary[parentId].parentId));
dispatch(setTaskParent(parentId, id));
if (!task.isLeaf) {
dispatch(setTaskParent(task.childId, parentId));
}
}
};
}
export function moveTaskRight(id) {
return (dispatch, getState) => {
const state = getState();
const task = getTasksRelationalDataDictionary(state)[id];
if (!task.isLeaf) {
dispatch(moveTaskLeft(task.childId));
}
};
}
export function updateTask(id, diff) {
return (dispatch, getState) => {
const state = getState();
const dictionary = getTasksRelationalDataDictionary(state);
const task = dictionary[id];
dispatch(_updateTask(id, diff));
const phaseChange = diff.phaseId !== undefined && diff.phaseId !== task.phaseId;
const teamChange = diff.teamId !== undefined && diff.teamId !== task.teamId;
const parentChange = diff.parentId !== undefined && diff.parentId !== task.parentId;
if (phaseChange || teamChange || parentChange) {
let temp = task;
// If the task moves to another phase or is assigned to another team,
// all of its child tasks "move" along with it.
while (!temp.isLeaf) {
if (phaseChange) {
dispatch(_updateTask(temp.childId, {phaseId: diff.phaseId}));
}
if (teamChange) {
dispatch(_updateTask(temp.childId, {teamId: diff.teamId}));
}
temp = dictionary[temp.childId];
}
const tasksLastChild = temp;
// If the task is attached to a new parent, the whole chain (the task
// itself plus all of its child tasks) must be spliced in between the
// new parent task and its former immediate child task:
//
// A--B---C--D
// ↑ => A--B--[E--F--G--H]--C--D
// E--F--G--H
if (parentChange) {
const newParent = dictionary[diff.parentId];
if (newParent && !newParent.isLeaf) {
dispatch(_updateTask(newParent.childId, {parentId: tasksLastChild.id}));
}
}
}
};
}
|
// import { Document } from 'mongoose';
export interface Tasks {
id?: string;
title?: string;
}
|
<filename>src/handlers/add-todo.js<gh_stars>0
'use strict';
const addToDoHandler = (event) => {
event.preventDefault();
// event delegation!
const target = event.target;
if (target.nodeName !== 'INPUT') {
return;
}
if (event.keyCode === 13) { // 13 = Enter
// update state using app method
const inputEl = document.getElementById('todo-input');
const userInput = inputEl.value;
app.addToDo(userInput);
localStorage.setItem('app', JSON.stringify(app));
inputEl.value = '';
inputEl.focus();
const todosView = renderTodos(app.state.todos);
document.getElementById('root').innerHTML = '';
document.getElementById('root').appendChild(todosView);
const lis = document.querySelectorAll('li');
for (const li of lis){
const index = li.id;
if (app.state.todos[index].completed){
li.classList.add('completed');
}
}
logger.push({
action: 'add todo',
event,
userInput,
state: app.state
});
}
};
|
#!/usr/bin/env bash
set -o pipefail # trace ERR through pipes
set -o errtrace # trace ERR through 'time command' and other functions
set -o errexit ## set -e : exit the script if any statement returns a non-true return value
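# Resolve the directory containing this script, following symlinks.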
get_script_dir () {
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$( readlink "$SOURCE" )"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
cd -P "$( dirname "$SOURCE" )"
pwd
}
WORKSPACE=$(get_script_dir)
if pgrep -lf sshuttle > /dev/null ; then
echo "sshuttle detected. Please close this program as it messes with networking and prevents builds inside Docker to work"
exit 1
fi
if [ -n "$NO_SUDO" ]; then
CAPTAIN="captain"
DOCKER="docker"
elif groups $USER | grep &>/dev/null '\bdocker\b'; then
CAPTAIN="captain"
DOCKER="docker"
else
CAPTAIN="sudo captain"
DOCKER="sudo docker"
fi
# Build
echo "Build the project..."
./build.sh
# Cannot run the integration test now as the jar needs to be deployed to BinTray first
# ./tests/test.sh
echo "[ok] Done"
count=$(git status --porcelain | wc -l)
if test "$count" -gt 0; then
git status
echo "Not all files have been committed in Git. Release aborted"
exit 1
fi
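# Bump the project version with bumpversion according to the menu choice.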
select_part() {
local choice=$1
case "$choice" in
"Patch release")
bumpversion patch
;;
"Minor release")
bumpversion minor
;;
"Major release")
bumpversion major
;;
*)
read -p "Version > " version
bumpversion --new-version=$version all
;;
esac
}
git pull --tags
# Look for a version tag in Git. If not found, ask the user to provide one
[ $(git tag --points-at HEAD | grep java-weka | wc -l) == 1 ] || (
latest_version=$(bumpversion --dry-run --list patch | grep current_version | sed -r s,"^.*=",, || echo '0.0.1')
echo
echo "Current commit has not been tagged with a version. Latest known version is $latest_version."
echo
echo 'What do you want to release?'
PS3='Select the version increment> '
options=("Patch release" "Minor release" "Major release" "Release with a custom version")
select choice in "${options[@]}";
do
select_part "$choice"
break
done
updated_version=$(bumpversion --dry-run --list patch | grep current_version | sed -r s,"^.*=",,)
read -p "Release version $updated_version? [y/N] > " ok
if [ "$ok" != "y" ]; then
echo "Release aborted"
exit 1
fi
)
updated_version=$(bumpversion --dry-run --list patch | grep current_version | sed -r s,"^.*=",,)
# Build again to update the version
echo "Build the project for distribution..."
./build.sh
# Extract the jar from the Docker image and publish it to BinTray first to be able to execute the tests
mkdir -p target/
$DOCKER rm -f java-weka-published 2> /dev/null || true
$DOCKER run -d --rm --name java-weka-published hbpmip/java-weka:latest serve
$DOCKER container cp java-weka-published:/usr/share/jars/mip-weka.jar target/mip-weka-for-deploy.jar
$DOCKER rm -f java-weka-published
mvn deploy:deploy-file \
"-Durl=https://api.bintray.com/maven/hbpmedical/maven/eu.humanbrainproject.mip.algorithms:weka/;publish=1" \
-DrepositoryId=bintray-hbpmedical-maven -Dfile=target/mip-weka-for-deploy.jar -DpomFile=pom.xml
./tests/test.sh
echo "[ok] Done"
git push
git push --tags
# Push on Docker Hub
# WARNING: Requires captain 1.1.0 to push user tags
BUILD_DATE=$(date -Iseconds) \
VCS_REF=$updated_version \
VERSION=$updated_version \
WORKSPACE=$WORKSPACE \
$CAPTAIN push target_image --branch-tags=false --commit-tags=false --tag $updated_version
# Notify Microbadger
curl -XPOST https://hooks.microbadger.com/images/hbpmip/java-weka/eqm5EMJzbfgo1X3c_E03j5YxL1c=
# Notify on slack
sed "s/USER/${USER^}/" $WORKSPACE/slack.json > $WORKSPACE/.slack.json
sed -i.bak "s/VERSION/$updated_version/" $WORKSPACE/.slack.json
curl -k -X POST --data-urlencode payload@$WORKSPACE/.slack.json https://hbps1.chuv.ch/slack/dev-activity
rm -f $WORKSPACE/.slack.json $WORKSPACE/.slack.json.bak
|
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2020 Xilinx, Inc. All Rights Reserved.
#
if [ -z "$PATH" ]; then
PATH=/home/varun/tools/XilinX/Vitis/2020.2/bin:/home/varun/tools/XilinX/Vivado/2020.2/ids_lite/ISE/bin/lin64:/home/varun/tools/XilinX/Vivado/2020.2/bin
else
PATH=/home/varun/tools/XilinX/Vitis/2020.2/bin:/home/varun/tools/XilinX/Vivado/2020.2/ids_lite/ISE/bin/lin64:/home/varun/tools/XilinX/Vivado/2020.2/bin:$PATH
fi
export PATH
if [ -z "$LD_LIBRARY_PATH" ]; then
LD_LIBRARY_PATH=
else
LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH
HD_PWD='/home/varun/coding/fpga/xylinx/pynq_z1/mpsoc_only_ps_gpio/mpsoc_only_ps_gpio.runs/synth_1'
cd "$HD_PWD"
HD_LOG=runme.log
/bin/touch $HD_LOG
ISEStep="./ISEWrap.sh"
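# EAStep runs one tool step via ISEWrap.sh, appending its output to the
# run log and aborting the script on failure.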
EAStep()
{
$ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
if [ $? -ne 0 ]
then
exit
fi
}
EAStep vivado -log mpsoc_only_ps_gpio_design_wrapper.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source mpsoc_only_ps_gpio_design_wrapper.tcl
|