text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Start the server in development or production mode.
# In development, python sources are watched and the server is restarted on
# change; otherwise the server runs directly.
# Mode is selected by the APP_ENVIRONMENT environment variable.
serverFile="${PWD}/src/server.py"
devServerFile="${PWD}/src/sourceChangeMonitor.py"

if [ "$APP_ENVIRONMENT" = "development" ]; then
    # Watch python files for changes and restart the server on change.
    # Quoting fixes breakage when $PWD contains spaces.
    python "$devServerFile" "$serverFile"
else
    # Run server file directly (production).
    python "$serverFile"
fi
/*********************************************************************
* Software License Agreement (BSD License)
*
* Copyright (c) 2012-2016, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of <NAME> nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/
#ifndef _STEREO_NODE_H_
#define _STEREO_NODE_H_
// Use includes from CameraNode
#include <ueye/CameraNode.h>
#include <ueye/stereoConfig.h>
// Threading
#include <boost/thread/mutex.hpp>
#include <boost/thread/lock_guard.hpp>
#include <boost/interprocess/sync/scoped_lock.hpp>
#include <boost/date_time/posix_time/posix_time.hpp>
// Msg for exposure and pps
#include "ueye/exposure.h"
#include "ueye/ppscontrol.h"
#include "ueye/extras.h"
namespace ueye
{

/**
 * ROS node driving a pair of uEye cameras as a stereo rig: applies
 * dynamic_reconfigure settings to both cameras, publishes left/right image
 * streams with camera info, and (optionally) per-frame "extras" messages.
 * NOTE(review): behavior claims below are limited to what this declaration
 * shows; method semantics live in the .cpp implementation.
 */
class StereoNode
{
public:
  StereoNode(ros::NodeHandle node, ros::NodeHandle private_nh);
  ~StereoNode();
private:
  // ROS callbacks
  // Apply a dynamic_reconfigure update to the whole rig.
  void reconfig(stereoConfig &config, uint32_t level);
  // Apply a dynamic_reconfigure update to a single camera.
  void reconfigCam(stereoConfig &config, uint32_t level, Camera &cam);
  void timerCallback(const ros::TimerEvent& event);
  void timerForceTrigger(const ros::TimerEvent& event);
  // set_camera_info service handlers (left, right, and the shared worker).
  bool setCameraInfoL(sensor_msgs::SetCameraInfo::Request& req, sensor_msgs::SetCameraInfo::Response& rsp);
  bool setCameraInfoR(sensor_msgs::SetCameraInfo::Request& req, sensor_msgs::SetCameraInfo::Response& rsp);
  bool setCameraInfo(sensor_msgs::SetCameraInfo::Request& req, sensor_msgs::SetCameraInfo::Response& rsp, Camera& cam,
                     sensor_msgs::CameraInfo &msg_info);
  // Load calibration for one camera into its CameraInfo message.
  void loadIntrinsics(Camera &cam, sensor_msgs::CameraInfo &msg_info);
  // Convert a raw frame buffer into a ROS Image message.
  sensor_msgs::ImagePtr processFrame(const char *frame, size_t size, const Camera &cam, sensor_msgs::CameraInfoPtr &info, sensor_msgs::CameraInfo &msg_info);
  // Publish one frame (with pps/exposure/gain metadata) per side.
  void publishImageL(const char *frame, size_t size, ros::Time stamp, int pps, double exposure, unsigned int gain, unsigned long long frame_count);
  void publishImageR(const char *frame, size_t size, ros::Time stamp, int pps, double exposure, unsigned int gain, unsigned long long frame_count);
  //void publishImageLfromList();
  //void publishImageRfromList();
  void startCamera();
  void stopCamera();
  void closeCamera();
  // Expand/normalize a filesystem path (e.g. for calibration files).
  void handlePath(std::string &path);
  // Debug-overlay helpers for the brightness AOI / binning visualization.
  void DrawBrightnessAOI_L(sensor_msgs::ImagePtr &msg);
  void DrawBrightnessAOI_R(sensor_msgs::ImagePtr &msg);
  void BinImg(sensor_msgs::ImagePtr &msg);

  // Reconfigure server and periodic timers.
  dynamic_reconfigure::Server<stereoConfig> srv_;
  ros::Timer timer_;
  ros::Timer timer_force_trigger_;

  // Per-side camera handles and calibration messages (l_ = left, r_ = right).
  sensor_msgs::CameraInfo l_msg_camera_info_, r_msg_camera_info_;
  ueye::Camera l_cam_, r_cam_;

  // Node state flags and configuration.
  bool running_;
  bool configured_;
  bool binning_;
  bool visualize_;
  bool force_streaming_;
  std::string config_path_;
  int trigger_mode_;
  bool auto_exposure_;
  bool auto_gain_;
  int zoom_;

  // Per-side frame timestamps and areas of interest.
  ros::Time l_stamp_, r_stamp_;
  IS_RECT l_aoi_, r_aoi_;
  IS_RECT l_brightness_aoi_, r_brightness_aoi_;
  IS_RECT l_visualize_brightness_aoi_, r_visualize_brightness_aoi_;

  // Current and requested exposure per side.
  double l_exposure_;
  double r_exposure_;
  double l_exposure_new_;
  double r_exposure_new_;

  //ueye::exposure exposure_calib_;
  //ueye::ppscontrol pps_;
  // Per-frame metadata (pps counter, exposure, gain) published alongside images.
  ueye::extras left_extras_;
  ueye::extras right_extras_;
  bool publish_extras_;
  //bool l_firstPPScontrolValueNeeded_;
  //bool r_firstPPScontrolValueNeeded_;
  double exposure_time_;
  int leftPpsCount;
  int rightPpsCount;
  int l_frameNo;
  int r_frameNo;

  // Per-side worker threads and their coordination state.
  boost::thread l_thread_;
  boost::thread r_thread_;
  bool stop_publish_;
  boost::condition_variable cond_l_stamp_ready, cond_r_stamp_ready, cond_l_img_info_ready, cond_r_img_info_ready;
  bool l_stamp_ready, r_stamp_ready, l_img_info_ready, r_img_info_ready;
  boost::posix_time::time_duration timeout;

  // ROS topics
  image_transport::ImageTransport it_;
  image_transport::CameraPublisher l_pub_stream_, r_pub_stream_;
  ros::ServiceServer l_srv_cam_info_, r_srv_cam_info_;
  ros::Publisher l_pub_extras_, r_pub_extras_;

  // Threading
  boost::mutex mutex_;
  boost::mutex mutex2_;
  boost::mutex mutex3_;
};

} // namespace ueye
#endif // _STEREO_NODE_H_
|
#!/bin/bash
# Invoke the Spring Cloud Contract maven plugin's "convert" goal directly
# (converts contract definitions into stubs/tests).
# NOTE(review): assumes `mvnw` is resolvable on PATH; repositories normally
# invoke the wrapper as `./mvnw` -- confirm how this script is called.
mvnw org.springframework.cloud:spring-cloud-contract-maven-plugin:2.1.0.BUILD-SNAPSHOT:convert
<filename>CommonUtils/src/com/cats/utils/UserDirRestore.java<gh_stars>0
/*
* Copyright 2015 lixiaobo
*
* VersionUpgrade project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.cats.utils;
import java.io.File;
import java.net.URL;
import java.net.URLDecoder;
/**
* @author xiaobolx
* 2015年10月30日
*/
public class UserDirRestore
{
    /**
     * Determine the directory the application actually runs from.
     * <p>
     * When the class was loaded from a jar, returns the absolute path of the
     * directory containing that jar; otherwise falls back to the JVM's
     * current working directory ({@code user.dir}).
     *
     * @return an absolute directory path; never {@code null}
     */
    private static String getRealUserDir()
    {
        String filePath = null;
        try
        {
            // Bug fix: getCodeSource() (and getLocation()) may legitimately
            // return null, e.g. for classes from the boot class path; guard
            // before dereferencing to avoid a NullPointerException.
            java.security.CodeSource source = UserDirRestore.class.getProtectionDomain().getCodeSource();
            if (source != null && source.getLocation() != null)
            {
                filePath = URLDecoder.decode(source.getLocation().getPath(), "utf-8");
            }
        } catch (Exception e)
        {
            e.printStackTrace();
        }
        if (null != filePath && filePath.endsWith(".jar"))
        {
            try
            {
                // Strip the jar's file name, keeping its parent directory.
                filePath = filePath.substring(0, filePath.lastIndexOf("/") + 1);
                File file = new File(filePath);
                // Normalize to a platform-correct absolute path (fixes the
                // URL-style path on Windows).
                filePath = file.getAbsolutePath();
                return filePath;
            } catch (Exception e)
            {
                e.printStackTrace();
            }
        }
        // Fallback: the JVM's working directory.
        return System.getProperty("user.dir");
    }

    /**
     * Point the {@code user.dir} system property at the directory resolved
     * by {@link #getRealUserDir()}.
     */
    public static void restoreUserDir()
    {
        System.setProperty("user.dir", getRealUserDir());
    }
}
|
<filename>f_stub_mock_operation/test_lib_mock.py<gh_stars>1-10
from typing import List
from datetime import date
import lib
import pytest_mock
def test_room602_operations_log(operation_data: List[lib.Operation],
                                mocker: pytest_mock.MockFixture):
    # Stub out the DB accessor so the test runs without a database; autospec
    # keeps the patched function's call signature honest.
    # NOTE(review): `operation_data` is presumably a fixture defined in
    # conftest.py -- confirm it yields exactly 2 operations for room 602.
    mocker.patch('lib.get_operations_from_db',
                 autospec=True,
                 return_value=operation_data)
    # Fully replace lib.log with a mock so the call can be asserted on.
    mock_log = mocker.patch('lib.log', autospec=True)
    room = '602'
    operations = lib.all_operations(date(2021, 7, 13), room)
    assert len(operations) == 2
    mock_log.assert_called_once_with(f"Query for room-'602'")
def test_room602_operations_log_with_spy(operation_data: List[lib.Operation],
                                         mocker: pytest_mock.MockFixture):
    # Same scenario as above, but using a spy: the real lib.log still runs,
    # while its calls are recorded for assertion.
    mocker.patch('lib.get_operations_from_db',
                 autospec=True,
                 return_value=operation_data)
    mock_log = mocker.spy(lib, 'log')
    room = '602'
    operations = lib.all_operations(date(2021, 7, 13), room)
    assert len(operations) == 2
    mock_log.assert_called_once_with(f"Query for room-'602'")
|
from kivy.animation import Animation
from kivy.app import App
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.properties import NumericProperty, ObjectProperty
from kivy.uix.floatlayout import FloatLayout
from kivy.uix.label import Label
from kivy.vector import Vector
class TextLabel(Label):
    """Label that travels along a circle, driven by an angle animation."""

    # Current angular position in degrees; animated 0 -> 360 repeatedly.
    angle = NumericProperty(0)
    # NOTE(review): these shadow Widget's built-in center_x/center_y
    # properties -- presumably intentional so on_angle can set them directly;
    # confirm against the intended layout behavior.
    center_x = NumericProperty(0)
    center_y = NumericProperty(0)

    def __init__(self, **kwargs):
        super(TextLabel, self).__init__(**kwargs)
        self.text = "Moving Label"
        self.pos_hint = {"center_x": 0.5, "center_y": 0.5}
        self.size_hint = (1, 1)
        self.texture_size = self.size
        # Start at the window center.
        self.center_x = Window.width / 2
        self.center_y = Window.height / 2
        self.angle = 0
        # Spin 0 -> 360 in one second, snap back to 0 instantly, repeat forever.
        self.animation = Animation(angle=360, duration=1) + Animation(angle=0, duration=0)
        self.animation.repeat = True
        self.animation.start(self)

    def on_angle(self, instance, value):
        # Re-position the label on a circle of radius one third of the
        # smaller window dimension, centered in the window; the unit vector
        # (1, 0) rotated by `angle` gives the direction.
        radius = min(Window.width, Window.height) / 3
        self.center_x = Window.width / 2 + radius * Vector(1, 0).rotate(self.angle)[0]
        self.center_y = Window.height / 2 + radius * Vector(1, 0).rotate(self.angle)[1]
class MovingLabelApp(App):
    """Kivy application showing a single label moving on a circular path."""

    def build(self):
        """Build the widget tree: a float layout holding the moving label."""
        root = FloatLayout()
        root.add_widget(TextLabel())
        return root


if __name__ == '__main__':
    MovingLabelApp().run()
from typing import Tuple
def parse_package_definition(code_snippet: str) -> Tuple[str, str]:
    """Extract (package_name, package_version) from a define_pypi_package call.

    Expects ``code_snippet`` to contain two single-quoted arguments, e.g.
    ``define_pypi_package('requests', '2.26.0')`` yields
    ``('requests', '2.26.0')``.

    Bug fix: the original sliced from the FIRST to the LAST quote, so the
    "name" came back as ``"requests', '2.26.0"`` and the version kept its
    opening quote -- the documented output was never produced.

    Raises:
        ValueError: if the snippet does not contain two quoted arguments.
    """
    # Splitting on single quotes yields:
    # [prefix, package_name, separator, package_version, suffix]
    parts = code_snippet.split("'")
    if len(parts) < 5:
        raise ValueError("expected two single-quoted arguments in %r" % (code_snippet,))
    return parts[1], parts[3]
# Test the function with the given code snippet (quick manual self-check).
code_snippet = "shipyard2.rules.pythons.define_pypi_package('requests', '2.26.0')"
print(parse_package_definition(code_snippet))  # Output: ('requests', '2.26.0')
-- Return the names of all cities located in France.
SELECT city_name
FROM cities
WHERE country = 'France'
#!/usr/bin/env bash
# OpenStack RC file: exports the environment variables the OpenStack CLI
# uses to authenticate against Keystone (Identity API v3).
# To use an OpenStack cloud you need to authenticate against the Identity
# service named keystone, which returns a **Token** and **Service Catalog**.
# The catalog contains the endpoints for all services the user/tenant has
# access to - such as Compute, Image Service, Identity, Object Storage, Block
# Storage, and Networking (code-named nova, glance, keystone, swift,
# cinder, and neutron).
#
# *NOTE*: Using the 3 *Identity API* does not necessarily mean any other
# OpenStack API is version 3. For example, your cloud provider may implement
# Image API v1.1, Block Storage API v2, and Compute API v2.0. OS_AUTH_URL is
# only for the Identity API served through keystone.
export OS_AUTH_URL=https://keystone.rc.nectar.org.au:5000/v3/
# With the addition of Keystone we have standardized on the term **project**
# as the entity that owns the resources.
export OS_PROJECT_ID=5d12b5ea318a44528e0e1db07ef490aa
export OS_PROJECT_NAME="unimelb-comp90024-2020-grp-18"
export OS_USER_DOMAIN_NAME="Default"
# Don't leave blank variables exported; unset them if empty.
if [ -z "$OS_USER_DOMAIN_NAME" ]; then unset OS_USER_DOMAIN_NAME; fi
export OS_PROJECT_DOMAIN_ID="default"
if [ -z "$OS_PROJECT_DOMAIN_ID" ]; then unset OS_PROJECT_DOMAIN_ID; fi
# unset v2.0 items in case set
unset OS_TENANT_ID
unset OS_TENANT_NAME
# In addition to the owning entity (tenant), OpenStack stores the entity
# performing the action as the **user**.
export OS_USERNAME="jingyi.li@student.unimelb.edu.au"
# With Keystone you pass the keystone password.
# Read the password interactively without echoing it (-s) and without
# backslash interpretation (-r).
echo "Please enter your OpenStack Password for project $OS_PROJECT_NAME as user $OS_USERNAME: "
read -sr OS_PASSWORD_INPUT
export OS_PASSWORD=$OS_PASSWORD_INPUT
# If your configuration has multiple regions, we set that information here.
# OS_REGION_NAME is optional and only valid in certain environments.
export OS_REGION_NAME="Melbourne"
# Don't leave a blank variable, unset it if it was empty
if [ -z "$OS_REGION_NAME" ]; then unset OS_REGION_NAME; fi
export OS_INTERFACE=public
export OS_IDENTITY_API_VERSION=3
#!/bin/bash
# SLURM batch job: run the sequence-tagging experiment with the selu
# activation configuration (job name Act_selu_1).
#SBATCH -J Act_selu_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional arguments: activation fn, a seed/id, optimizer, depth, then
# NOTE(review): the remaining numeric values are presumably dropout/learning
# rate and an init scheme ("orth") -- confirm against PE-my.py's argv parsing.
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py selu 174 Nadam 3 0.5245884720146486 0.0020337057375237233 orth 0.3
|
/// <reference path="../../core/Collider.ts"/>
/// <reference path="../../math/Rectangle.ts"/>
module WOZLLA.component {

    /**
     * @class WOZLLA.component.MaskCollider
     * Collider used for mask areas: it reports a hit for every local point
     * (collideXY always returns true) but never collides with another
     * collider (collide always returns false).
     */
    export class MaskCollider extends WOZLLA.Collider {

        // Every local point is considered inside the mask.
        collideXY(localX:number, localY:number):boolean {
            return true;
        }

        // Mask colliders never interact with other colliders.
        collide(collider:Collider):boolean {
            return false;
        }
    }
}
<reponame>mykaelandrade/fiscal4j
package br.indie.fiscal4j.validadores;
/**
 * Static range validators for Integer fields: "tamanhoN" methods enforce a
 * maximum width of N digits, "exatamenteN" methods enforce exactly N digits.
 * All methods throw NumberFormatException when the value is out of range;
 * a null value always passes (treated as "not informed").
 */
public abstract class IntegerValidador {

    /** Rejects values greater than {@code maximo}; null passes. */
    private static void validarMaximo(final Integer valor, final int maximo, final String info) {
        if (valor == null) {
            return;
        }
        if (valor > maximo) {
            throw new NumberFormatException("Valor extrapolou o tamanho do campo " + info);
        }
    }

    /** Rejects values outside {@code [minimo, maximo]}; null passes. */
    private static void validarIntervalo(final Integer valor, final int minimo, final int maximo, final String info) {
        if (valor == null) {
            return;
        }
        if (valor < minimo || valor > maximo) {
            throw new NumberFormatException(String.format("Valor %s tem tamanho fora do intervalo de [%s-%s]", info, minimo, maximo));
        }
    }

    /** Up to 4 digits (max 9999). */
    public static void tamanho4(final Integer valor, final String info) {
        validarMaximo(valor, 9999, info);
    }

    /** Up to 7 digits (max 9999999). */
    public static void tamanho7(final Integer valor, final String info) {
        validarMaximo(valor, 9999999, info);
    }

    /** Up to 9 digits (max 999999999). */
    public static void tamanho9(final Integer valor, final String info) {
        validarMaximo(valor, 999999999, info);
    }

    /** Exactly 7 digits (1000000-9999999). */
    public static void exatamente7(final Integer valor, final String info) {
        validarIntervalo(valor, 1000000, 9999999, info);
    }

    /** Exactly 4 digits (1000-9999). */
    public static void exatamente4(final int valor, final String info) {
        validarIntervalo(valor, 1000, 9999, info);
    }

    /** Up to 3 digits (max 999). */
    public static void tamanho3(final int valor, final String info) {
        validarMaximo(valor, 999, info);
    }

    /** Up to 3 digits, capped at 990. */
    public static void tamanho3maximo990(final int valor, final String info) {
        validarMaximo(valor, 990, info);
    }

    /** Exactly 6 digits (100000-999999). */
    public static void exatamente6(final Integer valor, final String info) {
        validarIntervalo(valor, 100000, 999999, info);
    }

    /** Up to 6 digits (max 999999). */
    public static void tamanho6(final Integer valor, final String info) {
        validarMaximo(valor, 999999, info);
    }

    /** A single digit (max 9). */
    public static void exatamente1(final Integer valor, final String info) {
        validarMaximo(valor, 9, info);
    }

    /** One or two digits (1-99). */
    public static void tamanho1a2(final int valor, final String info) {
        validarIntervalo(valor, 1, 99, info);
    }

    /** Exactly 2 digits (10-99). */
    public static void exatamente2(final Integer valor, final String info) {
        validarIntervalo(valor, 10, 99, info);
    }
}
|
package de.schub.marathon_scaler.Monitoring.Backend.Prometheus.Exception;
/**
* Exception thrown when a query failed
*/
public class PrometheusQueryException extends PrometheusException
{
    /**
     * Builds the exception message from the failing query and the error
     * reported by Prometheus.
     *
     * @param expression the PromQL expression that was executed
     * @param error      the error description returned for the query
     */
    public PrometheusQueryException(String expression, String error)
    {
        super(String.format("Failed to execute prometheus query '%s': %s", expression, error));
    }
}
|
const gulp = require("gulp");
const sass = require("gulp-sass");
const typescript = require("gulp-tsc");

// Source globs -- deliberately exclude the node_modules folder.
const sassPaths = ['./static/*.scss'];
const tsPaths = ['./server.ts', './app/**/*.ts'];

// Compile TypeScript in place (output next to the sources).
gulp.task('task-typescript', () => {
    // Returning the stream lets gulp know when the task has completed.
    return gulp.src(tsPaths, {base: '.'})
        .pipe(typescript({
            tmpDir: '.tmp'
        }))
        .pipe(gulp.dest('.'));
});

// Compile SCSS in place.
gulp.task('task-sass', () => {
    return gulp.src(sassPaths)
        .pipe(sass())
        .pipe(gulp.dest(f => f.base));
});

gulp.task('watch-tsc', () => {
    // Bug fix: this watcher referenced a non-existent task name 'task-tsc';
    // the TypeScript task is registered as 'task-typescript'.
    gulp.watch(tsPaths, ['task-typescript']);
});

gulp.task('watch-sass', () => {
    gulp.watch(sassPaths, ['task-sass']);
});

// Watch both TypeScript and SCSS sources.
gulp.task('watch-ts-and-sass-debug', () => {
    gulp.watch(sassPaths, ['task-sass']);
    gulp.watch(tsPaths, ['task-typescript']);
});
#!/bin/sh
# finder.sh: report how many files under a directory contain a search string
# and how many lines match it.
# Usage: finder.sh <filesdir> <searchstr>

filesdir=$1
searchstr=$2

# Validate arguments before touching the filesystem. (Bug fix: the original
# did a bare `cd` here -- silently changing to $HOME -- before validating.)
if [ "$#" -ne 2 ] ; then
    echo "2 arguments required"
    echo "1: the file directory path"
    echo "2: the name of the file"
    exit 1
fi

if [ ! -d "${filesdir}" ] ; then
    echo "Argument 1 is not a directory"
    exit 1
fi

# Search the directory tree directly instead of cd-ing into it. Both counts
# use the same flags (-r recursive, -x whole-line match) so the file count
# and line count are consistent; the original mixed `cd` + glob for one and
# an absolute-path re-scan for the other, which double-counted or missed
# files depending on how the path was given.
numlines=$(grep -r -x -e "${searchstr}" "${filesdir}" | wc -l)
numfiles=$(grep -l -r -x -e "${searchstr}" "${filesdir}" | wc -l)
echo "The number of files are ${numfiles} and the number of matching lines are ${numlines}"
|
#!/bin/bash
#
# Metrics sync takes metrics from the osde2e metrics bucket and syncs them with the datahub Prometheus pushgateway.
#
METRICS_BUCKET=osde2e-metrics
INCOMING=incoming
PROCESSED=processed
VENV="$(mktemp -d)"
METRICS_DIR="$(mktemp -d)"
METRIC_TIMEOUT_IN_SECONDS=21600 # 6h in seconds
METRIC_ADDON_TIMEOUT_IN_SECONDS=46800 # 13h in seconds
METRIC_SCALE_TIMEOUT_IN_SECONDS=93600 # 26h in seconds
METRIC_IMAGESET_TIMEOUT_IN_SECONDS=93600 # 26h in seconds

# Normalize the pushgateway URL by stripping a single trailing slash.
PUSHGATEWAY_URL=${PUSHGATEWAY_URL%/}

# Cleanup the temporary directories
trap 'rm -rf "$VENV" "$METRICS_DIR"' EXIT

# First, we should detect any stale metrics and purge them if needed.
# Each entry is "job_name,push_time_seconds" for osde2e/ocm-api-test jobs.
METRICS_LAST_UPDATED=$(curl "$PUSHGATEWAY_URL/metrics" | grep "^push_time_seconds{.*" | grep -E 'osde2e|ocm-api-test' | sed 's/^.*job="\([[:alnum:]_.-]*\)".*\}\s*\(.*\)$/\1,\2/' | sort | uniq)
CURRENT_TIMESTAMP=$(date +%s)

for metric_and_timestamp in $METRICS_LAST_UPDATED; do
    JOB_NAME=$(echo -e "$metric_and_timestamp" | cut -f 1 -d,)
    TIMESTAMP=$(echo -e "$metric_and_timestamp" | cut -f 2 -d, | xargs -d '\n' printf "%.f")

    # Different job families are allowed different lifetimes before their
    # metrics are considered stale.
    if echo "$JOB_NAME" | grep -qE ".*osde2e.*addon.*"; then
        TIMESTAMP_PLUS_TIMEOUT=$((TIMESTAMP + METRIC_ADDON_TIMEOUT_IN_SECONDS))
    elif echo "$JOB_NAME" | grep -qE ".*osde2e.*scale.*"; then
        TIMESTAMP_PLUS_TIMEOUT=$((TIMESTAMP + METRIC_SCALE_TIMEOUT_IN_SECONDS))
    elif echo "$JOB_NAME" | grep -qE ".*osde2e.*imageset.*"; then
        TIMESTAMP_PLUS_TIMEOUT=$((TIMESTAMP + METRIC_IMAGESET_TIMEOUT_IN_SECONDS))
    else
        TIMESTAMP_PLUS_TIMEOUT=$((TIMESTAMP + METRIC_TIMEOUT_IN_SECONDS))
    fi

    if (( TIMESTAMP_PLUS_TIMEOUT < CURRENT_TIMESTAMP )); then
        echo "Metrics for job $JOB_NAME have expired. Removing them from the pushgateway."
        if ! curl -X DELETE "$PUSHGATEWAY_URL/metrics/job/$JOB_NAME"; then
            echo "Error deleting old results for $JOB_NAME."
            exit 3
        fi
    fi
done

# Install the AWS CLI into a throwaway virtualenv.
virtualenv "$VENV"
# shellcheck source=/dev/null
. "$VENV/bin/activate"
pip install awscli

if ! aws s3 ls "s3://$METRICS_BUCKET" > /dev/null 2>&1 ; then
    echo "AWS CLI not configured properly."
    exit 1
fi

# We're going to iterate over each file as opposed to trying to grab things by wildcard.
# This way, we're guaranteed to process a fixed set of files.
METRICS_FILES=$(aws s3 ls "s3://$METRICS_BUCKET/$INCOMING/" | awk '{print $4}')

for file in $METRICS_FILES; do
    INCOMING_FILE="$METRICS_BUCKET/$INCOMING/$file"
    PROCESSED_FILE="$METRICS_BUCKET/$PROCESSED/$file"

    echo "Processing $file"
    if ! aws s3 cp "s3://$INCOMING_FILE" "$METRICS_DIR/$file"; then
        echo "Error copying $INCOMING_FILE from S3."
        exit 2
    fi

    # Extract the job name from "<prefix>.<job>.metrics.prom".
    # shellcheck disable=SC2001
    JOB_NAME=$(echo "$file" | sed 's/^[^\.]*\.\(.*\)\.metrics\.prom$/\1/')

    if [[ ! $JOB_NAME = delete_* ]]; then
        # Replace-then-push so stale series for this job do not linger.
        if ! curl -X DELETE "$PUSHGATEWAY_URL/metrics/job/$JOB_NAME"; then
            echo "Error deleting old results for $JOB_NAME."
            exit 3
        fi
        if ! curl -T "$METRICS_DIR/$file" "$PUSHGATEWAY_URL/metrics/job/$JOB_NAME"; then
            echo "Error pushing new results for $JOB_NAME."
            exit 4
        fi
        if ! aws s3 mv "s3://$INCOMING_FILE" "s3://$PROCESSED_FILE"; then
            echo "Error moving $INCOMING_FILE to $PROCESSED_FILE in S3."
            exit 5
        fi
        # Typo fix in user-facing message ("drectory" -> "directory").
        echo "File has been processed and moved into the processed directory."
    else
        echo "$file is a test file. Deleting it from S3."
        if ! aws s3 rm "s3://$INCOMING_FILE"; then
            echo "Error removing test file from S3."
            exit 6
        fi
    fi
done
|
#! /bin/sh
#==================================================
#
# updated: 03/22/20 Reach Technology
#
# this script controls a system critical function
# and must not be put under control of chkconfig
#
#==================================================
#
# WARNING: If your hardware clock is not in UTC/GMT, this script
# must know the local time zone. This information is
# stored in /etc/localtime. This might be a problem if
# your /etc/localtime is a symlink to something in
# /usr/share/zoneinfo AND /usr isn't in the root
# partition! The workaround is to define TZ either
# in /etc/default/rcS, or in the proper place below.
# Nothing to do if the hwclock binary is absent.
[ ! -x /sbin/hwclock ] && exit 0
# Pull in UTC / HWCLOCKACCESS / VERBOSE / TZ defaults if configured.
[ -f /etc/default/rcS ] && . /etc/default/rcS
# Select how the RTC is interpreted: UTC or local time.
[ "$UTC" = "yes" ] && tz="--utc" || tz="--localtime"
case "$1" in
    start)
        # Boot: set the system clock from the hardware (RTC) clock.
        if [ "$VERBOSE" != no ]
        then
            echo "System time was `date`."
            echo "Setting the System Clock using the Hardware Clock as reference..."
        fi
        if [ "$HWCLOCKACCESS" != no ]
        then
            if [ -z "$TZ" ]
            then
                hwclock $tz --hctosys
            else
                TZ="$TZ" hwclock $tz --hctosys
            fi
        fi
        if [ "$VERBOSE" != no ]
        then
            echo "System Clock set. System local time is now `date`."
        fi
        ;;
    stop|restart|reload|force-reload)
        #
        # Updates the Hardware Clock with the System Clock time.
        # This will *override* any changes made to the Hardware Clock.
        #
        # WARNING: If you disable this, any changes to the system
        # clock will not be carried across reboots.
        #
        if [ "$VERBOSE" != no ]
        then
            echo "Saving the System Clock time to the Hardware Clock..."
        fi
        if [ "$HWCLOCKACCESS" != no ]
        then
            hwclock $tz --systohc
        fi
        if [ "$VERBOSE" != no ]
        then
            echo "Hardware Clock updated to `date`."
        fi
        exit 0
        ;;
    show)
        # Print the hardware clock's current time.
        if [ "$HWCLOCKACCESS" != no ]
        then
            hwclock $tz --show
        fi
        ;;
    *)
        echo "Usage: hwclock.sh {start|stop|show|reload|restart}" >&2
        echo "       start sets kernel (system) clock from hardware (RTC) clock" >&2
        echo "       stop and reload set hardware (RTC) clock from kernel (system) clock" >&2
        exit 1
        ;;
esac
|
// Return a new array containing every string from `arr` upper-cased.
// The input array is left unmodified.
function toUpper(arr) {
    const result = [];
    for (const item of arr) {
        result.push(item.toUpperCase());
    }
    return result;
}
<reponame>collosdeveloper/tt
package com.knyaz.testtask.api.model.pojos;
import android.text.format.DateUtils;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
/**
 * Serializable data object describing one video entry returned by the API.
 */
public class VideoItem implements Serializable {
    public String id;
    public String url;
    public String name;
    public String author;
    public String description;
    // URL of the preview thumbnail image.
    public String thumb_url;
    // Timestamp in seconds since the epoch (converted to millis in getDate()).
    public long ts;
    public List<CommentItem> comments;

    // Convert the epoch-seconds timestamp into a java.util.Date.
    public Date getDate() {
        return new Date((ts * DateUtils.SECOND_IN_MILLIS));
    }

    @Override
    public String toString() {
        return "VideoItem{" +
                "id='" + id + '\'' +
                ", url='" + url + '\'' +
                ", name='" + name + '\'' +
                ", author='" + author + '\'' +
                ", description='" + description + '\'' +
                ", thumb_url='" + thumb_url + '\'' +
                ", ts=" + ts +
                ", comments=" + comments +
                '}';
    }
}
<gh_stars>0
'use strict';
// external deps
var ObjectId = require('mongodb').ObjectId;
// internal deps
require('mongodb-toolkit');
var MmModels = require('mm-models');
var map = MmModels.map;
var ArticleApproval = MmModels.master.article.ArticleApproval;
var ArticleBrand = MmModels.master.article.ArticleBrand;
var ArticleCategory = MmModels.master.article.ArticleCategory;
var ArticleColor = MmModels.master.article.ArticleColor;
var ArticleCostCalculationDetail = MmModels.master.article.ArticleCostCalculationDetail;
var ArticleCostCalculation = MmModels.master.article.ArticleCostCalculation;
var ArticleCounter = MmModels.master.article.ArticleCounter;
var ArticleMaterial = MmModels.master.article.ArticleMaterial;
var ArticleMotif = MmModels.master.article.ArticleMotif;
var ArticleOrigin = MmModels.master.article.ArticleOrigin;
var ArticleSeason = MmModels.master.article.ArticleSeason;
var ArticleSize = MmModels.master.article.ArticleSize;
var ArticleSubCounter = MmModels.master.article.ArticleSubCounter;
var ArticleTheme = MmModels.master.article.ArticleTheme;
var ArticleType = MmModels.master.article.ArticleType;
var ArticleVariant = MmModels.master.article.ArticleVariant;
var Article = MmModels.master.article.Article;
module.exports = class ArticleTypeManager {
constructor(db, user) {
this.db = db;
this.user = user;
this.articleTypeCollection = this.db.use(map.master.article.ArticleType);
}
read(paging) {
var _paging = Object.assign({
page: 1,
size: 20,
order: '_id',
asc: true
}, paging);
return new Promise((resolve, reject) => {
var deleted = {
_deleted: false
};
var query = _paging.keyword ? {
'$and': [deleted]
} : deleted;
if (_paging.keyword) {
var regex = new RegExp(_paging.keyword, "i");
var filterCode = {
'code': {
'$regex': regex
}
};
var filterName = {
'name': {
'$regex': regex
}
};
var $or = {
'$or': [filterCode, filterName]
};
query['$and'].push($or);
}
this.articleTypeCollection
.where(query)
.page(_paging.page, _paging.size)
.orderBy(_paging.order, _paging.asc)
.execute()
.then(articleTypes => {
resolve(articleTypes);
})
.catch(e => {
reject(e);
});
});
}
getSingleById(id) {
return new Promise((resolve, reject) => {
if (id === '')
resolve(null);
var query = {
_id: new ObjectId(id),
_deleted: false
};
this.getSingleByQuery(query)
.then(articleType => {
resolve(articleType);
})
.catch(e => {
reject(e);
});
});
}
getSingleByIdOrDefault(id) {
return new Promise((resolve, reject) => {
if (id === '')
resolve(null);
var query = {
_id: new ObjectId(id),
_deleted: false
};
this.getSingleByQueryOrDefault(query)
.then(articleType => {
resolve(articleType);
})
.catch(e => {
reject(e);
});
});
}
getSingleByQuery(query) {
return new Promise((resolve, reject) => {
this.articleTypeCollection
.single(query)
.then(articleType => {
resolve(articleType);
})
.catch(e => {
reject(e);
});
})
}
getSingleByQueryOrDefault(query) {
return new Promise((resolve, reject) => {
this.articleTypeCollection
.singleOrDefault(query)
.then(articleType => {
resolve(articleType);
})
.catch(e => {
reject(e);
});
})
}
create(articleType) {
return new Promise((resolve, reject) => {
this._validate(articleType)
.then(validArticleType => {
this.articleTypeCollection.insert(validArticleType)
.then(id => {
resolve(id);
})
.catch(e => {
reject(e);
})
})
.catch(e => {
reject(e);
})
});
}
update(articleType) {
return new Promise((resolve, reject) => {
this._validate(articleType)
.then(validArticleType => {
this.articleTypeCollection.update(validArticleType)
.then(id => {
resolve(id);
})
.catch(e => {
reject(e);
})
})
.catch(e => {
reject(e);
})
});
}
delete(articleType) {
return new Promise((resolve, reject) => {
this._validate(articleType)
.then(validArticleType => {
validArticleType._deleted = true;
this.articleTypeCollection.update(validArticleType)
.then(id => {
resolve(id);
})
.catch(e => {
reject(e);
})
})
.catch(e => {
reject(e);
})
});
}
_validate(articleType) {
var errors = {};
return new Promise((resolve, reject) => {
var valid = new ArticleType(articleType);
// 1. begin: Declare promises.
var getArticleType = this.articleTypeCollection.singleOrDefault({
"$and": [{
_id: {
'$ne': new ObjectId(valid._id)
}
}, {
code: valid.code
}]
});
// 1. end: Declare promises.
// 2. begin: Validation.
Promise.all([getArticleType])
.then(results => {
var _articleType = results[0];
if (!valid.code || valid.code == '')
errors["code"] = "code is required";
else if (_articleType) {
errors["code"] = "code already exists";
}
if (!valid.name || valid.name == '')
errors["name"] = "name is required";
// 2c. begin: check if data has any error, reject if it has.
for (var prop in errors) {
var ValidationError = require('module-toolkit').ValidationError;
reject(new ValidationError('data does not pass validation', errors));
}
valid.stamp(this.user.username, 'manager');
resolve(valid);
})
.catch(e => {
reject(e);
})
});
}
}; |
<gh_stars>1-10
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN!
// NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized.
// PowerProfileSpec defines the desired state of PowerProfile
type PowerProfileSpec struct {
	// INSERT ADDITIONAL SPEC FIELDS - desired state of cluster
	// Important: Run "make" to regenerate code after modifying this file
	// The name of the PowerProfile
	Name string `json:"name"`
	// The maximum frequency the core is allowed go
	Max int `json:"max,omitempty"`
	// The minimum frequency the core is allowed go
	Min int `json:"min,omitempty"`
	// The priority value associated with this Power Profile
	Epp string `json:"epp"`
}

// PowerProfileStatus defines the observed state of PowerProfile
type PowerProfileStatus struct {
	// INSERT ADDITIONAL STATUS FIELD - define observed state of cluster
	// Important: Run "make" to regenerate code after modifying this file
	// The ID given to the power profile by AppQoS
	ID int `json:"id"`
}

// +kubebuilder:object:root=true
// +kubebuilder:subresource:status
// PowerProfile is the Schema for the powerprofiles API
type PowerProfile struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	Spec   PowerProfileSpec   `json:"spec,omitempty"`
	Status PowerProfileStatus `json:"status,omitempty"`
}

// +kubebuilder:object:root=true
// PowerProfileList contains a list of PowerProfile
type PowerProfileList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty"`
	Items []PowerProfile `json:"items"`
}

// init registers both API types with the scheme builder so they can be
// added to a manager's runtime scheme.
func init() {
	SchemeBuilder.Register(&PowerProfile{}, &PowerProfileList{})
}
|
<reponame>milogert/website
import styles from './sections.module.css'
const Sections = () => <div>
Sections here.
</div>
export default Sections
|
<gh_stars>1-10
#ifdef ESP8266
#include <ESP8266WiFi.h>
#else
#include <WiFi.h>
#endif
#include <WiFiUdp.h>
#include <OSCMessage.h>
#include <OSCBundle.h>
#include <OSCData.h>
#include <Servo.h>
char ssid[] = "*********"; //your network SSID
char pass[] = "*********"; //your network password
WiFiUDP Udp;
const IPAddress outIp(10,40,10,105);
const unsigned int outPort = 9999;
const unsigned int localPort = 8888;
static const int servoPin = 13;
Servo servo1;
OSCErrorCode error;
unsigned int servoPos;
// One-time initialisation: serial console, servo pin, WiFi association, and
// the UDP socket that receives OSC packets on localPort.
void setup() {
  Serial.begin(115200);
  servo1.attach(servoPin);
  Serial.println();
  Serial.print("Connecting to ");
  Serial.println(ssid);
  WiFi.begin(ssid, pass);
  // Block until the WiFi association succeeds, printing progress dots.
  while (WiFi.status() != WL_CONNECTED) {
    delay(500);
    Serial.print(".");
  }
  Serial.println("");
  Serial.println("WiFi connected");
  Serial.println("IP address: ");
  Serial.println(WiFi.localIP());
  Serial.println("Starting UDP");
  Udp.begin(localPort);
  Serial.print("Local port: ");
  // ESP32's WiFiUDP has no localPort() accessor, so print the constant there.
#ifdef ESP32
  Serial.println(localPort);
#else
  Serial.println(Udp.localPort());
#endif
}
// OSC handler for "/servo": read the requested position from the first
// message argument and drive the servo.
void servo(OSCMessage &msg) {
  servoPos = msg.getInt(0);
  // Map the incoming value onto the servo range by negating and offsetting
  // by 84. NOTE(review): servoPos is an unsigned int, so any input greater
  // than 84 makes this expression wrap to a huge value -- presumably the
  // sender only transmits values <= 84; confirm the expected input range.
  servoPos = -servoPos + 84;
  servo1.write(servoPos);
  Serial.print("/servoPos: ");
  Serial.println(servoPos);
}
// Main loop: read at most one UDP packet per iteration, parse it as an OSC
// message, and dispatch "/servo" messages to the servo handler.
void loop() {
  OSCMessage message;
  int size = Udp.parsePacket();
  if (size > 0) {
    // Feed the packet bytes into the OSC message parser one at a time.
    while (size--) {
      message.fill(Udp.read());
    }
    if(!message.hasError()){
      message.dispatch("/servo", servo);
    } else {
      // Parsing failed: record and report the error code.
      error = message.getError();
      Serial.print("error: ");
      Serial.println(error);
    }
  }
}
|
#pragma once
#include <vector>
#include "triangle.h"
#include "aabb.h"
/**
 * Triangle mesh with a precomputed bounding box, used for debugging.
 * Exposes read-only iteration over its triangles and ray intersection.
 */
class DebugMesh
{
public:
    // NOTE(review): the constructor presumably populates m_triangles via
    // initMesh()/initRandomMesh() -- confirm in the implementation file.
    DebugMesh();

    // Read-only iteration over the mesh's triangles.
    std::vector<Triangle>::const_iterator begin() const
    {
        return m_triangles.cbegin();
    }

    std::vector<Triangle>::const_iterator end() const
    {
        return m_triangles.cend();
    }

    // Axis-aligned bounding box enclosing all triangles.
    const AABB& boundingBox() const
    {
        return m_boundingBox;
    }

    // Direct access to the triangle list.
    const std::vector<Triangle>& triangles() const
    {
        return m_triangles;
    }

    // Ray/mesh intersection; on a hit, hit_info is filled with the details.
    bool intersect(Ray& ray, HitInfo& hit_info) const;

private:
    void initMesh();
    void initRandomMesh();

    std::vector<Triangle> m_triangles;
    AABB m_boundingBox;
};
|
#!/bin/bash -eu
# Convert a SQL file to a SpatiaLite database using helpers from functions.sh.
# NOTE(review): FILE_VRT and FILE_SQLITE are not set in this script — they are
# presumably exported by the sourced helper functions; confirm in functions.sh.
source functions.sh

# First CLI argument: path to the input SQL file (consumed by the helpers).
FILE_SQL="${1}"

get_customer_name
get_athena_credentials
convert_sql_to_spatialite

echo
info "Created: ${FILE_VRT}"
info "Created: ${FILE_SQLITE}"
<reponame>wjtree/wechat-parent<filename>wechat-sdk/src/main/java/com/app/wechat/request/WxMaterialAddNewsRequest.java
/**
* Copyright (c) 2017. Lorem ipsum dolor sit amet, consectetur adipiscing elit.
* Morbi non lorem porttitor neque feugiat blandit. Ut vitae ipsum eget quam lacinia accumsan.
* Etiam sed turpis ac ipsum condimentum fringilla. Maecenas magna.
* Proin dapibus sapien vel ante. Aliquam erat volutpat. Pellentesque sagittis ligula eget metus.
* Vestibulum commodo. Ut rhoncus gravida arcu.
*/
package com.app.wechat.request;
import com.app.wechat.domain.msg.WxNewsMsgModel;
import com.app.wechat.internal.code.HttpMethod;
import com.app.wechat.internal.code.WxUrl;
import com.app.wechat.response.WxMaterialAddNewsResponse;
/**
 * Request object for the WeChat "add permanent news (rich-media) material" API.
 * <p>Notes from the WeChat documentation:</p>
 * <ol>
 * <li>External image links inside article bodies are filtered by the WeChat
 * backend; image URLs must be obtained via the "upload image inside news
 * message" API.</li>
 * <li>Article bodies may embed links to already-broadcast articles of this
 * account or other official accounts.</li>
 * </ol>
 *
 * @author 王建
 * @version 1.0
 */
public class WxMaterialAddNewsRequest extends AbstractWxRequest<WxMaterialAddNewsResponse> {

    private static final long serialVersionUID = 1L;

    /** News (rich-media) payload sent as the request body. */
    private WxNewsMsgModel object;

    public WxMaterialAddNewsRequest(WxNewsMsgModel object) {
        this.object = object;
    }

    /** @return the news payload wrapped by this request */
    public WxNewsMsgModel getObject() {
        return object;
    }

    /** @return the response type this request deserializes into */
    public Class<WxMaterialAddNewsResponse> getResponseClass() {
        return WxMaterialAddNewsResponse.class;
    }

    /** @return the endpoint URL with the access token substituted in */
    public String getUrl(String accessToken) {
        return String.format(WxUrl.API_MATERIAL_ADD_NEWS, accessToken);
    }

    /** News material is created via HTTP POST. */
    public HttpMethod getMethod() {
        return HttpMethod.POST;
    }
}
require 'yt/models/base'
require 'yt/models/right_owner'
module Yt
  module Models
    # Provides methods to interact with YouTube ContentID asset ownership,
    # which provide ownership information for the specified asset.
    # @see https://developers.google.com/youtube/partner/docs/v1/ownership
    class Ownership < Base
      # @param [Hash] options the options to initialize the ownership with.
      # @option options [Hash] :data the parsed ownership resource.
      # @option options [#owner_name] :auth the authenticated content owner.
      # @option options [String] :asset_id the ID of the asset this
      #   ownership refers to.
      def initialize(options = {})
        @data = options[:data] || {}
        @auth = options[:auth]
        @asset_id = options[:asset_id]
      end

      # Updates the ownership with the given attributes.
      # @return [Boolean] true (failures are expected to raise in do_update).
      def update(attributes = {})
        underscore_keys! attributes
        do_update body: attributes
        true
      end

      # Assigns 100% of the general ownership of the asset to @auth.
      # NOTE(review): both obtain! and release! send +type: :exclude+ —
      # confirm against the Ownership API docs that :exclude is the intended
      # type for obtaining ownership.
      def obtain!
        update general: [{ratio: 100, owner: @auth.owner_name, type: :exclude}]
      end

      # Releases 100% of the general ownership of the asset from @auth.
      def release!
        update general: [{ratio: 0, owner: @auth.owner_name, type: :exclude}]
      end

      # @return [Array<RightOwner>] a list that identifies the owners of an
      #   asset and the territories where each owner has ownership.
      #   General asset ownership is used for all types of assets and is the
      #   only type of ownership data that can be provided for assets that are
      #   not compositions.
      has_attribute :general_owners, from: :general do |data|
        as_owners data
      end

      # @return [Array<RightOwner>] a list that identifies owners of the
      #   performance rights for a composition asset.
      has_attribute :performance_owners, from: :performance do |data|
        as_owners data
      end

      # @return [Array<RightOwner>] a list that identifies owners of the
      #   synchronization rights for a composition asset.
      has_attribute :synchronization_owners, from: :synchronization do |data|
        as_owners data
      end

      # @return [Array<RightOwner>] a list that identifies owners of the
      #   mechanical rights for a composition asset.
      has_attribute :mechanical_owners, from: :mechanical do |data|
        as_owners data
      end

    private

      # Request parameters for the ownership update call.
      # @see https://developers.google.com/youtube/partner/docs/v1/ownership/update
      def update_params
        super.tap do |params|
          params[:expected_response] = Net::HTTPOK
          params[:path] = "/youtube/partner/v1/assets/#{@asset_id}/ownership"
          params[:params] = {on_behalf_of_content_owner: @auth.owner_name}
        end
      end

      # Wraps each raw owner hash in a RightOwner model; nil becomes [].
      def as_owners(data)
        (data || []).map{|owner_data| Yt::RightOwner.new data: owner_data}
      end
    end
  end
end
/// <summary>
/// Returns the n-th Fibonacci number (F(0) = 0, F(1) = 1).
/// Iterative O(n) implementation; the original naive double recursion was
/// O(2^n) and recursed without bound for negative n.
/// </summary>
/// <param name="n">Zero-based index; must be non-negative.</param>
/// <returns>The n-th Fibonacci number.</returns>
/// <exception cref="System.ArgumentOutOfRangeException">Thrown when n is negative.</exception>
public static int Fibonacci(int n)
{
    if (n < 0) throw new System.ArgumentOutOfRangeException(nameof(n), "n must be non-negative");
    int previous = 0, current = 1;
    for (int i = 0; i < n; i++)
    {
        int next = previous + current;
        previous = current;
        current = next;
    }
    return previous;
}
#!/bin/bash
#
# Build the static site and publish it to the GitHub Pages branch:
# clone the pages branch, wipe its content (keeping a few CSS/JS files),
# copy in the fresh build, commit, and push.
# NOTE(review): there is no `set -e`; a failed build or clone will NOT stop
# the later destructive steps — confirm whether that is acceptable.
REPO=mac-config
echo "Syncing Github Pages Build for $REPO.."
#
# What SHA are we going to deploy?
SHA=$(git rev-parse --short HEAD)
#
# When you build the static site, where is it generated?
BUILD_DIR=gh-pages
#
# Create the static site in ./gh-pages
rake build-gh-pages
#
# What branch serves as the source for Github Pages for this repo?
#
# Usually this is 'gh-pages', but it can be 'master' if you're dealing
# with a site for a User or an Organization.
GH_PAGES_BRANCH=gh-pages
#
# Where will clone locally to, copying on top of, etc.
WORK_DIR=tmp/$GH_PAGES_BRANCH

# Step 0
echo "Cleaning up from last time.."
rm -rf $WORK_DIR
echo
echo

# Step 1
echo "Creating local copy of Github Page Branch"
mkdir -vp $WORK_DIR
GITHUB_URL=https://github.com/jedcn/$REPO.git
git clone $GITHUB_URL --branch $GH_PAGES_BRANCH --single-branch $WORK_DIR
echo
echo

# Step 2: remove everything from the pages branch, then restore the
# handful of files that are maintained on that branch directly.
echo "Deleting old content (but saving circle.yml)"
pushd $WORK_DIR
git rm -rf *
save_these_files="css/main.css css/prism.css js/vendor/prism.js"
for file in $save_these_files
do
  # Unstage the deletion, then restore the file from HEAD.
  git reset HEAD $file
  git checkout $file
done
popd
echo
echo

# Step 3
echo "Copying over build"
cp -r $BUILD_DIR/* $WORK_DIR
echo
echo

# step 4
echo "Adding all files and creating new commit"
pushd $WORK_DIR
git add .
git commit -m "[deploy] Generated from $SHA"
popd
echo
echo

# Step 5
echo "Deploying.. (by pushing new commit)"
pushd $WORK_DIR
git push origin $GH_PAGES_BRANCH:$GH_PAGES_BRANCH
popd
#!python3.6
import sys
import re
def line2command(line):
    """Parse a ``\\newcommand{\\cmd}{text}`` line into ``[cmd, text]``.

    The command name is returned *without* its leading backslash.
    Raises AttributeError when the line lacks the two brace groups.
    """
    # Raw strings: '\{' in a plain literal is an invalid string escape and
    # a DeprecationWarning (eventually an error) on modern Python.
    m = re.search(r'\{.*\}\{', line)
    cmd = m.group()[2:-2]  # strip the leading '{\' and trailing '}{'
    m = re.search(r'\}\{.*\}\n', line)
    text = m.group()[2:-2]  # strip the leading '}{' and trailing '}\n'
    return [cmd, text]
def plain2latex(line, cmds):
    """Replace plain-text phrases in *line* with their LaTeX commands.

    For each ``[name, phrase]`` pair in *cmds*, an occurrence of ``phrase``
    preceded by a space and followed by a space, comma, or period is
    replaced by ``~\\name`` with the trailing delimiter kept (the leading
    space becomes ``~``, a trailing space also becomes ``~``).
    """
    result = line
    for name, phrase in cmds:
        # The phrase is literal text, not a regex: escape it. The original
        # interpolated it raw, so phrases with regex metacharacters broke.
        escaped = re.escape(phrase)
        # Use callable replacements: a string repl like "~\\name~" makes
        # re.sub parse "\n..." as a (bad) escape — a ValueError on
        # Python 3.7+, which the original code triggered.
        result = re.sub(" " + escaped + " ", lambda m: "~\\" + name + "~", result)
        result = re.sub(" " + escaped + ",", lambda m: "~\\" + name + ",", result)
        result = re.sub(" " + escaped + r"\.", lambda m: "~\\" + name + ".", result)
    return result
if __name__ == '__main__':
    # Input .tex path comes from the first CLI argument.
    src_path = sys.argv[1]
    with open(src_path) as fh:
        all_lines = fh.readlines()
    # Collect zero-argument \newcommand definitions (skip one-arg '[1]' forms).
    definitions = [ln for ln in all_lines if "\\newcommand" in ln and "[1]" not in ln]
    cmds = [line2command(ln) for ln in definitions]
    # Rewrite every line and save next to the input as '<stem>.fixed.tex'
    # (assumes the input path ends in a 4-character extension like '.tex').
    converted = "".join(plain2latex(ln, cmds) for ln in all_lines)
    with open(src_path[:-4] + ".fixed.tex", mode='w') as fh:
        fh.write(converted)
|
<reponame>wongoo/alipay-sdk-java-all<gh_stars>0
package com.alipay.api.response;
import com.alipay.api.AlipayResponse;
/**
 * ALIPAY API: alipay.business.mall.points.notify response.
 * <p>
 * Marker response type: this endpoint returns no fields beyond the common
 * ones inherited from {@link AlipayResponse}.
 *
 * @author auto create
 * @since 1.0, 2021-12-08 21:28:06
 */
public class AlipayBusinessMallPointsNotifyResponse extends AlipayResponse {

    private static final long serialVersionUID = 3816813297257575417L;

}
<gh_stars>1-10
#include "cipher/aes_cipher.h"
#include "cipher/ctr_cipher_mode.h"
#include "string/extended_std_string.h"
#include "cipher/counter.h"
#include "hex_coding/hex_coding.h"
#include "gtest/gtest.h"
#include <vector>
// A plaintext/ciphertext test-vector pair.
// NOTE(review): appears unused in this translation unit — confirm before removing.
struct PlaintextCiphertextPair {
    ExtendedStdString plaintext;
    ExtendedStdString ciphertext;
};
/* BlockCipher is currently tested from three angles:
 * 1. One encryption (or decryption) pass may be split into any number of
 *    update() calls plus a single final() call; however the pass is split,
 *    the result must be identical.
 * 2. After a full encryption (or decryption) pass the BlockCipher object's
 *    state resets, i.e. running the same plaintext (or ciphertext) through
 *    another pass must produce the same result again.
 * 3. If the BlockCipher implementation supports in-place encryption (or
 *    decryption), that path should be covered too.
 */
TEST(AES256CTRCipherTest,test)
{
    /* Test plan:
     *
     * 1. Let plaintext P have length x * kBlockSize + y, where x is the
     *    number of whole blocks in P and y is the length of the trailing
     *    partial block.
     * 2. P is consumed by z update() calls plus one final() call; the z
     *    update() calls take input lengths A[0], A[1], ..., A[z - 1], where:
     *    - A[0], ..., A[z - 2] are each kBlockSize;
     *    - A[z - 1] = (x - (z - 1)) * kBlockSize;
     *
     *    final() takes the remaining y bytes; in the code below the last
     *    Update() is folded into the final() call.
     */
    constexpr size_t kPlaintextBlockNumMax = 33;
    // constexpr size_t kPlaintextSizeMax = kPlaintextBlockNumMax * AESCipher::kBlockSize + (AESCipher::kBlockSize - 1);

    // 32-byte key => AES-256.
    const ExtendedStdString key32 = HexDecode<ExtendedStdString>("0102030405060708091011121314151617181920212223242526272829303132");
    ASSERT_EQ(32,key32.size());
    AESCipher cipher;
    cipher.SetKey(key32.const_raw_data(),key32.size());
    pp_qq::Counter counter_impl;
    CTRCipherMode ctr_mode;
    ctr_mode.SetCounterImpl(&counter_impl);
    BlockCipher block_cipher(&cipher,&ctr_mode,nullptr);

    for (size_t x = 0; x <= kPlaintextBlockNumMax; ++x) {
        for (size_t y = 0; y < AESCipher::kBlockSize; ++y) {
            size_t const plaintext_size = x * AESCipher::kBlockSize + y;
            const ExtendedStdString expect_plaintext(plaintext_size,'x');
            ExtendedStdString expect_ciphertext;
            ASSERT_EQ(plaintext_size,expect_plaintext.size());

            // Test 1: encryption split into z update() calls + one final();
            // every split must yield the same ciphertext.
            for (int z = 0; z <= static_cast<int>(x); ++z) {
                ExtendedStdString ciphertext;
                for (int d = 0; d <= z - 2; ++d) {
                    auto size_before = ciphertext.size();
                    EXPECT_EQ(AESCipher::kBlockSize,block_cipher.EncryptUpdate(ciphertext,expect_plaintext.const_raw_data() + d * AESCipher::kBlockSize,AESCipher::kBlockSize));
                    EXPECT_EQ(AESCipher::kBlockSize,ciphertext.size() - size_before);
                }
                // Feed whatever is left to EncryptFinal in one shot.
                auto *left_ptr = expect_plaintext.const_raw_data();
                if (z - 1 > 0)
                    left_ptr += (z - 1) * AESCipher::kBlockSize;
                auto left_size = expect_plaintext.const_raw_data() + expect_plaintext.size() - left_ptr;
                {
                    auto size_before = ciphertext.size();
                    EXPECT_EQ(left_size,block_cipher.EncryptFinal(ciphertext,left_ptr,left_size));
                    EXPECT_EQ(left_size,ciphertext.size() - size_before);
                }
                ASSERT_EQ(expect_plaintext.size(),ciphertext.size());
                if (!expect_plaintext.empty())
                    ASSERT_NE(expect_plaintext,ciphertext);
                // First split establishes the expected ciphertext; later
                // splits must reproduce it exactly (checks state reset too).
                if (expect_ciphertext.empty())
                    expect_ciphertext = ciphertext;
                else
                    EXPECT_EQ(expect_ciphertext,ciphertext);
            }

            // Tests 2 and 3: in-place encryption (input buffer == output
            // buffer), repeated across splits to verify state reset.
            for (int z = 0; z <= static_cast<int>(x); ++z) {
                ExtendedStdString ciphertext = expect_plaintext;
                char *iptr = ciphertext.raw_data();
                for (int d = 0; d <= z - 2; ++d) {
                    auto cipher_result = block_cipher.EncryptUpdate(iptr,AESCipher::kBlockSize,iptr,AESCipher::kBlockSize);
                    EXPECT_EQ(AESCipher::kBlockSize,cipher_result.input_size);
                    EXPECT_EQ(AESCipher::kBlockSize,cipher_result.output_size);
                    iptr += AESCipher::kBlockSize;
                }
                {
                    size_t left_size = ciphertext.const_raw_data() + ciphertext.size() - iptr;
                    auto cipher_result = block_cipher.EncryptFinal(iptr,left_size,iptr,left_size);
                    EXPECT_EQ(left_size,cipher_result.input_size);
                    EXPECT_EQ(left_size,cipher_result.output_size);
                }
                if (!expect_plaintext.empty())
                    ASSERT_NE(expect_plaintext,ciphertext);
                EXPECT_EQ(expect_ciphertext,ciphertext);
            }

            // Decryption test 1: split decryption must recover the plaintext.
            for (int z = 0; z <= static_cast<int>(x); ++z) {
                ExtendedStdString plaintext;
                const char *iptr = expect_ciphertext.const_raw_data();
                for (int d = 0; d <= z - 2; ++d) {
                    size_t size_before = plaintext.size();
                    auto ret_size = block_cipher.DecryptUpdate(plaintext,iptr,AESCipher::kBlockSize);
                    EXPECT_EQ(AESCipher::kBlockSize,ret_size);
                    EXPECT_EQ(AESCipher::kBlockSize,plaintext.size() - size_before);
                    iptr += AESCipher::kBlockSize;
                }
                {
                    size_t left_size = expect_ciphertext.const_raw_data() + expect_ciphertext.size() - iptr;
                    size_t size_before = plaintext.size();
                    size_t ret_size = block_cipher.DecryptFinal(plaintext,iptr,left_size);
                    EXPECT_EQ(left_size,ret_size);
                    EXPECT_EQ(left_size,plaintext.size() - size_before);
                }
                EXPECT_EQ(expect_plaintext,plaintext);
            }

            // Decryption tests 2 and 3: in-place decryption.
            for (int z = 0; z <= static_cast<int>(x); ++z) {
                ExtendedStdString plaintext = expect_ciphertext;
                char *iptr = plaintext.raw_data();
                for (int d = 0; d <= z - 2; ++d) {
                    auto cipher_result = block_cipher.DecryptUpdate(iptr,AESCipher::kBlockSize,iptr,AESCipher::kBlockSize);
                    EXPECT_EQ(AESCipher::kBlockSize,cipher_result.input_size);
                    EXPECT_EQ(AESCipher::kBlockSize,cipher_result.output_size);
                    iptr += AESCipher::kBlockSize;
                }
                {
                    size_t left_size = plaintext.const_raw_data() + plaintext.size() - iptr;
                    auto cipher_result = block_cipher.DecryptFinal(iptr,left_size,iptr,left_size);
                    EXPECT_EQ(left_size,cipher_result.input_size);
                    EXPECT_EQ(left_size,cipher_result.output_size);
                }
                EXPECT_EQ(expect_plaintext,plaintext);
            }
        }
    }
}
|
<filename>src/main/scala/Algorithms/Recursion/Min.scala
package Algorithms.Recursion
/**
* Created by MikBac on 29.09.2020
*/
object Min {
  /**
   * Returns the smallest element of a non-empty list.
   *
   * The original recursion called `xs.length` at every level, an O(n)
   * operation per call (O(n^2) overall); the pattern match below checks
   * the shape in O(1). An empty list now fails fast with a clear message
   * (the original also threw NoSuchElementException, via `Nil.head`).
   *
   * @throws NoSuchElementException if the list is empty
   */
  def min(xs: List[Int]): Int = xs match {
    case Nil          => throw new NoSuchElementException("min of empty list")
    case head :: Nil  => head
    case head :: tail => min(head, min(tail))
  }

  /** Returns the smaller of the two integers. */
  def min(numb1: Int, numb2: Int): Int =
    if (numb1 < numb2) numb1 else numb2

  def main(args: Array[String]): Unit = {
    println(min(List(1, 2, 6, 0, 9, 1, 2)))
    println(min(List(1, 2, 3, 4)))
    println(min(List(6, 4, 6)))
  }
}
|
package com.unboundid.ldap.sdk;
/**
 * Stub of UnboundID's {@code LDAPConnection} exposing only the search-related
 * overloads; every method returns {@code null} and performs no I/O.
 * NOTE(review): presumably a compile-time shim for building/testing against a
 * subset of the real SDK — confirm before adding behavior here.
 */
public class LDAPConnection {
    /** Asynchronous search (read-only request variant); stub: returns null. */
    public AsyncRequestID asyncSearch(ReadOnlySearchRequest searchRequest) throws LDAPException { return null; }
    /** Asynchronous search; stub: returns null. */
    public AsyncRequestID asyncSearch(SearchRequest searchRequest) throws LDAPException { return null; }
    /** Synchronous search (read-only request variant); stub: returns null. */
    public SearchResult search(ReadOnlySearchRequest searchRequest) throws LDAPSearchException { return null; }
    /** Synchronous search; stub: returns null. */
    public SearchResult search(SearchRequest searchRequest) throws LDAPSearchException { return null; }
    /** Listener-based search with a parsed {@link Filter}; stub: returns null. */
    public SearchResult search(SearchResultListener searchResultListener, String baseDN, SearchScope scope, DereferencePolicy derefPolicy,
            int sizeLimit, int timeLimit, boolean typesOnly, Filter filter, String... attributes) throws LDAPSearchException { return null; }
    /** Listener-based search with a string filter; stub: returns null. */
    public SearchResult search(SearchResultListener searchResultListener, String baseDN, SearchScope scope, DereferencePolicy derefPolicy,
            int sizeLimit, int timeLimit, boolean typesOnly, String filter, String... attributes) throws LDAPSearchException { return null; }
    /** Listener-less search with a string filter; stub: returns null. */
    public SearchResult search(String baseDN, SearchScope scope, DereferencePolicy derefPolicy, int sizeLimit, int timeLimit,
            boolean typesOnly, String filter, String... attributes) throws LDAPSearchException { return null; }
    /** Single-entry search; stub: returns null. */
    public SearchResultEntry searchForEntry(String baseDN, SearchScope scope, DereferencePolicy derefPolicy, int timeLimit,
            boolean typesOnly, String filter, String... attributes) throws LDAPSearchException { return null; }
}
|
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import {
Close16,
WarningFilled16,
CheckmarkFilled16,
} from '@rocketsoftware/icons-react';
import { settings } from '@rocketsoftware/carbon-components';
import PropTypes from 'prop-types';
import React from 'react';
import Loading from '../Loading';
const { prefix } = settings;
/**
 * Renders the status indicator for a file in the FileUploader:
 * a spinner while uploading, a remove button (plus an optional invalid
 * warning) in edit mode, and a checkmark when the upload is complete.
 * Extra props are spread onto the interactive/status element.
 */
function Filename({ iconDescription, status, invalid, ...rest }) {
  switch (status) {
    case 'uploading':
      return (
        <Loading description={iconDescription} small withOverlay={false} />
      );
    case 'edit':
      return (
        <>
          {invalid && <WarningFilled16 className={`${prefix}--file-invalid`} />}
          <button
            aria-label={iconDescription}
            className={`${prefix}--file-close`}
            type="button"
            {...rest}>
            <Close16 />
          </button>
        </>
      );
    case 'complete':
      return (
        <CheckmarkFilled16
          aria-label={iconDescription}
          className={`${prefix}--file-complete`}
          {...rest}>
          {iconDescription && <title>{iconDescription}</title>}
        </CheckmarkFilled16>
      );
    default:
      // Unknown status: render nothing.
      return null;
  }
}

Filename.propTypes = {
  /**
   * Provide a description of the SVG icon to denote file upload status
   */
  iconDescription: PropTypes.string,

  /**
   * Specify if the file is invalid
   */
  invalid: PropTypes.bool,

  /**
   * Status of the file upload
   */
  status: PropTypes.oneOf(['edit', 'complete', 'uploading']),

  /**
   * Provide a custom tabIndex value for the <Filename>
   */
  tabIndex: PropTypes.string,
};

Filename.defaultProps = {
  iconDescription: 'Uploading file',
  status: 'uploading',
  tabIndex: '0',
};
|
<reponame>afreeorange/wifi-qrcode-generator
// Wires up the WiFi QR-code generator page: regenerates the QR code as the
// network name/password/colors change, and downloads it as a PNG on click.
(() => {
  const IMAGE_SIZE_IN_PIXELS = 1024;
  const $ = document.querySelector.bind(document);

  const qrcode = new QRCode($(".qrcode"), {
    text: "http://jindo.dev.naver.com/collie",
    width: IMAGE_SIZE_IN_PIXELS,
    height: IMAGE_SIZE_IN_PIXELS,
    colorDark: "#000000",
    colorLight: "#ffffff",
    correctLevel: QRCode.CorrectLevel.H,
    useSVG: true,
  });

  // Build the standard WIFI: QR payload (WPA networks only).
  const generateWifiString = (networkName, networkPassword) =>
    `WIFI:T:WPA;S:${networkName};P:${networkPassword};;`;

  // Regenerate the QR code from the current form values.
  // (Factors out the payload construction the original repeated in
  // every event handler.)
  const refresh = () =>
    qrcode.makeCode(generateWifiString($(".name").value, $(".password").value));

  // Download the current QR image as a PNG named after the network.
  // (Was duplicated verbatim across two click handlers.)
  const downloadQRCode = () =>
    download(
      $(".qrcode").getElementsByTagName("img")[0].src,
      $(".name").value + ".png",
      "image/png",
    );

  // Values 'stick' for some reason
  $(".color-foreground").value = "#000000";
  $(".color-background").value = "#FFFFFF";

  $(".name").addEventListener("keyup", refresh);
  $(".password").addEventListener("keyup", refresh);

  $(".color-foreground").addEventListener("change", e => {
    // NOTE(review): if this is an <input type="color">, e.target.value
    // already starts with '#', so this produces '##rrggbb' — confirm the
    // input type before changing.
    qrcode._htOption.colorDark = "#" + e.target.value;
    refresh();
  });
  $(".color-background").addEventListener("change", e => {
    qrcode._htOption.colorLight = "#" + e.target.value;
    refresh();
  });

  $(".download").addEventListener("click", downloadQRCode);
  $(".qrcode").addEventListener("click", downloadQRCode);
})();
|
# Benchmark sweep: runs each benchmark across several node counts, first at
# a large fixed size (2**30, 1 trial), then at the benchmark's defaults.
# NOTE(review): $workdir is not set in this script — presumably exported by
# the surrounding job environment; confirm.
echo "In run.sh script!"
date
chpl --version
module list

BENCHMARKS="stream argsort gather scatter reduce scan"
node_counts=(1 2 4 8 16 32)

for benchmark in ${BENCHMARKS}; do
  # Large-problem pass (results archived as lg-<benchmark>.dat).
  for i in {1..1}; do
    for nodes in "${node_counts[@]}"; do
      # Fresh output dir per node count.
      dir=$PWD/ak-perf-$nodes
      rm -rf $dir
      ./benchmarks/run_benchmarks.py -nl $nodes --dat-dir $dir --gen-graphs $benchmark --size=$((2**30)) --trials=1
    done
  done
  ./print.py
  mv "$benchmark.dat" "lg-$benchmark.dat"

  # Default-size pass.
  for i in {1..1}; do
    for nodes in "${node_counts[@]}"; do
      dir=$PWD/ak-perf-$nodes
      rm -rf $dir
      ./benchmarks/run_benchmarks.py -nl $nodes --dat-dir $dir --gen-graphs $benchmark
    done
  done
  ./print.py
done
date
cd $workdir
echo "DONE"
import { Injectable } from "@angular/core";
import { StarPRNT } from "@ionic-native/star-prnt/ngx";
import { Printer, PrintOptions } from "@ionic-native/printer/ngx";
import { AlertController, Platform } from "@ionic/angular";
import { File } from "@ionic-native/file/ngx";
import { FileOpener } from "@ionic-native/file-opener/ngx";
import { BluetoothSerial } from "@ionic-native/bluetooth-serial/ngx";
import { commands } from "../printer-commands";
import pdfMake from "pdfmake/build/pdfmake";
import pdfFonts from "pdfmake/build/vfs_fonts";
import { DateProvider } from "../../service/date";
pdfMake.vfs = pdfFonts.pdfMake.vfs;
import { MutableBuffer } from "mutable-buffer";
@Injectable({
providedIn: "root"
})
export class PrinterService {
  // cordova-plugin-printer options used by the generic print path.
  options: PrintOptions = {
    name: "MyDocument",
    printerId: "printer007",
    duplex: true,
    landscape: true,
    grayscale: true
  };
  // Byte buffer the ESC/POS receipt is assembled into before BT transfer.
  buff = new MutableBuffer(1024, 1024);
  // Last pdfmake document created by createCommandePdf().
  pdfObj = null;
  // Company record loaded from localStorage (see constructor); used in headers.
  societe: any;
  // Bluetooth address of the printer chosen in searchBt().
  address: any;
  // Rebate total, set by the public print entry points before printing.
  ristournes = 0;
  constructor(
    private starprnt: StarPRNT,
    private printer: Printer,
    public alert: AlertController,
    public plt: Platform,
    private file: File,
    private fileOpener: FileOpener,
    private dateP: DateProvider,
    private btSerial: BluetoothSerial
  ) {
    // Company info ("societe") is cached in localStorage elsewhere in the
    // app; it feeds the receipt/PDF headers when present.
    if (localStorage.getItem("societe")) {
      this.societe = JSON.parse(localStorage.getItem("societe"));
    }
  }
  /**
   * Build the receipt bytes for `commande`, list paired Bluetooth devices,
   * let the user pick a printer from a radio-button alert, then print.
   * Does nothing when `commande` is falsy or when not running under Cordova.
   */
  searchBt(commande?: any) {
    let that = this;
    if (commande) {
      // Fills this.buff with the ESC/POS receipt for the order.
      this.createCommandePdfMiniPrint(commande);
      console.log(this.buff);
      if (this.plt.is("cordova")) {
        this.btSerial.list().then(async dataList => {
          // One radio entry per paired device; value carries the BT id.
          let allPrint = [];
          for (let i = 0; i < dataList.length; i++) {
            allPrint.push({
              name: "idPrinter",
              type: "radio",
              label: dataList[i].name,
              value: dataList[i].id
            });
          }
          const alert = await that.alert.create({
            header: "Imprimantes",
            inputs: allPrint,
            buttons: [
              {
                text: "Cancel",
                role: "cancel",
                cssClass: "secondary",
                handler: () => {}
              },
              {
                text: "Ok",
                handler: data => {
                  // Remember the chosen device, then send the buffered bytes.
                  that.address = data;
                  that.Printer(this.buff.buffer);
                }
              }
            ]
          });
          alert.present();
        });
      }
    }
  }
  /** Opens a Bluetooth-serial connection to `address`; returns the plugin's Observable. */
  connectBT(address) {
    return this.btSerial.connect(address);
  }
async presentAlert(msg) {
let mno = await this.alert.create({
header: msg,
buttons: ["OK"]
});
}
Printer(dataToPrint) {
if (this.plt.is("cordova")) {
let xyz = this.connectBT(this.address).subscribe(
data => {
this.btSerial.write(this.buff.buffer).then(
dataz => {
this.presentAlert("Print SUCCESS!");
this.buff.clear();
xyz.unsubscribe();
},
errx => {
this.presentAlert("ERROR " + errx);
this.buff.clear();
xyz.unsubscribe();
}
);
},
err => {
this.presentAlert("ERROR " + err);
this.buff.clear();
}
);
}
}
  /** Public entry point: print `commande` as a receipt on a Bluetooth printer. */
  public printDocumentBTPrinter(commande, ristourne) {
    // Stash the rebate total for createCommandePdfMiniPrint().
    this.ristournes = ristourne;
    this.searchBt(commande);
  }
  /** Discover StarPRNT printer ports and show the raw result in an alert (debug helper). */
  public printDocument() {
    this.starprnt
      .portDiscovery("all")
      .then((res: any) => alert(res))
      .catch((error: any) => console.error(error));
  }
  /**
   * Print raw text through the StarPRNT plugin.
   *
   * @param port printer port identifier (e.g. from portDiscovery())
   * @param printObj raw text/command object accepted by printRawText()
   * @param emulation one of "StarPRNT", "StarPRNTL", "StarLine",
   *   "StarGraphic", "EscPos", "EscPosMobile", "StarDotImpact".
   *   NOTE(review): the literal type `?: "StarPRNT"` only permits that one
   *   value and provides no runtime default — confirm intent.
   */
  printWithStartPrinter(port, printObj, emulation?: "StarPRNT") {
    this.starprnt.printRawText(port, emulation, printObj);
  }
  /**
   * "Print" an order via the generic printer path: currently generates the
   * order PDF and downloads/opens it. The cordova-plugin-printer call is
   * commented out below (kept for reference).
   */
  printwithPrinter(commande, ristourne) {
    this.ristournes = ristourne;
    // NOTE(review): 'that' is only used by the commented-out block below.
    let that = this;
    /* this.printer.isAvailable().then(
      resp => {
        that.printer
          .print(content, this.options)
          .then(onSuccess => {}, onError => {});
      },
      err => {}
    ); */
    this.downloadPdf(commande);
  }
  /**
   * Assemble the ESC/POS receipt for an order into this.buff:
   * company header, order/client metadata, product and packaging tables,
   * totals (HT/TVA/PSA/TTC, optional transport, net to pay, rebates),
   * signature area, and a thank-you footer, followed by a paper cut.
   * The caller (searchBt) then sends this.buff.buffer over Bluetooth.
   */
  createCommandePdfMiniPrint(LCommande): any {
    // u can remove this when generate the receipt using another method
    // NOTE(review): 'receipt' is never used — confirm and remove.
    let receipt = "";

    // --- Company header (centered, bold) ---
    if (this.societe.nom) {
      this.buff.write(commands.HARDWARE.HW_INIT);
      this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
      this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
      this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_CT);
      this.buff.write(this.societe.nom);
      this.buff.write(commands.EOL);
    }

    // --- Order / client metadata ---
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write("commande: " + LCommande.id);
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write("client: " + LCommande.client.nom);
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write("date: " + LCommande.datecc);
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write("livraison: " + LCommande.dateliv);
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write("Statut : " + LCommande.etatc.nom);

    // --- Product table header ---
    // Running totals: amount (total) and quantity (totalP).
    let total = 0;
    let totalP = 0;
    this.buff.write(commands.EOL + commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write("Produits \n");
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write("Article \t\t ");
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_OFF);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write("Quantite \t\t ");
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_OFF);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write("Prix \n");
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_OFF);
    this.buff.write(commands.EOL);

    if (LCommande.tcommandesList) {
      // Split line items: packaging article codes contain VRAP/VRACC,
      // everything else is a product.
      var Article = LCommande.tcommandesList.filter(function(elt) {
        return (
          !elt.article.code.includes("VRAP") &&
          !elt.article.code.includes("VRACC")
        );
      });
      var emballage = LCommande.tcommandesList.filter(function(elt) {
        return (
          elt.article.code.includes("VRAP") ||
          elt.article.code.includes("VRACC")
        );
      });

      // Product rows.
      for (let i = 0; i < Article.length; i++) {
        let item = Article[i];
        this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
        this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
        this.buff.write(item.article.code + "\t\t ");
        this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
        this.buff.write(this.formatNumber(item.quantite) + "\t\t ");
        this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
        this.buff.write(this.formatNumber(item.prixTotal + "\n"));
        total += item.prixTotal;
        totalP += item.quantite;
      }

      // Packaging rows.
      this.buff.write(commands.EOL + commands.EOL);
      this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
      this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
      this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
      this.buff.write("Emballages \n");
      for (let i = 0; i < emballage.length; i++) {
        let item = emballage[i];
        this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
        this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
        this.buff.write(item.article.code + "\t\t ");
        this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
        this.buff.write(this.formatNumber(item.quantite) + "\t\t ");
        this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
        this.buff.write(this.formatNumber(item.prixTotal + "\n"));
        total += item.prixTotal;
        totalP += item.quantite;
      }
    }

    // --- Footer: totals ---
    this.buff.write(commands.EOL);
    this.buff.write(commands.HORIZONTAL_LINE.HR_58MM);
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('Qte Produit : ' + this.formatNumber(totalP));
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('Total : ' + this.formatNumber(total) + ' FCFA');

    // Tax breakdown: total is treated as TTC; HT = TTC / 1.2425,
    // TVA = 19.25% of HT, PSA = 5% of HT (all rounded).
    const pourcentage = 1.2425;
    var totalHT = Math.round(total / pourcentage);
    var tva = Math.round(totalHT * (19.25 / 100));
    var prov = Math.round(totalHT * (5 / 100));
    this.buff.write(commands.EOL);
    this.buff.write(commands.HORIZONTAL_LINE.HR_58MM);
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('Montant HT : \t' + this.formatNumber(totalHT) + ' FCFA');
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('Retenu TVA(19,25%) : \t' + this.formatNumber(tva) + ' FCFA');
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('PSA(5%) : \t' + this.formatNumber(prov) + ' FCFA');
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('Montant TTC : \t' + this.formatNumber(total) + ' FCFA');
    this.buff.write(commands.EOL);
    this.buff.write(commands.HORIZONTAL_LINE.HR_58MM);
    this.buff.write(commands.EOL);

    // Optional transport fee, added into the net-to-pay amount.
    if (LCommande.transport) {
      total += LCommande.transport;
      this.buff.write(commands.EOL);
      this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
      this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
      this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
      this.buff.write('Transport : \t' + this.formatNumber(LCommande.transport) + ' FCFA');
    }
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_CT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('NET A PAYER : \t' + this.formatNumber(total) + ' FCFA');

    // Rebates: current order margin + accumulated total (this.ristournes).
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    if (LCommande.margeClient) {
      this.buff.write('Rist encours : \t' + this.formatNumber(LCommande.margeClient) + ' FCFA');
      this.buff.write(commands.EOL);
      this.buff.write('Total Rist : \t' + this.formatNumber(this.ristournes) + ' FCFA');
    }

    // --- Signature area ---
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_CT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('\t\t SIGNATURE \t');
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_LT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('VENDEUR \t\t CLIENT');
    this.buff.write(commands.EOL);
    this.buff.write(commands.EOL);
    this.buff.write(commands.EOL);
    this.buff.write(commands.EOL);
    this.buff.write(commands.EOL);
    this.buff.write(commands.EOL);
    this.buff.write(commands.TEXT_FORMAT.TXT_NORMAL);
    this.buff.write(commands.TEXT_FORMAT.TXT_ALIGN_CT);
    this.buff.write(commands.TEXT_FORMAT.TXT_BOLD_ON);
    this.buff.write('MERCI DE VOTRE CONFIANCE !!!');

    //code bar
    /* const obj= {
      id:btoa(LCommande.id),
      type:'commande'
    };
    const qr =JSON.stringify(obj);
    this.buff.write(commands.CODE2D_FORMAT.CODE2D);
    this.buff.writeUInt8(3);
    this.buff.writeUInt8(3);
    this.buff.writeUInt8(8);
    this.buff.writeUInt16LE(qr.length);
    this.buff.write(qr); */

    //secure space on footer
    this.buff.write(commands.EOL);
    this.buff.write(commands.EOL);
    this.buff.write(commands.EOL);
    this.buff.write(commands.EOL);
    this.buff.write(commands.PAPER.PAPER_FULL_CUT);
    this.buff.write(commands.HARDWARE.HW_INIT);
    this.buff.flush();
  }
  /**
   * Build the pdfmake document definition for an order (A4, with dated
   * header/footer and page numbers) and store the created PDF in
   * this.pdfObj for downloadPdf() to deliver.
   */
  createCommandePdf(commande) {
    var docDefinition;
    var firstdata = commande;
    let that = this;
    docDefinition = {
      info: {
        title: "OCM DOCUMENT",
        author: "Lalanda from EH2S",
        subject: "subject of document",
        keywords: "keywords for document"
      },
      pageMargins: [20, 30, 20, 30],
      pageSize: "A4",
      // Header: current date on the left, "page / pageCount" on the right.
      header: function(currentPage, pageCount) {
        var tfoo = {
          columns: [
            {
              text: "" + that.getDate(),
              alignment: "left",
              style: "footerleft"
            },
            {
              text: currentPage.toString() + " / " + pageCount,
              alignment: "right",
              style: "footer"
            }
          ]
        };
        return tfoo;
      },
      // Footer: same layout as the header plus a center link placeholder.
      footer: function(currentPage, pageCount) {
        var tfoo = {
          columns: [
            {
              text: "" + that.getDate(),
              alignment: "left",
              style: "footerleft"
            },
            {
              text: " ",
              alignment: "center",
              style: "footercenter",
              link: "https://www.eh2s.com"
            },
            {
              text: currentPage.toString() + " / " + pageCount,
              alignment: "right",
              style: "footer"
            }
          ]
        };
        return tfoo;
      },
      content: that.getContentCommande(firstdata),
      // Break before level-1 headlines stranded at the bottom of a page.
      pageBreakBefore: function(
        currentNode,
        followingNodesOnPage,
        nodesOnNextPage,
        previousNodesOnPage
      ) {
        return (
          currentNode.headlineLevel === 1 && followingNodesOnPage.length === 0
        );
      },
      // Named styles referenced from getContentCommande().
      styles: {
        img: {
          margin: [0, 0, 0, 0]
        },
        header_center_top: {
          fontSize: 12,
          bold: true,
          margin: [35, 5, 0, 10],
          alignment: "center"
        },
        header_center_bottom: {
          bold: false,
          margin: [0, 0, 0, 0]
        },
        subheader: {
          fontSize: 12,
          bold: true,
          margin: [80, 0, 0, 0]
        },
        headertable: {
          margin: [0, 20, 0, 10]
        },
        title: {
          fontSize: 12,
          bold: true,
          alignment: "center"
        },
        subtitle: {
          fontSize: 10,
          bold: false,
          margin: [170, 10, 0, 5]
        },
        tableExample: {
          margin: [0, 5, 0, 0]
        },
        tableHeader: {
          bold: true,
          fontSize: 10
        },
        tableFooter: {
          bold: true,
          fontSize: 12,
          margin: [0, 7, 0, 10]
        },
        tableContent: {
          bold: false,
          fontSize: 9,
          margin: [0, 0, 0, 0]
        },
        footer: {
          bold: true,
          fontSize: 8,
          margin: [0, 0, 20, 0]
        },
        footerleft: {
          bold: true,
          fontSize: 8,
          margin: [20, 0, 0, 0]
        },
        footercenter: {
          bold: true,
          fontSize: 8,
          margin: [0, 0, 0, 0]
        }
      },
      defaultStyle: {
        // alignment: 'justify'
      }
    };
    this.pdfObj = pdfMake.createPdf(docDefinition);
  }
  /**
   * Create the order PDF and deliver it: under Cordova, write it to the app
   * data directory and open it with the system PDF viewer; in a browser,
   * trigger a regular download.
   */
  downloadPdf(commande) {
    const name = "commande_" + this.dateP.getDate() + ".pdf";
    this.createCommandePdf(commande);
    if (this.plt.is("cordova")) {
      this.pdfObj.getBuffer(buffer => {
        // Wrap the raw buffer in a Blob so the file plugin can write it.
        var utf8 = new Uint8Array(buffer);
        var binaryArray = utf8.buffer;
        var blod = new Blob([binaryArray], { type: "application/pdf" });
        this.file
          .writeFile(this.file.dataDirectory, name, blod, {
            replace: true
          })
          .then(fileEntry => {
            this.fileOpener.open(
              this.file.dataDirectory + name,
              "application/pdf"
            );
          });
      });
    } else {
      this.pdfObj.download(name);
    }
  }
  /**
   * Build the pdfmake `content` array for a commande (order) PDF: company
   * header table, document title, client / order-detail columns, the
   * line-item table with price and quantity totals, signature columns and
   * a QR code encoding the order id.
   *
   * @param data commande record -- assumes fields id, client, datecc,
   *             dateliv, etatc and tcommandesList based on usage below;
   *             TODO confirm against the caller's data shape.
   * @returns array of pdfmake content nodes
   */
  getContentCommande(data) {
    var content = new Array();
    let img: any;
    if (this.societe.logo) {
      // Company logo provided as a base64-encoded PNG.
      img = {
        // if you specify width, image will scale proportionally
        image: "data:image/png;base64," + this.societe.logo,
        width: 70,
        height: 70,
        style: "img"
      };
    } else {
      // No logo configured: empty placeholder with identical dimensions so
      // the header table keeps the same layout.
      img = {
        // if you specify width, image will scale proportionally
        text: "",
        width: 70,
        height: 70,
        style: "img"
      };
    }
    // Header table: logo | company name + contact info | empty spacer cell.
    content.push({
      style: "headertable",
      table: {
        widths: ["*", "auto", "*"],
        body: [
          [
            img,
            [
              { text: this.societe.nom, style: "header_center_top" },
              {
                text: this.societe.email + "\t Tel : " + this.societe.tel,
                style: "header_center_bottom",
                alignment: "center"
              }
            ],
            { text: "", style: "subheader" }
          ]
        ]
      },
      layout: {
        // Only the outer frame is drawn in black; interior grid lines are
        // white (invisible) so the header renders as a single box.
        hLineWidth: function(i, node) {
          return i === 0 || i === node.table.body.length ? 2 : 1;
        },
        vLineWidth: function(i, node) {
          return i === 0 || i === node.table.widths.length ? 2 : 1;
        },
        hLineColor: function(i, node) {
          return i === 0 || i === node.table.body.length ? "black" : "white";
        },
        vLineColor: function(i, node) {
          return i === 0 || i === node.table.widths.length ? "black" : "white";
        }
      }
    });
    // Document title including the order number.
    var header = {
      margin: [30, 10, 0, 10],
      text: [
        {
          text: "<NAME> \t N° " + data.id,
          fontSize: 15,
          italics: true,
          bold: true,
          alignment: "center"
        }
      ]
    };
    content.push(header);
    // Two-column section: client info (left) and order details (right),
    // separated by a fixed-width spacer column.
    const colun1 = {
      columns: [
        {
          width: "*",
          text: [
            {
              text: "Info Client \n\n",
              bold: true,
              alignment: "center",
              color: "#36cee2",
              margin: [0, 10, 0, 40]
            },
            { text: "Nom : \t", bold: true },
            { text: data.client.nom + " \n" },
            { text: "Tel : \t", bold: true },
            { text: data.client.tel }
          ]
        },
        {
          width: 70,
          text: ""
        },
        {
          width: "*",
          text: [
            {
              text: "Detail commande \n\n",
              bold: true,
              alignment: "center",
              color: "#36cee2"
            },
            { text: "Crée le : \t", bold: true },
            { text: data.datecc + " \n" },
            { text: "Echeance : \t", bold: true },
            { text: data.dateliv + " \n" },
            { text: "Statut : \t", bold: true },
            { text: data.etatc.nom + " \n" }
          ]
        }
      ]
    };
    content.push(colun1);
    // Line-item table: accumulate grand totals (price and quantity) while
    // building one table row per order line.
    var total = 0;
    var totalP = 0;
    var bodyCMD = [
      [
        { text: "code", bold: true },
        { text: "Article", bold: true },
        { text: "P.U", bold: true },
        { text: "Qte", bold: true },
        { text: "P.T", bold: true }
      ]
    ];
    for (var i = 0; i < data.tcommandesList.length; i++) {
      var cmd = [];
      var item = data.tcommandesList[i];
      cmd.push({ text: item.article.code });
      cmd.push({ text: item.article.nom, fontSize: 9 });
      cmd.push({ text: "" + this.formatNumber(item.prix), fontSize: 9 });
      cmd.push({ text: "" + this.formatNumber(item.quantite), fontSize: 9 });
      cmd.push({ text: "" + this.formatNumber(item.prixTotal), fontSize: 9 });
      total += item.prixTotal;
      totalP += item.quantite;
      bodyCMD.push(cmd);
    }
    var description = {
      style: "headertable",
      table: {
        widths: ["auto", "*", "auto", "auto", "auto"],
        body: bodyCMD,
        pageBreak: "after",
        margin: [0, 0, 20, 0]
      }
    };
    content.push(description);
    // Totals displayed below the line-item table.
    content.push({
      text: "Qte Total : " + this.formatNumber(totalP),
      bold: true,
      color: "#00b5b8"
    });
    content.push({
      text: "Total : " + this.formatNumber(total),
      bold: true,
      color: "#00b5b8"
    });
    content.push({ text: "\n\n" });
    // Signature columns for the seller (Vendeur) and the client.
    const colun2 = {
      columns: [
        {
          width: "*",
          text: "Vendeur",
          bold: true
        },
        {
          width: "*",
          text: ""
        },
        {
          width: "*",
          text: "Client",
          bold: true
        }
      ]
    };
    content.push(colun2);
    // QR code payload: base64-encoded order id plus a type discriminator.
    const obj = {
      id: btoa(data.id),
      type: "commande"
    };
    const qr = JSON.stringify(obj);
    content.push({ qr: qr, alignment: "center", fit: 50 });
    return content;
  }
noSpecialChars(string) {
var translate = {
à: "a",
á: "a",
â: "a",
ã: "a",
ä: "a",
å: "a",
æ: "a",
ç: "c",
è: "e",
é: "e",
ê: "e",
ë: "e",
ì: "i",
í: "i",
î: "i",
ï: "i",
ð: "d",
ñ: "n",
ò: "o",
ó: "o",
ô: "o",
õ: "o",
ö: "o",
ø: "o",
ù: "u",
ú: "u",
û: "u",
ü: "u",
ý: "y",
þ: "b",
ÿ: "y",
ŕ: "r",
À: "A",
Á: "A",
Â: "A",
Ã: "A",
Ä: "A",
Å: "A",
Æ: "A",
Ç: "C",
È: "E",
É: "E",
Ê: "E",
Ë: "E",
Ì: "I",
Í: "I",
Î: "I",
Ï: "I",
Ð: "D",
Ñ: "N",
Ò: "O",
Ó: "O",
Ô: "O",
Õ: "O",
Ö: "O",
Ø: "O",
Ù: "U",
Ú: "U",
Û: "U",
Ü: "U",
Ý: "Y",
Þ: "B",
Ÿ: "Y",
Ŕ: "R"
},
translate_re = /[àáâãäåæçèéêëìíîïðñòóôõöøùúûüýþßàáâãäåæçèéêëìíîïðñòóôõöøùúûýýþÿŕŕÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÝÝÞŸŔŔ]/gim;
return string.replace(translate_re, function(match) {
return translate[match];
});
}
formatNumber(number) {
let formated = "";
let format = number.toString();
let count = 0;
for (let i = format.length - 1; i >= 0; i--) {
if (count % 3 === 0) {
if (i === format.length - 1) {
formated = format[i];
} else {
formated = format[i] + "." + formated;
}
} else {
formated = format[i] + formated;
}
count++;
}
return formated;
}
getDate() {
var date = new Date();
var jour = date.getDate().toString();
var mois = (date.getMonth() + 1).toString();
var heure = date.getHours().toString();
var min = date.getMinutes().toString();
var sec = date.getSeconds().toString();
if (date.getDate() + 1 < 10) {
jour = "0" + date.getDate();
}
if (date.getMonth() + 1 < 10) {
mois = "0" + (date.getMonth() + 1);
}
if (date.getHours() + 1 < 10) {
heure = "0" + (heure + 1);
}
if (date.getMinutes() + 1 < 10) {
min = "0" + (min + 1);
}
if (date.getSeconds() + 1 < 10) {
sec = "0" + (sec + 1);
}
return (
jour +
"/" +
mois +
"/" +
date.getFullYear() +
" " +
heure +
":" +
min +
":" +
sec
);
}
}
|
#! /bin/bash
# Copyright 2020 Peter Williams <peter@newton.cx> and collaborators
# Licensed under the MIT License.

# A very simple script to build static GitHub Pages content. This will
# probably be superseded pretty soon.

set -euo pipefail

# Run from the directory containing this script; quote "$0" so paths with
# spaces do not word-split.
cd "$(dirname "$0")"

version="$(cranko show version cranko)"

# Instantiate the fetch-script templates with the released version number.
sed -e "s/@VERSION@/${version}/g" fetch-tgz.tmpl.sh >content/fetch-latest.sh
sed -e "s/@VERSION@/${version}/g" fetch-zip.tmpl.ps1 >content/fetch-latest.ps1

mkdir -p content/book/latest/
# Quote the command substitution so a working directory containing spaces
# is passed to mdbook as a single argument.
mdbook build -d "$(pwd)/content/book/latest" ../book
|
def calculate_bmi(height, weight):
    """Compute the body-mass index.

    :param height: height in centimetres
    :param weight: weight in kilograms
    :return: BMI rounded to two decimal places
    """
    height_m = height / 100
    bmi = weight / height_m ** 2
    return round(bmi, 2)
#!/bin/bash
# Container entrypoint dispatcher: run the supplied command either as root
# (default) or as the unprivileged ${DEFAULT_USER} via gosu when the first
# argument is "noroot".
set -e

case "$1" in
noroot)
    shift
    echo "==> executing as non-root user: $*"
    # Quote "$@" so arguments containing whitespace survive word splitting
    # (the unquoted form broke commands with quoted arguments).
    exec gosu "${DEFAULT_USER}" "$@"
    ;;
*)
    echo "==> executing as root user: $*"
    # Fall back to an interactive bash when no command was supplied,
    # matching the previous ${cmdline:-"bash"} behavior.
    if [ $# -eq 0 ]; then
        exec bash
    fi
    exec "$@"
    ;;
esac

# vim:set ft=sh ff=unix:
|
#!/usr/bin/env python3
# coding=utf-8
#
# Copyright (c) 2020 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import os
import time
from core.config.config_manager import UserConfigManager
def get_filename_extension(file):
    """Split a path into its base file name and extension.

    :param file: a file path
    :return: ``(filename, extension)`` tuple; the extension keeps its
        leading dot (e.g. ``("report", ".xml")``)
    """
    base_name = os.path.basename(file)
    return os.path.splitext(base_name)
def create_dir(path):
    """Create ``path`` (and any missing parents), expanding ``~``.

    The previous pre-existence check was redundant and racy (TOCTOU):
    ``os.makedirs`` with ``exist_ok=True`` already tolerates an existing
    directory, so the check is dropped.

    :param path: directory path, possibly containing ``~`` or relative parts
    """
    full_path = os.path.abspath(os.path.expanduser(path))
    os.makedirs(full_path, exist_ok=True)
def get_file_list(find_path, postfix=""):
    """List the names of entries directly under ``find_path``.

    :param find_path: directory to list (non-recursive)
    :param postfix: optional suffix filter (e.g. ``".xml"``); an empty
        string disables filtering
    :return: list of matching entry names

    The original ``find() != -1 and name[-len(postfix):] == postfix``
    suffix check is equivalent to ``str.endswith`` and has been replaced
    with the idiomatic form.
    """
    file_names = os.listdir(find_path)
    if postfix == "":
        return file_names
    return [name for name in file_names if name.endswith(postfix)]
def get_file_list_by_postfix(path, postfix=""):
    """Recursively collect absolute paths of files under ``path``.

    Walks the tree rooted at ``path`` and returns every regular file whose
    name matches ``postfix``. Entries whose name contains this module's
    ``__file__`` path are skipped (self-exclusion behavior preserved from
    the original implementation).

    :param path: root directory of the walk
    :param postfix: optional suffix filter passed to :func:`get_file_list`
    :return: list of absolute file paths
    """
    collected = []
    for dir_path, _, _ in os.walk(path):
        for name in get_file_list(find_path=dir_path, postfix=postfix):
            if name == "" or name.find(__file__) != -1:
                continue
            candidate = os.path.join(dir_path, name)
            if os.path.isfile(candidate):
                collected.append(candidate)
    return collected
def get_device_log_file(report_path, serial=None, log_name="device_log"):
    """Build the path of a per-device log file under ``report_path``/log.

    Ensures the log directory exists. When ``serial`` is falsy, a
    nanosecond timestamp is used instead to keep file names unique.

    :param report_path: report root directory
    :param serial: optional device serial used in the file name
    :param log_name: base name of the log file
    :return: full path of the device log file
    """
    log_dir = os.path.join(report_path, "log")
    os.makedirs(log_dir, exist_ok=True)
    suffix = serial if serial else time.time_ns()
    file_name = "{}_{}.log".format(log_name, suffix)
    return os.path.join(log_dir, file_name)
def get_build_output_path():
    """Resolve the build output directory under ``<source root>/out``.

    The directory name depends on the user configuration: with the
    "doublefwk" flag set it is derived from the build parameters
    (``target_os-target_cpu-variant``, with "ohos-arm64-release"
    shortened to "release"); otherwise it is derived from the board
    info (``series_type/product_series_type``).

    NOTE(review): ``sys.source_code_root_path`` is not a standard ``sys``
    attribute -- presumably injected by the surrounding framework before
    this is called; verify, otherwise this raises AttributeError.

    :return: absolute build output path, or "" when no source root is set
    """
    if sys.source_code_root_path == "":
        return ""
    manager = UserConfigManager()
    if manager.get_user_config_flag("common", "doublefwk"):
        # Note: "paramter" is the (misspelled) key used by the config files.
        para_dic = manager.get_user_config("build", "paramter")
        target_os = para_dic.get("target_os", "")
        target_cpu = para_dic.get("target_cpu", "")
        variant = para_dic.get("variant", "")
        build_output_name = "%s-%s-%s" % (target_os, target_cpu, variant)
        # Special-case the default release triple to a short directory name.
        if build_output_name == "ohos-arm64-release":
            build_output_name = "release"
    else:
        para_dic = manager.get_user_config("build", "board_info")
        board_series = para_dic.get("board_series", "")
        board_type = para_dic.get("board_type", "")
        board_product = para_dic.get("board_product", "")
        first_build_output = "%s_%s" % (board_series, board_type)
        second_build_output = "%s_%s" % (board_product, first_build_output)
        # Nested directory: <series>_<type>/<product>_<series>_<type>
        build_output_name = os.path.join(first_build_output,
                                         second_build_output)
    build_output_path = os.path.join(
        sys.source_code_root_path,
        "out",
        build_output_name)
    return build_output_path
def is_32_bit_test():
    """Return ``True`` when the configured build target CPU is 32-bit ARM.

    Reads the "target_cpu" entry of the user build configuration
    (note: "paramter" is the key spelling used by the config files).
    """
    manager = UserConfigManager()
    para_dic = manager.get_user_config("build", "paramter")
    return para_dic.get("target_cpu", "") == "arm"
def get_decode(stream):
    """Best-effort conversion of ``stream`` to ``str``.

    ``bytes`` values are decoded as UTF-8 with invalid sequences ignored;
    anything that cannot be decoded falls back to ``str(stream)``.

    :param stream: arbitrary value, typically process output
    :return: string representation of ``stream``
    """
    if isinstance(stream, (str, bytes)):
        try:
            # str has no .decode, so plain strings also take the fallback.
            return stream.decode("utf-8", errors="ignore")
        except (ValueError, AttributeError, TypeError):
            return str(stream)
    return str(stream)
|
<reponame>hoodini47/gatsby-video-portfolio-page<filename>src/components/list-of-menu-items.js
import React from "react"
import { Link } from "gatsby"
import { graphql, useStaticQuery } from 'gatsby'
export const ListOfMenuItems = () => {
const data = useStaticQuery(graphql`
query MyMenuQuery {
allDatoCmsMenu {
nodes {
position
menuItem
categoryNumber
}
}
}
`)
return (
<ul id="menu">
{
data.allDatoCmsMenu.nodes.sort(
(a, b) => {
const positionA = a.position;
const positionB = b.position;
let comparision = 0;
if(positionA > positionB) {
comparision = 1;
} else if (positionA < positionB) {
comparision = -1
}
return comparision
}
).map((block, index) => (
<li key={index}>
<p key={index}>
{block.menuItem}
</p>
</li>
))
}
</ul>
)
}
export default ListOfMenuItems |
/** A no-argument callback that returns nothing. */
export type Noop = () => void;
/** The JavaScript primitive value types handled by this module. */
export type Primitive = string | number | boolean;
|
# Overwrite /etc/apt/sources.list with the TUNA (Tsinghua University) Debian
# buster mirrors (main, updates, backports, security), then refresh the
# package index. Source-package (deb-src) lines are written commented out --
# the first echoed line (in Chinese) explains they are disabled to speed up
# "apt update" and can be uncommented if needed.
# NOTE(review): requires root (writes /etc/apt/sources.list); presumably a
# Dockerfile RUN fragment -- verify the execution context before editing.
echo "# 默认注释了源码镜像以提高 apt update 速度,如有需要可自行取消注释">/etc/apt/sources.list \
&& echo "deb https://mirrors.tuna.tsinghua.edu.cn/debian/ buster main contrib non-free">>/etc/apt/sources.list \
&& echo "# deb-src https://mirrors.tuna.tsinghua.edu.cn/debian/ buster main contrib non-free">>/etc/apt/sources.list \
&& echo "deb https://mirrors.tuna.tsinghua.edu.cn/debian/ buster-updates main contrib non-free">>/etc/apt/sources.list \
&& echo "# deb-src https://mirrors.tuna.tsinghua.edu.cn/debian/ buster-updates main contrib non-free">>/etc/apt/sources.list \
&& echo "deb https://mirrors.tuna.tsinghua.edu.cn/debian/ buster-backports main contrib non-free">>/etc/apt/sources.list \
&& echo "# deb-src https://mirrors.tuna.tsinghua.edu.cn/debian/ buster-backports main contrib non-free">>/etc/apt/sources.list \
&& echo "deb https://mirrors.tuna.tsinghua.edu.cn/debian-security buster/updates main contrib non-free">>/etc/apt/sources.list \
&& echo "# deb-src https://mirrors.tuna.tsinghua.edu.cn/debian-security buster/updates main contrib non-free">>/etc/apt/sources.list \
&& apt-get update -y
|
#!/bin/sh
# NOTE(review): each line below expands an arithmetic substitution and then
# attempts to execute the expanded value (e.g. "123") as a command name;
# presumably this file is shell-parser/test-suite input rather than a
# runnable script -- verify before changing.
# The |=, &=, ^= forms also assign the bitwise result to "a" as a side effect
# of the expansion.
$((123))
$((a|=123))
$((a&=123))
$((a^=123))
|
def get_latest_version():
    """
    Get the latest version of the 'test_package' package.

    :return: The version string (e.g. '1.0.0')
    :raises requests.HTTPError: if the version endpoint responds with an
        error status (previously a failed request surfaced later as a
        confusing KeyError or JSON decode error)
    """
    url = 'https://test_package-db.org/api/version.json'
    # A timeout prevents this call from hanging forever on a stalled server.
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    data = response.json()
    return data['version']
<reponame>Nebulon/nebpyclient<gh_stars>1-10
#
# Copyright 2021 Nebulon, Inc.
# All Rights Reserved.
#
# DISCLAIMER: THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
# EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
# OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
from enum import Enum
from datetime import datetime
__all__ = [
"read_value",
"time_to_str",
"parse_time",
"NebEnum",
"PageInput",
"DateFormat",
"ResourceType",
]
def parse_time(value: str) -> datetime:
    """Parse a JSON (JavaScript) encoded timestamp string.

    Converts strings of the form ``2020-01-01T10:10:10Z`` to ``datetime``
    objects. Any value that does not match this format yields the minimum
    datetime value (``datetime.min``) instead of raising.

    :param value: The string value to parse
    :type value: str

    :returns datetime: A ``datetime`` version of the provided JSON-time string
    """
    try:
        parsed = datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")
    except ValueError:
        parsed = datetime.min
    return parsed
def time_to_str(value: datetime) -> str:
    """Format a ``datetime`` as a JSON (JavaScript) timestamp string.

    Produces strings such as ``2020-01-01T10:10:10Z``.

    :param value: The ``datetime`` object to convert
    :type value: datetime

    :raises ValueError: If the provided value is not a valid datetime object

    :returns str: The JSON (JavaScript) compliant version of the date and time
    """
    # isinstance() already rejects None, so a separate None check is not needed.
    if not isinstance(value, datetime):
        raise ValueError("provided value is not a valid datetime object")
    return value.strftime("%Y-%m-%dT%H:%M:%SZ")
def read_value(
        key_path: str,
        data: dict,
        data_type: type,
        mandatory=True
) -> any:
    """Helper function to extract values from a response ``dict``

    Allows extraction of nested values from a ``dict`` for convenience.
    This also allows for type checking and for validating that mandatory
    properties were supplied.

    :param key_path: A JSONPath-like path to a value in the dictionary. Each
        hierarchy is separated via a dot. Example: ``parent_key.child_key``
        will lookup a value in the provided dict
        ``data["parent_key"]["child_key"]``
    :type key_path: str
    :param data: A ``dict`` of values, typically JSON returned from the
        nebulon ON API. Values will be looked up in this ``dict``
    :type data: dict
    :param data_type: The expected data type for the lookup value. If the
        lookup value is a list, the expected element type shall be supplied.
    :type data_type: type
    :param mandatory: Indicates if the lookup value must be provided. If set
        to ``True`` the lookup value must not be ``None`` or a ``ValueError``
        is raised.

    :returns any: Returns the value in the ``dict`` (if found) that is
        identified via the provided ``key_path``. If the value is not found
        or if the value is ``None`` while marked as mandatory, a
        ``ValueError`` is raised.

    :raises ValueError: If the value indicated by ``key_path`` is not found
        in the supplied ``data`` parameter, if the lookup value is ``None``
        while it is a mandatory value.
    :raises TypeError: If the data type of the value found by ``key_path``
        in the provided ``data`` parameter is not matching the data type that
        is provided in the parameter ``data_type``.
    """
    # build the path. we expect a ``key_path`` that looks like this:
    # "key1.key2.key3" -> ["key1", "key2", "key3"]
    segments = key_path.split(".")
    # segments should always have at least one element that exists in the
    # dictionary that is provided via ``data``.
    if data is None or len(segments) == 0 or segments[0] not in data:
        if mandatory:
            raise ValueError(f"provided key {key_path} is invalid for {data}")
        return None
    # handle the current key. this could be any key in the hierarchy
    key = segments[0]
    value = data[key]
    # first we need to check for it to be not None if it is a mandatory value.
    # it is ok to return None if the value is not mandatory
    if value is None:
        if mandatory:
            raise ValueError(f"required property {key} was not set")
        return None
    # if there are more children, we need to return the contents of these
    # instead of the current value. The function recurses with the remaining
    # path segments until the leaf is reached.
    if len(segments) > 1:
        child_key = ".".join(segments[1:])
        # handle lists separately: recurse into each element of the list
        if isinstance(value, list):
            return [read_value(child_key, i, data_type, mandatory)
                    for i in value]
        # single items we can just return
        return read_value(child_key, value, data_type, mandatory)
    # this is the last element in the hierarchy and we need to convert it to
    # the expected data_type. Handle list separately
    if isinstance(value, list):
        return [__convert_value(key, i, data_type) for i in value]
    return __convert_value(key, value, data_type)
def __convert_value(
        key: str,
        value: any,
        data_type: type
) -> any:
    """Verify a named value for the specified type and convert if necessary

    Allows type checking of a named value against a provided data type. It
    also cleans up any type issues that may result from JSON encoding and
    decoding.

    :param key: The name of the key in a dictionary. While this parameter is
        not used for the type checking or conversion, it is used to provide a
        meaningful error message.
    :type key: str
    :param value: The value that will be type-checked. If the supplied value
        is ``None``, type checking is not done.
    :type: any
    :param data_type: The type that ``value`` needs to match or what it will
        be converted to.
    :type: type

    :raises TypeError: An error indicating if there are any issues with the
        supplied value matching the provided data type.

    :returns any: The converted value in the specified type.
    """
    if value is None:
        return None
    # Already the expected type: return unchanged.
    if isinstance(value, data_type):
        return value
    # convert any integers if a float is expected. This can happen during
    # JSON encoding and decoding.
    if data_type == float and isinstance(value, int):
        return float(value)
    # datetime objects are supplied as a JSON (JavaScript) string.
    if data_type == datetime and isinstance(value, str):
        return parse_time(value)
    # enumerations are supplied as strings
    if issubclass(data_type, NebEnum) and isinstance(value, str):
        return getattr(data_type, "parse")(value)
    # dicts are interpreted as objects, so we instantiate a new object from
    # the provided dictionary. This may fail if the supplied data_type does
    # not have a constructor that accepts a dict.
    if isinstance(value, dict):
        return data_type(value)
    # if we got to this place an invalid data type was supplied and we raise
    # a TypeError.
    error = f"{key} of invalid type {data_type}, got {value.__class__}"
    raise TypeError(error)
class NebEnum(Enum):
    """An enumeration that is used in nebulon ON

    This enumeration is used as a parent class to provide convenience
    functions for encoding and decoding enumerations that are defined in
    nebulon ON.
    """

    @classmethod
    def parse(
            cls,
            value: str
    ):
        """Construct a new ``Enum`` from the provided ``str`` value.

        :param value: A string representation of a member of this ``Enum``.
        :type value: str

        :raises ValueError: If the value is ``None`` or empty, or if it does
            not match any member and the enumeration defines no ``Unknown``
            fallback member.

        :returns Enum: The ``NebEnum`` value that matches the provided value.
        """
        if value is None or len(value) == 0:
            raise ValueError("provided value may not be None or empty")
        for item in cls:
            if value == item.value:
                # found a matching value
                return item
        # Fallback value in case the API adds an enum that is not supported
        # by an older version of the SDK. Previously this unconditionally
        # accessed ``cls.Unknown``, which raised AttributeError for
        # enumerations without an Unknown member (e.g. DateFormat).
        fallback = getattr(cls, "Unknown", None)
        if fallback is None:
            raise ValueError(f"'{value}' is not a member of {cls.__name__}")
        return fallback
class DateFormat(NebEnum):
    """Defines available date and time format options

    Examples:

    * ANSIC: ``Mon Jan _2 15:04:05 2006``
    * UnixDate: ``Mon Jan _2 15:04:05 MST 2006``
    * RubyDate: ``Mon Jan 02 15:04:05 -0700 2006``
    * RFC822: ``02 Jan 06 15:04 MST``
    * RFC822Z: ``02 Jan 06 15:04 -0700``
    * RFC850: ``Monday, 02-Jan-06 15:04:05 MST``
    * RFC1123: ``Mon, 02 Jan 2006 15:04:05 MST``
    * RFC1123Z: ``Mon, 02 Jan 2006 15:04:05 -0700``
    * RFC3339: ``2006-01-02T15:04:05Z07:00``
    * RFC3339Nano: ``2006-01-02T15:04:05.999999999Z07:00``
    * Kitchen: ``3:04PM``
    * Stamp: ``Jan _2 15:04:05``
    * StampMilli: ``Jan _2 15:04:05.000``
    * StampMicro: ``Jan _2 15:04:05.000000``
    * StampNano: ``Jan _2 15:04:05.000000000``
    """
    # NOTE(review): the member values appear to mirror Go's time-package
    # layout names -- presumably they must match the server-side API enum
    # exactly, so do not rename the string values; verify against the API.
    ANSIC = "ANSIC"
    UNIX_DATE = "UnixDate"
    RUBY_DATE = "RubyDate"
    RFC822 = "RFC822"
    RFC822_Z = "RFC822Z"
    RFC850 = "RFC850"
    RFC1123 = "RFC1123"
    RFC1123_Z = "RFC1123Z"
    RFC3339 = "RFC3339"
    RFC3339_NANO = "RFC3339Nano"
    KITCHEN = "Kitchen"
    STAMP = "Stamp"
    STAMP_MILLI = "StampMilli"
    STAMP_MICRO = "StampMicro"
    STAMP_NANO = "StampNano"
class ResourceType(NebEnum):
    """Defines a resource type in a nebulon infrastructure

    The string values are the wire representation used by the nebulon ON
    API (decoded via ``NebEnum.parse``); do not rename them.
    """
    Unknown = "Unknown"
    """The resource type is not known"""

    Datacenter = "Datacenter"
    """A datacenter location information resource"""

    Host = "Host"
    """A server or host resource"""

    Disk = "Disk"
    """A physical drive or physical disk resource"""

    Pod = "Pod"
    """A nPod resource"""

    PodGroup = "PodGroup"
    """A group of nPods"""

    Room = "Room"
    """A room or lab in a datacenter"""

    Rack = "Rack"
    """A rack in a datacenter row"""

    Row = "Row"
    """A row in a datacenter"""

    Snapshot = "Snapshot"
    """A point-in-time checkpoint of a storage volume"""

    SPU = "SPU"
    """A services processing unit"""

    VM = "VM"
    """A virtual machine"""

    Volume = "Volume"
    """A storage volume"""

    NetworkInterface = "NetworkInterface"
    """A network interface"""
class PageInput:
    """Defines input properties for pagination

    Allows specifying which page to return from the server for API calls
    that support pagination. It allows to specify the page number and the
    quantity of items to return in the page. Default values for a page are
    page number ``1`` and ``100`` items per page.
    """

    def __init__(
            self,
            page: int = 1,
            count: int = 100
    ):
        """Constructs a new PageInput object.

        :param page: The page number. Defaults to ``1``.
        :type page: int, optional
        :param count: The maximum number of items to include in a page.
            Defaults to ``100`` items.
        :type count: int, optional
        """
        self.__page = page
        self.__count = count

    @property
    def page(self) -> int:
        """Specifies the page number to return"""
        return self.__page

    @property
    def count(self) -> int:
        """Specifies the maximum number of items to include per page"""
        return self.__count

    @property
    def as_dict(self):
        """Dictionary representation as expected by the API input types."""
        return {
            "page": self.page,
            "count": self.count,
        }
|
#!/bin/sh
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

### Prepare the package and run `docker build`

set -e

# Resolve the package.json directory relative to this script; use $(...)
# instead of backticks and quote "$0" for paths containing spaces.
SCRIPTDIR=$(dirname "$0")
cd "$SCRIPTDIR"
cd .. # now in the package.json directory

# Record the most recent commit so the image can report what it was built from.
git log | head -n 3 > docker/gitlog.txt

npm pack

# Extract the version field (4th double-quoted field) from package.json.
# Match the quoted key so lines merely containing "version" elsewhere in
# the file do not corrupt the result.
VERSION=$(grep '"version"' package.json | awk -F'"' '{ print $4; }')
cp "google-cloud-gapic-generator-$VERSION.tgz" "docker/package.tgz"

cd docker
docker build -t gapic-generator-typescript .

# Cleanup
rm -f gitlog.txt package.tgz
|
<reponame>marvelperseus/Real-Estate-website-frontend
import React from 'react';
import { withStyles } from 'material-ui/styles';
import Input, { InputLabel } from 'material-ui/Input';
import Chip from 'material-ui/Chip';
import { FormControl } from 'material-ui/Form';
import Divider from 'material-ui/Divider';
import Typography from 'material-ui/Typography';
import Button from 'material-ui/Button';
import MenuItem from 'material-ui/Menu/MenuItem';
import Select from 'material-ui/Select';
import Grid from 'material-ui/Grid';
import TextField from 'material-ui/TextField';
import Dialog, {
DialogActions,
DialogContent,
DialogContentText,
DialogTitle,
} from 'material-ui/Dialog';
import CircularProgressbar from 'react-circular-progressbar';
import neighborhoods from '../constants/neighborhoods';
import submitDevelopment from '../effects/developments/submitDevelopment';
import deleteDevelopment from '../effects/developments/deleteDevelopment';
import getDevelopmentUploadsSignedURLS from '../effects/developments/getDevelopmentUploadsSignedURLS';
import uploadFile from '../effects/uploadFile';
import gql from 'graphql-tag';
import { Query } from 'react-apollo';
// Mapbox access token used by the map and geocoder in this module.
// NOTE(review): the value appears redacted ('<KEY>') in this source -- verify.
const MAPBOX_TOKEN =
  '<KEY>';

// Options for the "pet policy" select.
const petPolicySelectItems = [
  { label: 'Cats Allowed' },
  { label: 'Dogs Allowed' },
  { label: 'Cats and Dogs Allowed' },
  { label: 'Case by Case' },
  { label: 'No Pets Allowed' },
];

// Options for the listing category select.
const listingCategorySelectItems = [
  { label: 'Residential' },
  { label: 'Commercial' },
];

// Ownership options shown when the category is "Residential".
const ResidentialSelectItems = [
  { label: 'Multi Family' },
  { label: 'Mixed Use' },
  { label: 'Condo' },
  { label: 'Cooperative' },
  { label: 'Town House' },
  { label: 'Apartment' },
  { label: 'Condo-op' },
  { label: 'Retail Condo' },
  { label: 'Room' },
  { label: 'Loft Apartments' },
];

// Ownership options shown when the category is "Commercial".
const CommercialSelectItems = [
  { label: 'Mixed Use' },
  { label: 'Multi Family' },
  { label: 'Warehouse' },
  { label: 'Land/Development Site' },
];

// Options for the listing type select.
const listingTypeSelectItems = [
  { label: 'Elevator' },
  { label: 'Walk Up' },
  { label: 'Mid Rise' },
  { label: 'Low Rise' },
  { label: 'High Rise' },
  { label: '1 Family' },
  { label: '2 Family' },
  { label: '3 Family' },
  { label: 'Garden Style' },
];

// JSS styles for the submit dialog and its upload progress indicator.
const styles = {
  map: {
    height: '300px',
  },
  dialog: {
    width: '800px',
    maxWidth: '800px',
  },
  progressBarWrapper: {
    display: 'flex',
    flexDirection: 'column',
    justifyContent: 'center',
    alignItems: 'center',
    width: '100%',
  },
  progressBar: {
    width: '25%',
  },
  progressBarExplanation: {
    marginTop: '20px',
    fontSize: '1.1rem',
  },
};
/**
 * Dialog for creating or viewing/editing a "new development" listing.
 * Collects form data in local state, uploads any selected images to
 * signed URLs (with a per-file progress bar), then submits the record.
 */
@withStyles(styles)
class SubmitDevelopmentDialogBox extends React.Component {
  constructor() {
    super();
    // All form fields plus upload/submission bookkeeping live in state.
    this.state = {
      name: '',
      headline: '',
      subheadline: '',
      description: '',
      website: '',
      category: '',
      ownership: '',
      type: '',
      petPolicy: '',
      floors: '',
      unitCount: '',
      image: null,
      builderimage: null,
      address: '',
      builderlogos: [],
      coordinates: [],
      neighborhood: '',
      borough: '',
      region: '',
      agents: [],
      // True while a single file is being PUT to its signed URL.
      isUploadingFile: false,
      uploadingFileData: null,
      // True from the start of the upload sequence until submit completes.
      submittingFormToServer: false,
    };
  }

  componentDidMount() {
    // In "view" mode, pre-populate the form from the existing record.
    // Image fields are excluded: they hold uploaded assets, not form values.
    if (this.props.isView) {
      const {
        image,
        builderimage,
        builderlogos,
        ...data
      } = this.props.newdevelopment;
      this.setState(data);
    }
  }

  // Generic controlled-input handler: handleChange('name') returns an
  // event handler that writes the input value into state under that key.
  handleChange = name => event => {
    this.setState({ [name]: event.target.value });
  };

  // Geocoder result handler: store the selected [lng, lat] coordinates.
  handleOnResult = event => {
    this.setState({
      coordinates: event.result.geometry.coordinates,
    });
  };

  /**
   * Submit flow:
   * 1) collect the selected files and matching upload descriptors,
   * 2) request signed upload URLs,
   * 3) upload each file sequentially while reporting progress,
   * 4) submit the form state and close the dialog.
   */
  handleSubmit = event => {
    event.preventDefault();
    const { image, builderimage, builderlogos } = this.state;
    // `uploads` (descriptors) and `files` (File objects) are kept in the
    // same order so files[i] corresponds to the i-th signed URL returned.
    const uploads = [];
    const files = [];
    if (image) {
      const uploadImage = {
        itemName: `image`,
        fileName: image.name,
        fileType: image.type,
      };
      uploads.push(uploadImage);
      files.push(image);
    }
    if (builderimage) {
      uploads.push({
        itemName: `builder image`,
        fileName: builderimage.name,
        fileType: builderimage.type,
      });
      files.push(builderimage);
    }
    if (builderlogos.length) {
      Object.keys(builderlogos).forEach(key => {
        uploads.push({
          itemName: `builder logo ${key}`,
          fileName: builderlogos[key].name,
          fileType: builderlogos[key].type,
        });
        files.push(builderlogos[key]);
      });
    }
    // When editing an existing record, pass its id so the server reuses it.
    let newdevelopmentID;
    if (this.props.isView)
      newdevelopmentID = this.props.newdevelopment.newdevelopmentID;
    getDevelopmentUploadsSignedURLS(uploads, newdevelopmentID).then(
      async response => {
        this.setState({ submittingFormToServer: true });
        const { items, listingID } = response;
        let counter = 0;
        // Upload files one at a time so the progress bar tracks each file.
        for (let item of items) {
          this.setState({
            isUploadingFile: true,
            uploadingFileData: item,
            loadedPercent: 0,
          });
          await uploadFile({
            file: files[counter],
            url: item.signedURL,
            onUploadProgress: progressEvent => {
              // Do whatever you want with the native progress event
              const loadedPercent =
                progressEvent.loaded / progressEvent.total * 100;
              this.setState({ loadedPercent: Math.round(loadedPercent) });
            },
          });
          this.setState({ isUploadingFile: false });
          counter++;
        }
        // All uploads done: persist the form data, then close the dialog.
        submitDevelopment(this.state, listingID).then(responseObject => {
          this.setState({ submittingFormToServer: false });
          this.props.onClose();
        });
      }
    );
  };

  // File-picker callbacks passed down to the form component.
  setBuilderImage = fileObject => {
    this.setState({ builderimage: fileObject });
  };
  setImage = fileObject => {
    this.setState({ image: fileObject });
  };
  setBuilderLogos = filesObject => {
    this.setState({ builderlogos: filesObject });
  };

  render() {
    const {
      submittingFormToServer,
      isUploadingFile,
      uploadingFileData,
      loadedPercent,
    } = this.state;
    const { open, onClickOpen, onClose } = this.props;
    const { classes } = this.props;
    if (isUploadingFile) {
      console.log(isUploadingFile, uploadingFileData, loadedPercent);
    }
    return (
      <Dialog
        open={open}
        onClose={onClose}
        aria-labelledby="form-dialog-title"
        classes={{ paper: classes.dialog }}
      >
        <DialogTitle id="form-dialog-title">Submit New Development</DialogTitle>
        <DialogContent>
          <SubmitDevelopmentForm
            {...this.state}
            onCoordinatesChange={this.handleOnResult}
            onChange={this.handleChange}
            setImage={this.setImage}
            setBuilderLogos={this.setBuilderLogos}
            setBuilderImage={this.setBuilderImage}
          />
          {/* Circular progress indicator shown while a file is uploading. */}
          {isUploadingFile ? (
            <div className={classes.progressBarWrapper}>
              <CircularProgressbar
                className={classes.progressBar}
                percentage={loadedPercent}
                styles={{
                  path: {
                    stroke: `rgba(62, 152, 199, ${loadedPercent / 100})`,
                  },
                }}
              />
              <div className={classes.progressBarExplanation}>
                {'Uploading ' + uploadingFileData.itemName + ' ...'}
              </div>
            </div>
          ) : (
            <span />
          )}
        </DialogContent>
        {/* Action buttons are hidden while the form is being submitted. */}
        {submittingFormToServer || (
          <DialogActions>
            <Button onClick={onClose} color="primary">
              Cancel
            </Button>
            {this.props.isView && (
              <Button
                color="secondary"
                onClick={() => {
                  deleteDevelopment(this.props.newdevelopment.newdevelopmentID);
                  onClose();
                }}
              >
                Delete
              </Button>
            )}
            <Button onClick={this.handleSubmit} color="primary">
              Submit
            </Button>
          </DialogActions>
        )}
      </Dialog>
    );
  }
}
// Theme-aware JSS styles for the development form.
const formStyles = theme => ({
  fileInput: {
    display: 'none',
  },
  formControl: {
    margin: theme.spacing.unit,
    width: '100%',
    minWidth: 120,
    maxWidth: 300,
  },
  chips: {
    display: 'flex',
    flexWrap: 'wrap',
  },
  chip: {
    margin: theme.spacing.unit / 4,
  },
});

// Sizing for the agents multi-select dropdown menu.
const ITEM_HEIGHT = 48;
const ITEM_PADDING_TOP = 8;
const MenuProps = {
  PaperProps: {
    style: {
      // Show roughly 4.5 items before the menu starts scrolling.
      maxHeight: ITEM_HEIGHT * 4.5 + ITEM_PADDING_TOP,
      width: 250,
    },
  },
};

// Agent display names shown in the multi-select.
// NOTE(review): values appear redacted ('<NAME>') in this source -- verify.
const names = [
  '<NAME>',
  '<NAME>',
  '<NAME>',
  '<NAME>',
  '<NAME>',
  '<NAME>',
  '<NAME>',
  '<NAME>',
  '<NAME>',
  '<NAME>',
];
// Weight a menu item's font by whether it is currently selected:
// selected agents render medium, unselected render regular.
function getStyles(name, that) {
  const isSelected = that.props.agents.indexOf(name) !== -1;
  const typography = that.props.theme.typography;
  return {
    fontWeight: isSelected
      ? typography.fontWeightMedium
      : typography.fontWeightRegular,
  };
}
// GraphQL query fetching the selectable agents (id and last name).
const agentsQuery = gql`
  query agents {
    agents {
      uuid
      lastName
    }
  }
`;
@withStyles(formStyles, { withTheme: true })
class SubmitDevelopmentForm extends React.Component {
constructor() {
super();
this.map = React.createRef();
}
  /**
   * Initialize the Mapbox map, marker and geocoder, and forward geocoder
   * selections to the parent via the `onCoordinatesChange` prop.
   */
  componentDidMount() {
    const handleOnResult = this.props.onCoordinatesChange;
    // Loaded lazily here (rather than at module scope), presumably to avoid
    // evaluating mapbox-gl during server-side rendering -- verify.
    const mapboxgl = require('mapbox-gl');
    const MapboxGeocoder = require('mapbox-gl-geocoder');
    mapboxgl.accessToken = MAPBOX_TOKEN;
    // Default map center as [lng, lat] (a point in Brooklyn, NY).
    const coordinates = [-73.96169, 40.69758];
    var map = new mapboxgl.Map({
      container: this.map, // Container ID
      style: 'mapbox://styles/mapbox/streets-v11', // Map style to use
      center: coordinates,
      zoom: 10, // Starting zoom level
    });
    var marker = new mapboxgl.Marker() // Initialize a new marker
      .setLngLat(coordinates) // Marker [lng, lat] coordinates
      .addTo(map); // Add the marker to the map
    var geocoder = new MapboxGeocoder({
      // Initialize the geocoder
      accessToken: mapboxgl.accessToken, // Set the access token
      placeholder: 'Select location in New York', // Placeholder text for the search bar
      proximity: {
        longitude: coordinates[0],
        latitude: coordinates[1],
      }, // Coordinates of New York
    });
    // Add the geocoder to the map
    map.addControl(geocoder);
    // After the map style has loaded on the page,
    // add a source layer and default styling for a single point
    map.on('load', function() {
      map.addSource('single-point', {
        type: 'geojson',
        data: {
          type: 'FeatureCollection',
          features: [],
        },
      });
      map.addLayer({
        id: 'point',
        source: 'single-point',
        type: 'circle',
        paint: {
          'circle-radius': 10,
          'circle-color': '#448ee4',
        },
      });
      // Listen for the `result` event from the Geocoder
      // `result` event is triggered when a user makes a selection
      // Add a marker at the result's coordinates
      geocoder.on('result', function(ev) {
        map.getSource('single-point').setData(ev.result.geometry);
        handleOnResult(ev);
      });
    });
  }
prepareSelectItems = () => {
const { category, neighborhood, borough } = this.props;
const ownerships =
category === 'Residential'
? ResidentialSelectItems
: category === 'Commercial' ? CommercialSelectItems : [];
const coordinates = this.props.coordinates.join(',');
const selectedNeighborhood = neighborhoods.find(
n => n.title === neighborhood
);
const boroughs = neighborhood === '' ? [] : selectedNeighborhood.value;
return { ownerships, boroughs };
};
render() {
const {
name,
headline,
subheadline,
description,
website,
category,
ownership,
type,
petPolicy,
floors,
unitCount,
address,
region,
borough,
neighborhood,
image,
coordinates,
agents,
classes,
onChange,
setImage,
setBuilderLogos,
setBuilderImage,
submittingFormToServer,
} = this.props;
const { ownerships, boroughs } = this.prepareSelectItems();
return (
<form
style={{
display: submittingFormToServer ? 'none' : undefined,
}}
>
<Grid container spacing={24}>
<Grid item sm={12} xs={12}>
<TextField
autoFocus
margin="dense"
id="name"
value={name}
onChange={onChange('name')}
label="Name"
type="text"
fullWidth
/>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
margin="dense"
id="headline"
value={headline}
onChange={onChange('headline')}
label="Headline"
type="text"
fullWidth
/>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
margin="dense"
id="subheadline"
value={subheadline}
onChange={onChange('subheadline')}
label="Sub Headline"
type="text"
fullWidth
/>
</Grid>
<Grid item sm={12}>
<TextField
label="Description"
value={description}
onChange={onChange('description')}
multiline
rowsMax="4"
margin="normal"
fullWidth
/>
</Grid>
<Grid item sm={6}>
<TextField
margin="dense"
label="Website"
value={website}
onChange={onChange('website')}
type="text"
fullWidth
/>
</Grid>
<Grid item sm={6} xs={12}>
<Query
query={agentsQuery}
ssr={false}
fetchPolicy="cache-and-network"
>
{({ loading, error, data }) => {
if (loading) return <p>Loading</p>;
if (error) return <p>error</p>;
if (data) {
const fetchedAgents = data.agents;
return (
<FormControl className={classes.formControl}>
<InputLabel htmlFor="select-multiple-agent">
Agents
</InputLabel>
<Select
multiple
value={agents}
onChange={onChange('agents')}
input={<Input id="select-multiple-agent" />}
renderValue={selected => (
<div className={classes.chips}>
{selected.map(value => (
<Chip
key={value}
label={
fetchedAgents.find(a => a.uuid === value)
.lastName
}
className={classes.chip}
/>
))}
</div>
)}
MenuProps={MenuProps}
>
{fetchedAgents.map(agent => (
<MenuItem
key={agent.uuid}
value={agent.uuid}
style={getStyles(agent.uuid, this)}
>
{agent.lastName}
</MenuItem>
))}
</Select>
</FormControl>
);
}
}}
</Query>
</Grid>
<Grid item xs={12}>
<input
type="file"
className={classes.fileInput}
id="image-input"
onChange={e => setImage(e.target.files[0])}
/>
<label htmlFor="image-input">
<Button
variant="raised"
color="secondary"
aria-label="add"
component="span"
>
Upload Image
</Button>
</label>
<Divider />
</Grid>
<Grid item xs={12}>
<input
type="file"
className={classes.fileInput}
id="builder-image-input"
onChange={e => setBuilderImage(e.target.files[0])}
/>
<label htmlFor="builder-image-input">
<Button
variant="raised"
color="secondary"
aria-label="add"
component="span"
>
Upload Builder Image
</Button>
</label>
<Divider />
</Grid>
<Grid item xs={12}>
<input
type="file"
className={classes.fileInput}
id="builder-logos-input"
onChange={e => setBuilderLogos(e.target.files)}
multiple
/>
<label htmlFor="builder-logos-input">
<Button
variant="raised"
color="secondary"
aria-label="add"
component="span"
>
Upload Builder Logos
</Button>
</label>
<Divider />
</Grid>
<Grid item sm={6} xs={12}>
<TextField
select
label="Category"
value={category}
onChange={onChange('category')}
margin="normal"
fullWidth
>
{listingCategorySelectItems.map(option => (
<option key={option.label} value={option.label}>
{option.label}
</option>
))}
</TextField>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
select
value={ownership}
onChange={onChange('ownership')}
label="Ownership"
margin="normal"
fullWidth
>
{ownerships.map(option => (
<option key={option.label} value={option.label}>
{option.label}
</option>
))}
</TextField>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
select
label="Type"
value={type}
onChange={onChange('type')}
margin="normal"
fullWidth
>
{listingTypeSelectItems.map(option => (
<option key={option.label} value={option.label}>
{option.label}
</option>
))}
</TextField>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
select
label="Pet Policy"
margin="normal"
value={petPolicy}
onChange={onChange('petPolicy')}
fullWidth
>
{petPolicySelectItems.map(option => (
<option key={option.label} value={option.label}>
{option.label}
</option>
))}
</TextField>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
margin="dense"
id="floors"
value={floors}
onChange={onChange('floors')}
label="Floors"
type="text"
fullWidth
/>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
margin="dense"
id="unitCount"
value={unitCount}
onChange={onChange('unitCount')}
label="Unit Count"
type="text"
fullWidth
/>
</Grid>
<Grid item sm={12}>
<TextField
margin="dense"
id="address"
value={address}
onChange={onChange('address')}
label="Address"
type="text"
fullWidth
/>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
select
label="Region"
margin="normal"
value={region}
onChange={onChange('region')}
fullWidth
>
<option key={'NYC'} value={'NYC'}>
NYC
</option>
</TextField>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
select
label="Neighborhood"
margin="normal"
value={neighborhood}
onChange={onChange('neighborhood')}
fullWidth
>
{region === 'NYC' &&
neighborhoods.map(n => (
<option key={n.title} value={n.title}>
{n.title}
</option>
))}
</TextField>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
select
label="Boroughs"
margin="normal"
value={borough}
onChange={onChange('borough')}
fullWidth
>
{boroughs.map(n => (
<option key={n.value} value={n.value}>
{n.value}
</option>
))}
</TextField>
</Grid>
<Grid item sm={6} xs={12} />
<Grid item xs={12}>
<div style={{ height: '300px' }}>
<div
ref={el => (this.map = el)}
style={{ display: 'flex', height: '100%' }}
/>
</div>
</Grid>
<Grid item sm={6} xs={12}>
<TextField
margin="dense"
id="coordinates"
label="Coordinates"
value={coordinates}
type="text"
disabled
fullWidth
/>
</Grid>
</Grid>
</form>
);
}
}
// NOTE(review): this exports `SubmitDevelopmentDialogBox`, not the
// `SubmitDevelopmentForm` class defined directly above — confirm the
// dialog-box component is defined earlier in this file; otherwise this
// is a broken export.
export default SubmitDevelopmentDialogBox;
|
# Build a trimmed test copy of the Salmon quantification outputs:
# for every Sample* directory under $main_dir, keep only the first
# 1000 lines of its quant.sf in a mirrored tree under $test_dir.
main_dir="/home/j/Insync/jmmunozp@usp.br/Google Drive - Shared with me/NitrogenResponsiveGenotypes/Salmon2/SalmonQuant"
test_dir="/home/j/BIOINFORMATICA/test_RNAseq"
# Record the sample directory names, one per line (redirection target quoted).
ls "$main_dir" | grep Sample > "${test_dir}/samples2.txt"
# Read line-by-line instead of word-splitting $(cat ...) so sample
# names containing spaces survive intact.
while IFS= read -r i
do
    mkdir -p "${test_dir}/${i}"
    head -n1000 "${main_dir}/${i}/quant.sf" > "${test_dir}/${i}/quant.sf"
    echo "making file Sample_${i}"
done < "${test_dir}/samples2.txt"
|
<gh_stars>0
package uk.co.mruoc.json.mask.phone;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.JsonPath;
import uk.co.mruoc.json.mask.JsonMasker;
import uk.co.mruoc.json.mask.JsonPathConfig;
import java.util.Collection;
/**
 * {@link JsonMasker} specialisation that masks phone number values:
 * values found at the supplied JSON paths are obfuscated with
 * {@code PhoneNumberMaskFunction}.
 */
public class PhoneNumberJsonMasker extends JsonMasker {
    /**
     * @param mapper    object mapper used to read and write the JSON
     * @param jsonPaths paths whose values should be masked
     */
    public PhoneNumberJsonMasker(ObjectMapper mapper, Collection<JsonPath> jsonPaths) {
        super(mapper, jsonPaths, new PhoneNumberMaskFunction(), JsonPathConfig.build());
    }
}
|
<reponame>mpermar/debezium-incubator
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.connector.oracle.logminer;
import static org.fest.assertions.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import java.sql.SQLRecoverableException;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.HashSet;
import java.util.Set;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.mockito.Mockito;
import io.debezium.connector.oracle.OracleConnectorConfig;
import io.debezium.connector.oracle.OracleDatabaseSchema;
import io.debezium.connector.oracle.junit.SkipTestDependingOnAdapterNameRule;
import io.debezium.connector.oracle.junit.SkipWhenAdapterNameIsNot;
import io.debezium.connector.oracle.junit.SkipWhenAdapterNameIsNot.AdapterName;
import io.debezium.relational.TableId;
/**
 * Unit tests for the LogMiner {@code SqlUtils} statement builders:
 * pins the exact SQL / PL/SQL text each builder produces, the history
 * table-name round trip, and the connection-problem classifier.
 */
@SkipWhenAdapterNameIsNot(value = AdapterName.LOGMINER)
public class SqlUtilsTest {

    @Rule
    public TestRule skipRule = new SkipTestDependingOnAdapterNameRule();

    /** Verifies the exact text of every statement builder. */
    @Test
    public void testStatements() {
        SqlUtils.setRac(false);
        String result = SqlUtils.addLogFileStatement("ADD", "FILENAME");
        String expected = "BEGIN sys.dbms_logmnr.add_logfile(LOGFILENAME => 'FILENAME', OPTIONS => ADD);END;";
        // Consistency fix: assert with isEqualTo so a failure reports the
        // differing strings instead of just "expected true".
        assertThat(result).isEqualTo(expected);

        OracleDatabaseSchema schema = mock(OracleDatabaseSchema.class);
        TableId table1 = new TableId("catalog", "schema", "table1");
        TableId table2 = new TableId("catalog", "schema", "table2");
        Set<TableId> tables = new HashSet<>();
        Mockito.when(schema.tableIds()).thenReturn(tables);

        // With no known tables the contents query filters on schema only.
        result = SqlUtils.logMinerContentsQuery("DATABASE", "SCHEMA", schema);
        expected = "SELECT SCN, SQL_REDO, OPERATION_CODE, TIMESTAMP, XID, CSF, TABLE_NAME, SEG_OWNER, OPERATION, USERNAME " +
                "FROM V$LOGMNR_CONTENTS WHERE OPERATION_CODE in (1,2,3,5) AND SEG_OWNER = 'DATABASE' AND SCN >= ? AND SCN < ? " +
                "OR (OPERATION_CODE IN (5,34) AND USERNAME NOT IN ('SYS','SYSTEM','SCHEMA')) " +
                " OR (OPERATION_CODE IN (7,36))";
        assertThat(result).isEqualTo(expected);

        // With known tables an IN (...) filter on table_name is added.
        tables.add(table1);
        tables.add(table2);
        result = SqlUtils.logMinerContentsQuery("DATABASE", "SCHEMA", schema);
        expected = "SELECT SCN, SQL_REDO, OPERATION_CODE, TIMESTAMP, XID, CSF, TABLE_NAME, SEG_OWNER, OPERATION, USERNAME " +
                "FROM V$LOGMNR_CONTENTS WHERE OPERATION_CODE in (1,2,3,5) " +
                "AND SEG_OWNER = 'DATABASE' AND table_name IN ('table1','table2') " +
                "AND SCN >= ? AND SCN < ? OR (OPERATION_CODE IN (5,34) AND USERNAME NOT IN ('SYS','SYSTEM','SCHEMA')) " +
                " OR (OPERATION_CODE IN (7,36))";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.databaseSupplementalLoggingMinCheckQuery();
        expected = "SELECT 'KEY', SUPPLEMENTAL_LOG_DATA_MIN FROM V$DATABASE";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.tableSupplementalLoggingCheckQuery(new TableId(null, "s", "t"));
        expected = "SELECT 'KEY', LOG_GROUP_TYPE FROM ALL_LOG_GROUPS WHERE OWNER = 's' AND TABLE_NAME = 't'";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.startLogMinerStatement(10L, 20L, OracleConnectorConfig.LogMiningStrategy.ONLINE_CATALOG, true);
        expected = "BEGIN sys.dbms_logmnr.start_logmnr(startScn => '10', endScn => '20', " +
                "OPTIONS => DBMS_LOGMNR.DICT_FROM_ONLINE_CATALOG + DBMS_LOGMNR.CONTINUOUS_MINE + DBMS_LOGMNR.NO_ROWID_IN_STMT);END;";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.startLogMinerStatement(10L, 20L, OracleConnectorConfig.LogMiningStrategy.CATALOG_IN_REDO, false);
        expected = "BEGIN sys.dbms_logmnr.start_logmnr(startScn => '10', endScn => '20', " +
                "OPTIONS => DBMS_LOGMNR.DICT_FROM_REDO_LOGS + DBMS_LOGMNR.DDL_DICT_TRACKING + DBMS_LOGMNR.NO_ROWID_IN_STMT);END;";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.truncateTableStatement("table_name");
        expected = "TRUNCATE TABLE table_name";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.diffInDaysQuery(123L);
        expected = "select sysdate - CAST(scn_to_timestamp(123) as date) from dual";
        assertThat(result).isEqualTo(expected);
        result = SqlUtils.diffInDaysQuery(null);
        assertThat(result).isNull();

        result = SqlUtils.bulkHistoryInsertStmt("table_name");
        expected = "INSERT /*+ APPEND */ INTO table_name SELECT * FROM LOG_MINING_TEMP";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.redoLogStatusQuery();
        expected = "SELECT F.MEMBER, R.STATUS FROM V$LOGFILE F, V$LOG R WHERE F.GROUP# = R.GROUP# ORDER BY 2";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.switchHistoryQuery();
        expected = "SELECT 'TOTAL', COUNT(1) FROM V$ARCHIVED_LOG WHERE FIRST_TIME > TRUNC(SYSDATE)" +
                " AND DEST_ID IN (SELECT DEST_ID FROM V$ARCHIVE_DEST_STATUS WHERE STATUS='VALID' AND TYPE='LOCAL')";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.currentRedoNameQuery();
        expected = "SELECT F.MEMBER FROM V$LOG LOG, V$LOGFILE F WHERE LOG.GROUP#=F.GROUP# AND LOG.STATUS='CURRENT'";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.databaseSupplementalLoggingAllCheckQuery();
        expected = "SELECT 'KEY', SUPPLEMENTAL_LOG_DATA_ALL FROM V$DATABASE";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.currentScnQuery();
        expected = "SELECT CURRENT_SCN FROM V$DATABASE";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.oldestFirstChangeQuery(Duration.ofHours(0L));
        expected = "SELECT MIN(FIRST_CHANGE#) FROM (SELECT MIN(FIRST_CHANGE#) AS FIRST_CHANGE# FROM V$LOG UNION SELECT MIN(FIRST_CHANGE#) AS FIRST_CHANGE# FROM V$ARCHIVED_LOG)";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.allOnlineLogsQuery();
        expected = "SELECT MIN(F.MEMBER) AS FILE_NAME, L.NEXT_CHANGE# AS NEXT_CHANGE, F.GROUP#, L.FIRST_CHANGE# AS FIRST_CHANGE " +
                " FROM V$LOG L, V$LOGFILE F " +
                " WHERE F.GROUP# = L.GROUP# AND L.NEXT_CHANGE# > 0 " +
                " GROUP BY F.GROUP#, L.NEXT_CHANGE#, L.FIRST_CHANGE# ORDER BY 3";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.tableExistsQuery("table_name");
        expected = "SELECT '1' AS ONE FROM USER_TABLES WHERE TABLE_NAME = 'table_name'";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.logMiningHistoryDdl("table_name");
        expected = "create TABLE table_name(" +
                "row_sequence NUMBER(19,0), " +
                "captured_scn NUMBER(19,0), " +
                "table_name VARCHAR2(30 CHAR), " +
                "seg_owner VARCHAR2(30 CHAR), " +
                "operation_code NUMBER(19,0), " +
                "change_time TIMESTAMP(6), " +
                "transaction_id VARCHAR2(50 CHAR), " +
                "csf NUMBER(19,0), " +
                "redo_sql VARCHAR2(4000 CHAR)" +
                ") nologging";
        assertThat(result).isEqualTo(expected);

        // A zero retention adds no FIRST_TIME filter ...
        result = SqlUtils.archiveLogsQuery(10L, Duration.ofHours(0L));
        expected = "SELECT NAME AS FILE_NAME, NEXT_CHANGE# AS NEXT_CHANGE, FIRST_CHANGE# AS FIRST_CHANGE FROM V$ARCHIVED_LOG " +
                "WHERE NAME IS NOT NULL AND ARCHIVED = 'YES' " +
                "AND STATUS = 'A' AND NEXT_CHANGE# > 10 ORDER BY 2";
        assertThat(result).isEqualTo(expected);

        // ... while a positive retention restricts logs to that window.
        result = SqlUtils.archiveLogsQuery(10L, Duration.ofHours(1L));
        expected = "SELECT NAME AS FILE_NAME, NEXT_CHANGE# AS NEXT_CHANGE, FIRST_CHANGE# AS FIRST_CHANGE FROM V$ARCHIVED_LOG " +
                " WHERE NAME IS NOT NULL AND FIRST_TIME >= SYSDATE - (1/24) AND ARCHIVED = 'YES' " +
                " AND STATUS = 'A' AND NEXT_CHANGE# > 10 ORDER BY 2";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.deleteLogFileStatement("file_name");
        expected = "BEGIN SYS.DBMS_LOGMNR.REMOVE_LOGFILE(LOGFILENAME => 'file_name');END;";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.getHistoryTableNamesQuery();
        expected = "SELECT TABLE_NAME, '1' FROM USER_TABLES WHERE TABLE_NAME LIKE 'LM_HIST_%'";
        assertThat(result).isEqualTo(expected);

        result = SqlUtils.dropHistoryTableStatement("table_name");
        expected = "DROP TABLE TABLE_NAME PURGE";
        assertThat(result).isEqualTo(expected);
    }

    /** Round-trips history table names and their encoded retention. */
    @Test
    public void shouldParseHistoryTableNames() {
        String name = SqlUtils.buildHistoryTableName(LocalDateTime.now());
        long diff = SqlUtils.parseRetentionFromName(name);
        assertThat(diff).isEqualTo(0);
        name = SqlUtils.buildHistoryTableName(LocalDateTime.now().minusHours(10));
        diff = SqlUtils.parseRetentionFromName(name);
        assertThat(diff).isEqualTo(10);
        // A malformed suffix parses as zero retention.
        diff = SqlUtils.parseRetentionFromName(SqlUtils.LOGMNR_HISTORY_TABLE_PREFIX + "10_2_4_5");
        assertThat(diff).isEqualTo(0);
    }

    /** Classifies which exceptions indicate a recoverable connection problem. */
    @Test
    public void shouldDetectConnectionProblems() {
        assertThat(SqlUtils.connectionProblem(new IOException("connection"))).isTrue();
        assertThat(SqlUtils.connectionProblem(new SQLRecoverableException("connection"))).isTrue();
        assertThat(SqlUtils.connectionProblem(new Throwable())).isFalse();
        assertThat(SqlUtils.connectionProblem(new Exception("ORA-03135 problem"))).isTrue();
        assertThat(SqlUtils.connectionProblem(new Exception("ORA-12543 problem"))).isTrue();
        assertThat(SqlUtils.connectionProblem(new Exception("ORA-00604 problem"))).isTrue();
        assertThat(SqlUtils.connectionProblem(new Exception("ORA-01089 problem"))).isTrue();
        assertThat(SqlUtils.connectionProblem(new Exception("ORA-00600 problem"))).isTrue();
        assertThat(SqlUtils.connectionProblem(new Exception("ORA-99999 problem"))).isFalse();
        assertThat(SqlUtils.connectionProblem(new Exception("NO MORE DATA TO READ FROM SOCKET problem"))).isTrue();
        assertThat(SqlUtils.connectionProblem(new Exception("12543 problem"))).isFalse();
    }
}
|
//
const electron = require ('electron');
const { app, BrowserWindow, dialog, globalShortcut, ipcMain, Menu, shell } = electron;
//
const remoteMain = require ('@electron/remote/main');
//
remoteMain.initialize ();
//
// Reference to the application's single main window.
let mainWindow = null;
//
// Enforce a single running instance: if another instance already holds
// the lock, this one exits immediately.
const gotTheLock = app.requestSingleInstanceLock ();
if (!gotTheLock)
{
    app.quit ();
}
else
{
// When the user launches a second instance, restore and focus the
// existing main window instead of starting another copy.
app.on
(
    'second-instance',
    (event, commandLine, workingDirectory) =>
    {
        if (mainWindow)
        {
            if (mainWindow.isMinimized ())
            {
                mainWindow.restore ();
            }
            mainWindow.show ();
        }
    }
);
//
// Share settings with the renderer process
global.settings = require ('./settings.json');
//
// Optionally disable GPU acceleration (must be done before app ready).
if (!settings.accelerated)
{
    app.disableHardwareAcceleration ();
}
//
const fs = require ('fs');
const os = require ('os');
const path = require ('path');
//
// Application metadata used by the About box and system info report.
const appPackaged = app.isPackaged;
//
const appName = app.name;
const appVersion = app.getVersion ();
// Build date: for a packaged app, the creation time of its resources
// directory; otherwise the current launch time.
const appDate = (appPackaged ? fs.statSync (process.resourcesPath).ctime : new Date ()).toISOString ();
//
// Directory holding unpacked asar resources, when present; falls back
// to the app path itself when there is no ".unpacked" sibling.
let appDirname = app.getAppPath ();
let unpackedDirname = `${appDirname}.unpacked`;
if (!fs.existsSync (unpackedDirname))
{
    unpackedDirname = appDirname;
};
//
// Display the About dialog (name, description, copyright, version and
// build date). On macOS the box is shown without a parent window;
// elsewhere it is attached to the invoking browser window.
function showAboutBox (menuItem, browserWindow, event)
{
    const parentWindow = (process.platform === 'darwin') ? null : browserWindow;
    dialog.showMessageBox
    (
        parentWindow,
        {
            type: 'info',
            message: `${appName}`,
            detail: `${settings.description}\n${settings.copyright}\n\nVersion: ${appVersion}\nDate: ${appDate}`,
            buttons: [ "OK" ]
        }
    );
}
//
// License window instance; null while not open (created lazily below).
let licenseWindow = null;
//
// Open the License window as a modal child of the main window.
// Created on first use and discarded on close; invoking again while it
// exists simply re-shows it.
function showLicense (menuItem, browserWindow, event)
{
    // Only the main window may spawn the license modal.
    if (browserWindow === mainWindow)
    {
        if (!licenseWindow)
        {
            licenseWindow = new BrowserWindow
            (
                {
                    title: `License | ${appName}`,
                    width: 384,
                    height: (process.platform !== 'darwin') ? 480 : 540,
                    minimizable: false,
                    maximizable: false,
                    resizable: false,
                    fullscreenable: false,
                    parent: browserWindow,
                    modal: true,
                    // Shown only once content is ready, to avoid a flash.
                    show: false,
                    webPreferences:
                    {
                        contextIsolation: false,
                        devTools: false
                    }
                }
            );
            if (process.platform !== 'darwin')
            {
                licenseWindow.removeMenu ();
            }
            licenseWindow.loadFile (path.join (__dirname, 'license-index.html'));
            licenseWindow.once ('ready-to-show', () => { licenseWindow.show (); });
            // Drop the reference so the window is rebuilt next time.
            licenseWindow.on ('close', () => { licenseWindow = null; });
        }
        else
        {
            licenseWindow.show ();
        }
    }
}
//
// Assemble a plain-text report of application, framework and operating
// system details: one "Name: value" line per entry, with blank lines
// and "-- Section --" headers between groups.
function getSystemInfo ()
{
    const entries =
    [
        "",
        "-- Application --",
        "",
        [ "Name", appName ],
        [ "Version", appVersion ],
        [ "Date", appDate ],
        "",
        [ "Locale", app.getLocale () ],
        [ "Packaged", app.isPackaged ],
        "",
        "-- Framework --",
        "",
        [ "System Version", process.getSystemVersion () ],
        [ "Platform", process.platform ],
        [ "Architecture", process.arch ],
        "",
        [ "Default App", process.defaultApp || false ],
        [ "Mac App Store App", process.mas || false ],
        [ "Windows Store App", process.windowsStore || false ],
        "",
        [ "Electron Version", process.versions.electron ],
        [ "Node Version", process.versions.node ],
        [ "V8 Version", process.versions.v8 ],
        [ "Chromium Version", process.versions.chrome ],
        [ "ICU Version", process.versions.icu ],
        [ "Unicode Version", process.versions.unicode ],
        // [ "CLDR Version", process.versions.cldr ],
        // [ "Time Zone Version", process.versions.tz ],
        "",
        "-- Operating System --",
        "",
        [ "OS Type", os.type () ],
        [ "OS Platform", os.platform () ],
        [ "OS Release", os.release () ],
        "",
        [ "CPU Architecture", os.arch () ],
        [ "CPU Endianness", os.endianness () ],
        [ "CPU Logical Cores", os.cpus ().length ],
        [ "CPU Model", os.cpus ()[0].model ],
        [ "CPU Speed (MHz)", os.cpus ()[0].speed ]
    ];
    let report = "";
    for (let entry of entries)
    {
        report += (Array.isArray (entry) ? `${entry[0]}: ${entry[1]}` : entry) + "\n";
    }
    return report;
}
//
// System Info window instance; null while not open (created lazily).
let systemInfoWindow = null;
//
// Open the System Info window as a modal child of the main window,
// injecting the report from getSystemInfo() into its text area once
// the DOM is ready. Mirrors the showLicense() lifecycle.
function showSystemInfo (menuItem, browserWindow, event)
{
    if (browserWindow === mainWindow)
    {
        if (!systemInfoWindow)
        {
            systemInfoWindow = new BrowserWindow
            (
                {
                    title: `System Info | ${appName}`,
                    width: 480,
                    height: settings.window.defaultHeight,
                    minimizable: false,
                    maximizable: false,
                    resizable: false,
                    fullscreenable: false,
                    parent: browserWindow,
                    modal: true,
                    show: false,
                    webPreferences:
                    {
                        contextIsolation: false,
                        devTools: false
                    }
                }
            );
            if (process.platform !== 'darwin')
            {
                systemInfoWindow.removeMenu ();
            }
            systemInfoWindow.loadFile (path.join (__dirname, 'system-info-index.html'));
            // JSON.stringify safely embeds the report as a JS string literal.
            const script = `document.body.querySelector ('.system-info').value = ${JSON.stringify (getSystemInfo ())};`;
            systemInfoWindow.webContents.on ('dom-ready', () => { systemInfoWindow.webContents.executeJavaScript (script); });
            systemInfoWindow.once ('ready-to-show', () => { systemInfoWindow.show (); });
            // Drop the reference so the window is rebuilt next time.
            systemInfoWindow.on ('close', () => { systemInfoWindow = null; });
        }
        else
        {
            systemInfoWindow.show ();
        }
    }
}
//
// Default main-window size, computed in onAppReady from the settings
// and the primary display's work area.
let defaultWidth;
let defaultHeight;
//
// Restore the main window to its default size, centered on screen.
// Refused (with a beep) while in full-screen mode.
function resetWindow ()
{
    if (mainWindow.isFullScreen ())
    {
        shell.beep ();
    }
    else
    {
        if (mainWindow.isMaximized ())
        {
            mainWindow.unmaximize ();
        }
        mainWindow.setSize (defaultWidth, defaultHeight);
        mainWindow.center ();
        if (mainWindow.isMinimized ())
        {
            mainWindow.restore ();
        }
    }
}
//
// Application menu templates. Platform-specific variants (darwin*) are
// selected when assembling menuTemplate below.
//
// macOS application menu (named after the app, with standard roles).
const darwinAppMenu =
{
    label: appName,
    submenu:
    [
        { label: `About ${appName}...`, click: showAboutBox },
        { type: 'separator' },
        { role: 'services', submenu: [ ] },
        { type: 'separator' },
        { role: 'hide' },
        { role: 'hideothers' },
        { role: 'unhide' },
        { type: 'separator' },
        { role: 'quit' }
    ]
};
// Minimal application menu for other platforms.
const appMenu =
{
    label: settings.shortAppName,
    submenu:
    [
        { role: 'quit' }
    ]
};
const editMenu =
{
    label: "Edit",
    submenu:
    [
        { role: 'undo' },
        { role: 'redo' },
        { type: 'separator' },
        { role: 'cut' },
        { role: 'copy' },
        { role: 'paste' },
        { role: 'delete' },
        { type: 'separator' },
        { role: 'selectall' }
    ]
};
// View menu: items send IPC messages handled by the renderer.
const viewMenu =
{
    label: "View",
    submenu:
    [
        { label: "Toggle Navigation Sidebar", accelerator: 'CommandOrControl+N', click: () => { mainWindow.webContents.send ('toggle-sidebar'); } },
        { label: "Toggle Categories", accelerator: 'CommandOrControl+K', click: () => { mainWindow.webContents.send ('toggle-categories'); } },
        { type: 'separator' },
        { label: "Scroll to Top", accelerator: 'CommandOrControl+T', click: () => { mainWindow.webContents.send ('scroll-to-top'); } },
        { label: "Scroll to Bottom", accelerator: 'CommandOrControl+B', click: () => { mainWindow.webContents.send ('scroll-to-bottom'); } },
        { type: 'separator' },
        { label: "Actual Size", accelerator: 'CommandOrControl+0', click: () => { mainWindow.webContents.send ('reset-zoom'); } },
        { label: "Zoom In", accelerator: 'CommandOrControl+Plus', click: () => { mainWindow.webContents.send ('zoom-in'); } },
        { label: "Zoom Out", accelerator: 'CommandOrControl+-', click: () => { mainWindow.webContents.send ('zoom-out'); } },
        { type: 'separator' },
        { role: 'togglefullscreen' }
    ]
};
// Developer menu: only installed for unpackaged builds or when
// settings.developerFeatures is enabled (see assembly below).
const developerMenu =
{
    label: "Developer",
    submenu:
    [
        { role: 'reload' },
        { role: 'toggledevtools' },
        { type: 'separator' },
        { label: "Open User Data Directory", click: () => { shell.openPath (app.getPath ('userData')); } },
        { label: "Open Temporary Directory", click: () => { shell.openPath (app.getPath ('temp')); } },
        { type: 'separator' },
        { label: "Show Executable File", click: () => { shell.showItemInFolder (app.getPath ('exe')); } }
    ]
};
const darwinWindowMenu =
{
    role: 'window',
    submenu:
    [
        { role: 'close' },
        { role: 'minimize' },
        { role: 'zoom' },
        { type: 'separator' },
        { label: "Reset to Default", accelerator: 'CommandOrControl+D', click: () => { resetWindow (); } },
        { type: 'separator' },
        { role: 'front' }
    ]
};
const windowMenu =
{
    label: "Window",
    submenu:
    [
        { role: 'minimize' },
        { role: 'close' },
        { type: 'separator' },
        { label: "Reset to Default", accelerator: 'CommandOrControl+D', click: () => { resetWindow (); } }
    ]
};
const darwinHelpMenu =
{
    role: 'help',
    submenu:
    [
        { label: "License...", click: showLicense },
        { label: "System Info...", click: showSystemInfo },
        { type: 'separator' },
        { label: settings.repository.label, click: () => { shell.openExternal (settings.repository.URL); } },
        { label: settings.releases.label, click: () => { shell.openExternal (settings.releases.URL); } }
    ]
};
// Non-macOS help menu also carries the About item (no app menu for it).
const helpMenu =
{
    label: 'Help',
    submenu:
    [
        { label: "About...", click: showAboutBox },
        { label: "License...", click: showLicense },
        { label: "System Info...", click: showSystemInfo },
        { type: 'separator' },
        { label: settings.repository.label, click: () => { shell.openExternal (settings.repository.URL); } },
        { label: settings.releases.label, click: () => { shell.openExternal (settings.releases.URL); } }
    ]
};
//
// Assemble the full menu template for the current platform. "&" is
// escaped as "&&" so it displays literally in menu labels.
let menuTemplate = [ ];
menuTemplate.push ((process.platform === 'darwin') ? darwinAppMenu : appMenu);
menuTemplate.push (editMenu);
menuTemplate.push (viewMenu);
if (settings.unitsMenu)
{
    // Placeholder submenu; populated later by updateUnitsMenu().
    menuTemplate.push ({ label: settings.unitsName.replace (/&/g, "&&"), submenu: [ ] });
}
if ((!appPackaged) || settings.developerFeatures)
{
    menuTemplate.push (developerMenu);
}
menuTemplate.push ((process.platform === 'darwin') ? darwinWindowMenu : windowMenu);
menuTemplate.push ((process.platform === 'darwin') ? darwinHelpMenu : helpMenu);
//
// The currently installed application Menu instance.
let menu;
//
// Rebuild the units submenu with one radio item per unit name (the
// current unit checked), then reinstall the application menu so the
// change takes effect. Selecting an item notifies the renderer via IPC.
function updateUnitsMenu (unitNames, currentUnitName)
{
    const unitsLabel = settings.unitsName.replace (/&/g, "&&");
    const unitsTemplateItem = menuTemplate.find (templateItem => templateItem["label"] === unitsLabel);
    if (unitsTemplateItem)
    {
        unitsTemplateItem["submenu"] = unitNames.map
        (
            unitName =>
            ({
                label: unitName.replace (/&/g, "&&"),
                type: 'radio',
                checked: (unitName === currentUnitName),
                click: () => { mainWindow.webContents.send ('select-unit', unitName); },
            })
        );
        menu = Menu.buildFromTemplate (menuTemplate);
        Menu.setApplicationMenu (menu);
    }
}
//
// Mark as checked the radio item matching unitName in the units
// submenu of the installed application menu (its radio group unchecks
// the previous selection automatically).
function syncUnitsMenu (unitName)
{
    const unitsLabel = settings.unitsName.replace (/&/g, "&&");
    const targetLabel = unitName.replace (/&/g, "&&");
    for (let menuItem of menu.items)
    {
        if (menuItem.label !== unitsLabel)
        {
            continue;
        }
        for (let submenuItem of menuItem.submenu.items)
        {
            if (submenuItem.label === targetLabel)
            {
                submenuItem.checked = true;
            }
        }
    }
}
//
// Application startup: install the menu, restore the persisted window
// bounds, create the main window, and wire up IPC and lifecycle
// handlers. Runs once on the app 'ready' event.
function onAppReady ()
{
    menu = Menu.buildFromTemplate (menuTemplate);
    Menu.setApplicationMenu (menu);
    //
    // Persistent store for main-window preferences (window bounds).
    const Storage = require ('./lib/storage.js');
    const mainStorage = new Storage ('main-preferences');
    //
    // `screen` can only be required after 'ready'.
    const { screen } = electron;
    let workAreaWidth = screen.getPrimaryDisplay ().workArea.width;
    let workAreaHeight = screen.getPrimaryDisplay ().workArea.height;
    //
    // Prefer the larger default size, falling back to the smaller one
    // when it does not fit the primary display's work area.
    defaultWidth = settings.window.largerDefaultWidth;
    defaultHeight = settings.window.largerDefaultHeight;
    if ((defaultWidth > workAreaWidth) || (defaultHeight > workAreaHeight))
    {
        defaultWidth = settings.window.defaultWidth;
        defaultHeight = settings.window.defaultHeight;
    }
    //
    const defaultPrefs =
    {
        windowBounds:
        {
            width: defaultWidth,
            height: defaultHeight
        }
    };
    let prefs = mainStorage.get (defaultPrefs);
    let windowBounds = prefs.windowBounds;
    //
    const windowOptions =
    {
        center: true,
        x: windowBounds.x,
        y: windowBounds.y,
        width: windowBounds.width,
        height: windowBounds.height,
        minWidth: settings.window.minWidth,
        minHeight: settings.window.minHeight,
        backgroundColor: settings.window.backgroundColor,
        show: !settings.window.deferredShow,
        webPreferences:
        {
            contextIsolation: false,
            nodeIntegration: true,
            spellcheck: false
        }
    };
    // On Linux the window icon must be supplied explicitly.
    if (process.platform === 'linux')
    {
        windowOptions.icon = path.join (__dirname, 'icons', 'icon-256.png');
    }
    mainWindow = new BrowserWindow (windowOptions);
    //
    remoteMain.enable (mainWindow.webContents);
    //
    mainWindow.loadFile (path.join (__dirname, 'renderer', 'index.html'));
    //
    mainWindow.webContents.on ('new-window', (event) => { event.preventDefault (); }); // Prevent openening of a new window by window.open ()
    mainWindow.webContents.on ('will-navigate', (event) => { event.preventDefault (); }); // Inhibit drag-and-drop of URL on window
    //
    // Re-entrancy guard: a second close while the confirm dialog is up
    // is simply cancelled.
    let inConfirmQuitDialog = false;
    //
    // On close, optionally confirm quitting; when actually closing,
    // persist the current window bounds for the next launch.
    mainWindow.on
    (
        'close',
        (event) =>
        {
            if (settings.confirmQuit)
            {
                if (inConfirmQuitDialog)
                {
                    event.preventDefault ();
                }
                else
                {
                    inConfirmQuitDialog = true;
                    let choice = dialog.showMessageBoxSync
                    (
                        (process.platform === 'darwin') ? null : mainWindow,
                        {
                            type: 'question',
                            buttons: [ "Yes", "No" ],
                            defaultId: 0,
                            title: "Confirm Quit",
                            message: "Are you sure you want to quit?"
                        }
                    );
                    inConfirmQuitDialog = false;
                    if (choice === 1)
                    {
                        event.preventDefault ();
                    }
                    else
                    {
                        mainStorage.set ({ windowBounds: mainWindow.getBounds () });
                    }
                }
            }
            else
            {
                mainStorage.set ({ windowBounds: mainWindow.getBounds () });
            }
        }
    );
    //
    // Quit when the main window closes (hide first on macOS).
    mainWindow.once ('closed', () => { if (process.platform === 'darwin') { app.hide (); } app.quit (); });
    //
    // IPC from the renderer to maintain the units menu.
    if (settings.unitsMenu)
    {
        ipcMain.on
        (
            'update-units-menu',
            (event, unitNames, currentUnitName) =>
            {
                updateUnitsMenu (unitNames, currentUnitName);
            }
        );
        ipcMain.on ('sync-units-menu', (event, unitName) => { syncUnitsMenu (unitName); });
    }
    //
    // Renderer asks to reveal the window after deferred show.
    ipcMain.on ('show-window', () => { mainWindow.show (); });
    //
    // Optionally let the renderer's Escape key leave full-screen mode.
    if (settings.escapeExitsFullScreen)
    {
        ipcMain.on
        (
            'exit-full-screen',
            () =>
            {
                if (mainWindow.isFullScreen ())
                {
                    mainWindow.setFullScreen (false);
                }
                else
                {
                    // shell.beep ();
                }
            }
        );
    }
    //
    if (settings.hotKey)
    {
        // Set hot key
        globalShortcut.register (settings.hotKey, () => { mainWindow.show (); });
    }
}
//
app.once ('ready', onAppReady);
}
//
|
def update_snippet(
    data: schemas.snippet.SnippetData,
    auth_token: str = fastapi.Depends(oath)
) -> schemas.snippet.SnippetResponse:
    """
    Update the code snippet with the specified id.

    Args:
        data: Snippet payload identifying the snippet and its new content.
        auth_token: Bearer token resolved by the FastAPI dependency.

    Returns:
        The updated snippet.

    Raises:
        HTTPException: 401 when authentication fails, 500 on a
            database error.
    """
    try:
        # Authenticate the user using the provided auth_token.
        user = authenticate_user(auth_token)
        # Establish a database session and apply the update via the
        # Token CRUD operations.
        with sql.database.Session() as session:
            with sql.crud.Token(session) as token_crud:
                return token_crud.update_snippet(data, user)
    except AuthenticationError as e:
        # Chain the original exception so the root cause is preserved
        # in logs/tracebacks (PEP 3134 `raise ... from`).
        raise HTTPException(status_code=401, detail="Authentication failed") from e
    except DatabaseError as e:
        raise HTTPException(status_code=500, detail="Database error occurred") from e
import codecs
import csv
import datetime
import json
import time
from django.core.paginator import Paginator
from django.http import StreamingHttpResponse
from django.shortcuts import render, Http404, redirect, HttpResponse
from polls.models import Equipment, Data
from account.models import User
from pyecharts.charts import Line
import pyecharts.options as opts
# Number of data rows shown per page in the paginated data views.
every_page_data = 100
def index(request, id):
    """
    Equipment home page: renders a line chart of the latest 100
    readings for the equipment with the given id.
    """
    # Require a logged-in session.
    if not request.session.get('is_login', None):
        return redirect('/account/login/')
    uid = request.session.get('uid', None)
    # NOTE(review): `user` may be None for a stale uid, in which case
    # `user.admin` below raises AttributeError — confirm sessions always
    # carry a valid uid.
    user = User.objects.filter(id=uid)[
        0] if User.objects.filter(id=uid) else None
    equipment = Equipment.objects.filter(
        id=id)[0] if Equipment.objects.filter(id=id) else None
    if not equipment:
        # NOTE(review): this returns the Http404 class instead of
        # raising it; Django expects `raise Http404(...)`.
        return Http404
    # Latest 100 readings, reversed so the x axis runs oldest → newest.
    datas = equipment.data_set.all()[:100][::-1]
    dates = list(data.created_time.strftime('%Y-%m-%d %H:%M:%S')
                 for data in datas)
    values = list(data.value for data in datas)
    # Build the pyecharts line chart of recent values.
    line = Line()
    line.add_xaxis(dates)
    line.add_yaxis(equipment.name, values, symbol_size=5,
                   is_hover_animation=False,
                   label_opts=opts.LabelOpts(is_show=False),
                   linestyle_opts=opts.LineStyleOpts(width=1.5),
                   is_smooth=True)
    line.set_global_opts(
        toolbox_opts=opts.ToolboxOpts(True, feature={
            "dataZoom": {"yAxisIndex": "none"},
            "restore": {},
            "saveAsImage": {},
            "dataView": {}
        },),
        yaxis_opts=opts.AxisOpts(name='最新100条数据'),
        tooltip_opts=opts.TooltipOpts(True, trigger="axis"),
        datazoom_opts=opts.DataZoomOpts(True, range_start=0, range_end=100)
    )
    is_admin = user.admin
    # dump_options() serialises the chart config for the template to embed.
    content = {'linechart_recent_data': line.dump_options(),
               'equipment': equipment,
               'session': request.session, 'page_equipment': True, 'admin': is_admin}
    return render(request, 'equipment/index.html', content)
def modify_equipment(request, id):
    """
    Rename an equipment and/or update its description; answers a JSON
    status object of the form {'Code': <int>, 'Message': <str>}.
    """
    if not request.session.get('is_login', None):
        return redirect('/account/login/')
    uid = request.session.get('uid', None)
    # .first() avoids running the filter twice and yields None on no match.
    user = User.objects.filter(id=uid).first()
    equipment = Equipment.objects.filter(id=id).first()
    response = {
        'Code': 0,
        'Message': '未知错误!'
    }
    if not equipment:
        response['Code'] = 111
        response['Message'] = '未找到该设备!'
        return HttpResponse(json.dumps(response))
    # NOTE(review): this mutation happens on GET; existing callers depend on
    # it, so it is preserved, but POST would be the conventional verb.
    if request.method == 'GET':
        name = request.GET.get('name', None)
        descript = request.GET.get('descript', None)
        is_changed = True
        if name != equipment.name:
            if Equipment.objects.filter(name=name):
                response['Code'] = 112
                response['Message'] = '该设备已存在!'
                is_changed = False
            elif not (user and user.admin):
                # user may be None for a stale session; treat that as
                # "not admin" instead of raising AttributeError.
                response['Code'] = 102
                response['Message'] = '权限不足!'
                is_changed = False
        if is_changed:
            equipment.name = name
            equipment.descript = descript
            equipment.save()
            response['Code'] = 100
            response['Message'] = '保存成功!'
    return HttpResponse(json.dumps(response))
def list_data(request, id):
    """
    Paginated data listing for an equipment. A POST from an admin deletes
    the checked rows before rendering the (admin or regular) list template.
    """
    if not request.session.get('is_login', None):
        return redirect('/account/login/')
    uid = request.session.get('uid', None)
    user = User.objects.filter(id=uid).first()
    equipment = Equipment.objects.filter(id=id).first()
    if not equipment:
        # Raise (not return) Http404 so Django renders a real 404 page.
        raise Http404
    if request.method == 'POST' and user and user.admin:
        del_list = request.POST.getlist('checkbox_data')
        Data.objects.filter(id__in=del_list).delete()
    try:
        current_page = int(request.GET.get(
            "page")) if request.GET.get("page") else 1
    except (TypeError, ValueError):
        current_page = 1
    paginator = Paginator(equipment.data_set.all(), every_page_data)
    # Pages are 1-based; the previous `< 0` check let ?page=0 through,
    # making paginator.page(0) raise EmptyPage (HTTP 500).
    if current_page < 1 or current_page > paginator.num_pages:
        current_page = 1
    content = {'session': request.session, 'equipment': equipment,
               'page_list_data': True, 'datas': paginator.page(current_page), 'paginator': paginator.page(current_page)}
    if user and user.admin:
        return render(request, 'equipment/page_list_data_admin.html', content)
    else:
        return render(request, 'equipment/page_list_data.html', content)
class Echo:
    """An object that implements just the write method of the file-like
    interface.

    csv.writer only ever calls write(); by returning the rendered row
    instead of buffering it, each writerow() call yields its CSV line,
    which is exactly what StreamingHttpResponse streams to the client.
    """

    def write(self, value):
        """Write the value by returning it, instead of storing in a buffer."""
        return value
def download_data(request, id):
    """Stream all data rows of an equipment as a CSV file attachment.

    Uses Echo + csv.writer so rows are rendered lazily: the response never
    materializes the whole CSV in memory.
    """
    if not request.session.get('is_login', None):
        return redirect('/account/login/')
    equipment = Equipment.objects.filter(id=id).first()
    if not equipment:
        # Raise (not return) Http404 so Django renders a real 404 page.
        raise Http404
    datas = equipment.data_set.all()
    rows = ([data.value, data.created_time.strftime('%Y-%m-%d %H:%M:%S')]
            for data in datas)
    pseudo_buffer = Echo()
    writer = csv.writer(pseudo_buffer)

    def stream():
        # The header used to be written before the response was built and
        # its return value discarded, so the downloaded CSV had no header
        # row. Yielding it first puts it back into the stream.
        yield writer.writerow(['value', 'created_time'])
        for row in rows:
            yield writer.writerow(row)

    response = StreamingHttpResponse(stream(), content_type="text/csv")
    response['Content-Disposition'] = 'attachment; filename="{}.csv"'.format(
        equipment.name)
    return response
def get_equipment_data(request, id):
    """
    JSON endpoint returning chart-ready data points for an equipment over a
    requested span (?action=day|three_day|week|month|three_month|all|date).

    For action=date, from_date/to_date must be '%Y-%m-%d %H:%M:%S' strings.
    """
    response = {
        'Code': 0,
        'Message': '未知错误!',
        'Action': '',
        'Data': ''
    }
    is_error = False
    # Defined up-front so the success branch below never hits a NameError
    # (previously `datas` only existed inside the GET branch).
    datas = []
    if request.method == 'GET':
        action = request.GET.get('action', None)
        equipment = Equipment.objects.filter(id=id).first()
        if not equipment:
            response['Code'] = 111
            response['Message'] = '未找到该设备!'
            is_error = True
        elif action == 'all':
            datas = equipment.data_set.all()
        elif action == 'date':
            from_date = request.GET.get('from_date', None)
            to_date = request.GET.get('to_date', None)
            if (not from_date) or (not to_date):
                response['Code'] = 141
                response['Message'] = '未提供日期!'
                is_error = True
            else:
                try:
                    # strptime parses the incoming strings; the original
                    # called time.strftime on a str, which always raised.
                    start_date = datetime.datetime.strptime(
                        from_date, "%Y-%m-%d %H:%M:%S")
                    end_date = datetime.datetime.strptime(
                        to_date, "%Y-%m-%d %H:%M:%S")
                except ValueError:
                    response['Code'] = 142
                    response['Message'] = '日期格式错误!'
                    is_error = True
                else:
                    # NOTE(review): lookup name 'date_time_filed' kept from
                    # the original query -- verify against the Data model.
                    datas = equipment.data_set.filter(
                        date_time_filed__range=(start_date, end_date))
        else:
            # Fixed look-back windows, in days.
            spans = {'day': 1, 'three_day': 3, 'week': 7,
                     'month': 30, 'three_month': 90}
            if action in spans:
                datas = equipment.data_set.filter(
                    created_time__gt=(datetime.datetime.now()
                                      - datetime.timedelta(days=spans[action])))
    # is_error is now actually set on the error paths above; previously it
    # stayed False forever and error codes were overwritten with 100 here.
    if not is_error:
        response['Code'] = 100
        response['Message'] = '获取数据成功!'
        response['Data'] = list({'name': data.created_time.strftime('%Y-%m-%d %H:%M:%S'),
                                 'value': [data.created_time.strftime('%Y-%m-%d %H:%M:%S'), data.value]} for data in datas)[::-1]
    return HttpResponse(json.dumps(response))
|
<filename>blingfireclient.library/inc/FATransform_hyph_redup_rev_t.h
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
#ifndef _FA_TRANSFORM_HYPH_REDUP_REV_T_H_
#define _FA_TRANSFORM_HYPH_REDUP_REV_T_H_
#include "FAConfig.h"
#include "FATransformCA_t.h"
#include "FASecurity.h"
namespace BlingFire
{
///
/// This class makes reverse to FATransform_hyph_redup_t transformation.
///
/// For example:
/// "aaab\x02ce" -> "aaab-aaabce"
/// Where 0x02 was selected as a delimiter
///
/// Note:
/// 1. If the transformation is not applicable then Process method return -1.
/// 2. if pIn == pOut then transformation is made in-place.
///
template < class Ty >
class FATransform_hyph_redup_rev_t : public FATransformCA_t < Ty > {
public:
    FATransform_hyph_redup_rev_t ();

public:
    /// sets up delimiter value (defaults to DefDelim, i.e. 0x02)
    void SetDelim (const Ty Delim);

    /// makes transformation: expands "<prefix><delim><suffix>" into
    /// "<prefix>-<prefix><suffix>"; returns the output length, or -1 when
    /// no delimiter is present (transformation not applicable)
    /// Note: this function has to be *within* the class definition, because otherwise compiler makes an error
    /// and generates some warnings about "unreferenced local function has been removed".
    const int Process (
            const Ty * pIn,
            const int InCount,
            __out_ecount(MaxOutSize) Ty * pOut,
            const int MaxOutSize
        ) const
    {
        // as __in_range(0, FALimits::MaxWordSize) const int InCount does not work
        __analysis_assume (0 < InCount && FALimits::MaxWordSize >= InCount && pIn);
        DebugLogAssert (0 < InCount && FALimits::MaxWordSize >= InCount && pIn);

        // find reduplication delimiter
        const int DelimPos = GetDelimPos (pIn, InCount);
        DebugLogAssert (DelimPos < InCount);

        // most of the times it won't be found
        if (-1 == DelimPos) {
            return -1;
        } else {
            // see FATransform_hyph_redup_rev_t< Ty >::GetDelimPos for details
            __analysis_assume (0 <= DelimPos && InCount > DelimPos);
            DebugLogAssert (0 <= DelimPos && InCount > DelimPos);

            // the prefix is doubled: output is InCount + DelimPos symbols
            const int OutSize = InCount + DelimPos;

            if (MaxOutSize >= OutSize) {
                // move or copy suffix, if needed; copied back-to-front so an
                // in-place call (pIn == pOut) never overwrites unread input
                const int SuffixLen = InCount - (DelimPos + 1);
                if (0 < SuffixLen) {
                    const Ty * pSuffIn = pIn + DelimPos + 1;
                    Ty * pSuffOut = pOut + (DelimPos << 1) + 1;
                    for (int i = SuffixLen - 1; i >= 0; --i) {
                        pSuffOut [i] = pSuffIn [i];
                    }
                }
                // change delimiter to hyphen
                pOut [DelimPos] = DefHyphen;
                // check whether transformation is made in-place
                if (pIn == pOut) {
                    // prefix already sits at pOut[0..DelimPos); only the
                    // duplicated copy after the hyphen needs to be written
                    Ty * pPrefOut = pOut + DelimPos + 1;
                    for (int i = 0; i < DelimPos; ++i) {
                        pPrefOut [i] = pIn [i];
                    }
                } else {
                    // distinct buffers: write both copies of the prefix
                    Ty * pOut2 = pOut + DelimPos + 1;
                    for (int i = 0; i < DelimPos; ++i) {
                        const Ty Symbol = pIn [i];
                        pOut [i] = Symbol;
                        pOut2 [i] = Symbol;
                    }
                }
            } // of if (MaxOutSize >= OutSize) ...

            // note: OutSize is returned even when MaxOutSize was too small,
            // letting the caller size a buffer and retry
            return OutSize;
        } // of if (-1 == DelimPos) ...
    }

private:
    // returns delimiter position, -1 if not found
    inline const int GetDelimPos (const Ty * pIn, const int InCount) const;

private:
    // delimiter symbol searched for in the input
    Ty m_Delim;
    // constants
    enum {
        DefDelim = 2,     // default delimiter: 0x02
        DefHyphen = '-',  // symbol written in place of the delimiter
    };
};
// Default constructor: delimiter starts as DefDelim (0x02); override via SetDelim.
template < class Ty >
FATransform_hyph_redup_rev_t< Ty >::
FATransform_hyph_redup_rev_t () :
    m_Delim (DefDelim)
{
}
// Replaces the delimiter symbol used by Process/GetDelimPos.
template < class Ty >
void FATransform_hyph_redup_rev_t< Ty >::
SetDelim (const Ty Delim)
{
    m_Delim = Delim;
}
// Linear left-to-right scan for the reduplication delimiter.
// Returns the index of its first occurrence, or -1 when absent.
template < class Ty >
inline const int FATransform_hyph_redup_rev_t< Ty >::
GetDelimPos (const Ty * pIn, const int InCount) const
{
    int Pos = 0;
    while (Pos < InCount && pIn [Pos] != m_Delim) {
        ++Pos;
    }
    return (Pos < InCount) ? Pos : -1;
}
}
#endif
|
import React from 'react';
import { Form } from 'antd'; // Assuming the use of Ant Design library for form components
class CustomForm extends React.Component {
render() {
const {
layout = 'vertical',
form,
onSubmit,
onFinishFailed,
className,
style
} = this.props;
return (
<Form
layout={layout}
form={form}
autoComplete="off"
onFinish={onSubmit}
className={`form-container ${className || ''}`}
onFinishFailed={onFinishFailed}
style={style}
>
{/* Add form fields and submit button here */}
</Form>
);
}
}
export default CustomForm; |
import {
CacheInterceptor,
CacheModule,
MiddlewareConsumer,
Module,
NestModule,
} from '@nestjs/common'
import { ConfigModule, ConfigService } from '@nestjs/config'
import { TypeOrmModule, TypeOrmModuleOptions } from '@nestjs/typeorm'
import * as Joi from 'joi'
import { SnakeNamingStrategy } from 'typeorm-naming-strategies'
import { AppController } from './app.controller'
import { UserEntity } from './users/users.entity'
import { UsersModule } from './users/users.module'
import { TagsModule } from './tags/tags.module'
import { BlogsModule } from './blogs/blogs.module'
import { VisitorsModule } from './visitors/visitors.module'
import { BlogEntity } from './blogs/blogs.entity'
import { BlogImageEntity } from './blogs/blog-images.entity'
import { TagEntity } from './tags/tags.entity'
import { VisitorEntity } from './visitors/visitors.entity'
import { APP_INTERCEPTOR } from '@nestjs/core'
// Async TypeORM options factory: connection settings are read from the
// environment (via ConfigService, validated by the Joi schema below) when
// the module initializes.
const typeOrmModuleOptions = {
  useFactory: async (
    configService: ConfigService,
  ): Promise<TypeOrmModuleOptions> => ({
    // snake_case columns in Postgres for camelCase entity properties
    namingStrategy: new SnakeNamingStrategy(),
    type: 'postgres',
    host: configService.get('DB_HOST'),
    port: configService.get('DB_PORT'),
    username: configService.get('DB_USERNAME'),
    password: configService.get('DB_PASSWORD'),
    database: configService.get('DB_NAME'),
    entities: [
      UserEntity,
      BlogEntity,
      BlogImageEntity,
      TagEntity,
      VisitorEntity,
    ],
    // Schema sync is deliberately off -- it would rewrite tables on boot.
    synchronize: false, //! warning
    autoLoadEntities: true,
    // Query logging everywhere except production
    logging: configService.get('NODE_ENV') === 'production' ? false : true,
    keepConnectionAlive: true,
  }),
  inject: [ConfigService],
}
// Root module: wires environment validation, the in-memory cache, TypeORM
// and the feature modules together.
@Module({
  imports: [
    ConfigModule.forRoot({
      isGlobal: true,
      // Fail fast at startup if a required environment variable is
      // missing or malformed.
      validationSchema: Joi.object({
        NODE_ENV: Joi.string()
          .valid('development', 'production', 'test', 'provision')
          .default('development'),
        PORT: Joi.number().default(5500),
        SECRET_KEY: Joi.string().required(),
        ADMIN_USER: Joi.string().required(),
        ADMIN_PASSWORD: Joi.string().required(),
        DB_USERNAME: Joi.string().required(),
        DB_PASSWORD: Joi.string().required(),
        DB_HOST: Joi.string().required(),
        DB_PORT: Joi.number().required(),
        DB_NAME: Joi.string().required(),
      }),
    }),
    CacheModule.register({
      ttl: 20, // seconds
      max: 20, // maximum number of items in cache
    }),
    TypeOrmModule.forRootAsync(typeOrmModuleOptions),
    UsersModule,
    TagsModule,
    BlogsModule,
    VisitorsModule,
  ],
  controllers: [AppController],
  providers: [
    {
      // Registers response caching globally for every controller route.
      provide: APP_INTERCEPTOR,
      useClass: CacheInterceptor,
    },
  ],
})
export class AppModule implements NestModule {
  configure(consumer: MiddlewareConsumer) {
    // Intentionally empty: no middleware registered at the moment.
    // const DEBUG = process.env.NODE_ENV === 'development' ? true : false
  }
}
|
what I suppose to do :(()) |
package be.kwakeroni.evelyn.model.impl;
import be.kwakeroni.evelyn.model.Event;
import be.kwakeroni.evelyn.model.ParseException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Serializes {@link Event}s to single-line, pipe-delimited records and back.
 *
 * Record layout: a one-character marker (' ' for plain records, '\' for
 * escaped ones), then timestamp|objectId|user|operation|data. Newlines,
 * pipes and backslashes inside the data column are escaped.
 */
class RecordStructure {

    private static final RecordStructure INSTANCE = new RecordStructure();
    private static final DateTimeFormatter FORMAT = DateTimeFormatter.ofPattern("yyyyMMddHHmmss");

    public static RecordStructure getInstance() {
        return INSTANCE;
    }

    private RecordStructure() {
    }

    // Characters that force a record into escaped form: newlines and pipes.
    private static final Pattern ESCAPE_DETECT_PATTERN = Pattern.compile("(\\r\\n?|\\n|\\|)");
    // Characters rewritten while escaping: newlines, backslashes and pipes.
    private static final Pattern ESCAPE_PATTERN = Pattern.compile("(\\r\\n?|\\n|\\\\|\\|)");

    /** Renders an event as one record line (see class comment for layout). */
    public String toData(Event event) {
        String data = event.getData();
        boolean isEscaping = ESCAPE_DETECT_PATTERN.matcher(data).find();
        StringBuffer buffer = (isEscaping) ? new StringBuffer("\\") : new StringBuffer(" ");
        buffer.append(event.getTime().format(FORMAT)).append('|');
        buffer.append(check(event.getObjectId())).append('|');
        buffer.append(check(event.getUser())).append('|');
        buffer.append(check(event.getOperation())).append('|');
        if (isEscaping) {
            Matcher matcher = ESCAPE_PATTERN.matcher(data);
            while (matcher.find()) {
                if ("\n".equals(matcher.group(1))
                        || "\r\n".equals(matcher.group(1))
                        || "\r".equals(matcher.group(1))) {
                    // Every newline flavor collapses to the two-char sequence \n
                    matcher.appendReplacement(buffer, "\\\\n");
                } else {
                    // Pipes and backslashes get a backslash prefix
                    matcher.appendReplacement(buffer, "\\\\$1");
                }
            }
            matcher.appendTail(buffer);
        } else {
            buffer.append(data);
        }
        return buffer.toString();
    }

    // NOTE(review): currently a no-op; columns other than data are not
    // validated against embedded pipes or newlines.
    private String check(String field) {
        return field;
    }

    /**
     * Parses one record line into an {@link Event} view over its columns.
     *
     * @throws ParseException when the line does not have exactly 5 columns
     */
    public Event toEvent(String data) throws ParseException {
        String[] split = splitAndUnescape(data);
        if (split.length != 5) {
            throw new ParseException("Unexpected number of columns: " + split.length);
        }
        return new Event() {
            public String getTimestamp() {
                return split[0];
            }

            @Override
            public LocalDateTime getTime() {
                return LocalDateTime.parse(getTimestamp(), FORMAT);
            }

            @Override
            public String getUser() {
                return split[2];
            }

            @Override
            public String getObjectId() {
                return split[1];
            }

            @Override
            public String getOperation() {
                return split[3];
            }

            @Override
            public String getData() {
                return split[4];
            }
        };
    }

    // Splits on unescaped pipes only ("\|" does not terminate a column).
    private static final Pattern SPLIT_PATTERN = Pattern.compile("(?<!\\\\)\\|");
    private static final Pattern UNESCAPE_PATTERN = Pattern.compile("\\\\([n|\\\\])");

    private String[] splitAndUnescape(String data) {
        // The -1 limit keeps trailing empty strings: a record whose data
        // column is empty ends with '|' and previously split into only 4
        // columns, making toEvent reject a perfectly valid record.
        if (data.startsWith("\\")) {
            String[] array = SPLIT_PATTERN.split(data, -1);
            // Remove leading escape marker
            array[0] = array[0].substring(1);
            // Unescape data; guarded so malformed input reaches toEvent's
            // ParseException instead of an ArrayIndexOutOfBoundsException
            if (array.length == 5) {
                array[4] = unescape(array[4]);
            }
            return array;
        } else {
            // Not escaped.
            String[] array = data.split("\\|", -1);
            // Remove leading space
            array[0] = array[0].substring(1);
            return array;
        }
    }

    private String unescape(String string) {
        Matcher matcher = UNESCAPE_PATTERN.matcher(string);
        StringBuffer buffer = new StringBuffer();
        while (matcher.find()) {
            String escaped = matcher.group(1);
            if ("n".equals(escaped)) {
                // \n unescapes to the platform line separator
                matcher.appendReplacement(buffer, System.lineSeparator());
            } else {
                matcher.appendReplacement(buffer, "$1");
            }
        }
        matcher.appendTail(buffer);
        return buffer.toString();
    }
}
|
<filename>lang/py/cookbook/v2/source/cb2_8_7_sol_1.py
# code snippet to include in your sitecustomize.py
import sys
def info(type, value, tb):
    """sys.excepthook replacement (Python 2): drop into pdb post-mortem on
    uncaught exceptions, but only when a debugger session makes sense.

    Falls back to the default hook when running interactively (sys.ps1
    exists), when stderr/stdin are not ttys, or for SyntaxErrors (no frame
    worth debugging).
    """
    if hasattr(sys, 'ps1') or not (
        sys.stderr.isatty() and sys.stdin.isatty()
        ) or issubclass(type, SyntaxError):
        # Interactive mode, no tty-like device, or syntax error: nothing
        # to do but call the default hook
        sys.__excepthook__(type, value, tb)
    else:
        # Imported lazily so normal runs never pay for them
        import traceback, pdb
        # You are NOT in interactive mode; so, print the exception...
        traceback.print_exception(type, value, tb)
        print  # Python 2 print statement: emits a blank separator line
        # ...then start the debugger in post-mortem mode
        pdb.pm()

# Install the hook for every subsequent uncaught exception.
sys.excepthook = info
|
#! /bin/bash
# Set up a DBS development checkout with WMCore's pylint configuration.
# Abort on the first failed command (e.g. network errors from curl/git)
# instead of silently continuing with a half-built workspace.
set -e

# -p: succeed even when the directory already exists (safe to re-run)
mkdir -p /home/dmwm/dbs_test/
pushd /home/dmwm/dbs_test/
curl -o .pylintrc https://raw.githubusercontent.com/dmwm/WMCore/master/standards/.pylintrc
git clone https://github.com/dmwm/DBS.git
pushd DBS
git checkout master
popd
popd
|
#!/usr/bin/env bash
# ******************************
# * *
# * Configure OSX GUI *
# * *
#*******************************
# Credits https://pawelgrzybek.com/change-macos-user-preferences-via-command-line/
# Apply a curated set of macOS `defaults` preferences, with a full
# `defaults read` dump taken before and after so changes can be diffed.
_installOSXsetup() {
    unset THISNAME DATA
    local THISNAME='Configure OSX GUI'
    local DATA=$(date +%Y-%m-%d-%S)
    _f_alert_check "Checking for ${THISNAME} ..."
    # * Backup before changes
    _f_alert_notice "Backup before changes in file ${DATA}_before.txt"
    read -p "$(_f_alert_warning "Overwriting ... Continue? ... [yN]")" -n 1
    echo ''
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        defaults read > zz_pref_pane_settings/${DATA}_before.txt
        # Close System Preferences so it cannot overwrite our changes
        osascript -e 'tell application "System Preferences" to quit'
        # To make changes that require system password uncomment following
        #sudo -v
        #while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
        # * Boot
        # Disable the sound effects on boot
        sudo nvram SystemAudioVolume=" "
        # * System
        # Save to disk (not to iCloud) by default
        defaults write NSGlobalDomain NSDocumentSaveNewDocumentsToCloud -bool false
        # Remove duplicates in the “Open With” menu (also see `lscleanup` alias)
        /System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/Support/lsregister -kill -r -domain local -domain system -domain user
        # Disable auto-correct
        defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false
        # Always show scroll bars
        defaults write NSGlobalDomain AppleShowScrollBars -string "Always"
        # Keep filename extensions hidden in Finder
        # NOTE(review): the original comment said "Show all filename
        # extensions" but the value is false; flip to true if showing
        # extensions was the actual intent.
        defaults write NSGlobalDomain AppleShowAllExtensions -bool false
        # Avoid creation of .DS_Store files on network volumes
        defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true
        # * Language
        defaults write NSGlobalDomain AppleLanguages -array "it"
        defaults write NSGlobalDomain AppleLocale -string "it_IT@currency=EUR"
        defaults write NSGlobalDomain AppleMeasurementUnits -string "Centimeters"
        defaults write NSGlobalDomain AppleMetricUnits -bool true
        # * Screen
        # System Preferences > Desktop & Screen Saver > Start after: Never
        defaults -currentHost write com.apple.screensaver idleTime -int 0
        # Require password immediately after sleep or screen saver begins
        defaults write com.apple.screensaver askForPassword -int 1
        defaults write com.apple.screensaver askForPasswordDelay -int 0
        # * Finder
        # Show the ~/Library folder
        chflags nohidden ~/Library
        # Show the /Volumes folder
        # sudo chflags nohidden /Volumes
        # Show icons for hard drives, servers, and removable media on the desktop
        defaults write com.apple.finder ShowExternalHardDrivesOnDesktop -bool true
        defaults write com.apple.finder ShowHardDrivesOnDesktop -bool true
        defaults write com.apple.finder ShowMountedServersOnDesktop -bool true
        defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool true
        # New Finder windows open in the home folder
        defaults write com.apple.finder NewWindowTarget -string "PfHm"
        defaults write com.apple.finder NewWindowTargetPath -string "file://${HOME}"
        # Finder: show status bar
        defaults write com.apple.finder ShowStatusBar -bool true
        # Finder: show path bar
        defaults write com.apple.finder ShowPathbar -bool true
        # * Terminal
        # Stop “Resume” feature
        defaults write com.apple.Terminal NSQuitAlwaysKeepsWindows -bool false
        # * Dock
        # System Preferences > Dock > Automatically hide and show the Dock:
        defaults write com.apple.dock autohide -bool true
        # System Preferences > Dock > Show indicators for open applications
        defaults write com.apple.dock show-process-indicators -bool true
        # * Trackpad
        # System Preferences > Trackpad > Tap to click
        # (the original wrote the Bluetooth-trackpad key twice; once is enough)
        defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -bool true
        defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
        defaults write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
        # Disable “natural” (Lion-style) scrolling
        defaults write NSGlobalDomain com.apple.swipescrolldirection -bool false
        # * Photos
        # Prevent Photos from opening automatically when devices are plugged in
        defaults -currentHost write com.apple.ImageCapture disableHotPlug -bool true
        defaults -currentHost write com.apple.ImageCapture2 HotPlugActionPath ''
        # Save screenshots to the desktop
        defaults write com.apple.screencapture location -string "$HOME/Desktop"
        # Save screenshots in JPG format (other options: BMP, GIF, JPG, PDF, TIFF)
        defaults write com.apple.screencapture type jpg
        # * iMail
        # Disable signing emails by default
        # defaults write ~/Library/Preferences/org.gpgtools.gpgmail SignNewEmailsByDefault -bool false
        # Kill affected apps so the new preferences take effect
        for app in "Dock" "Finder"; do
            killall "${app}" > /dev/null 2>&1
        done
        # * Backup after changes
        _f_alert_notice "Backup after changes in file ${DATA}_after.txt"
        defaults read > zz_pref_pane_settings/${DATA}_after.txt
        _f_alert_warning "Some changes require a logout/restart to take effect."
    fi
    _f_alert_success "${THISNAME}"
    unset THISNAME DATA
}
# Run the setup, then print the closing banner.
_installOSXsetup
_f_by_by
|
<gh_stars>10-100
#include "MemBitMap.hpp"
#include "catch2/catch.hpp"
CATCH_TEST_CASE("MemBitMap tests")
{
    // Stack allocation instead of new/delete: a failing CATCH_REQUIRE
    // unwinds out of the section, and the raw `delete` at the end of the
    // old version was then skipped, leaking the bitmap. Catch re-enters
    // the test case per section, so each section still gets a fresh map.
    MemBitMap bitmap;

    CATCH_SECTION("basic map")
    {
        bitmap.add_map(0x1000, 0x100);
        CATCH_REQUIRE(bitmap.has_addr(0x1000));
        bitmap.set_bit_range(0x1000, 0x10, MapFlag::BLOCK);
        CATCH_REQUIRE(bitmap.get_bit(0x1001, MapFlag::BLOCK));
        CATCH_REQUIRE_FALSE(bitmap.get_bit(0x1001, MapFlag::SCAN));
        CATCH_REQUIRE(bitmap.get_bit(0x1001, MapFlag::ANY));
        CATCH_REQUIRE(bitmap.get_bit(0x1001, (MapFlag::BLOCK | MapFlag::SCAN)));
        CATCH_REQUIRE(bitmap.get_bit(0x1001, (MapFlag::SCAN ^ MapFlag::ANY)));
        bitmap.set_bit_range(0x1020, 0x10, (MapFlag::BLOCK | MapFlag::SWITCH));
        CATCH_REQUIRE(bitmap.get_bit(0x1021, MapFlag::BLOCK));
        CATCH_REQUIRE(bitmap.get_bit(0x1021, MapFlag::SWITCH));
        CATCH_REQUIRE(bitmap.get_bit(0x1021, (MapFlag::BLOCK | MapFlag::SCAN)));
        CATCH_REQUIRE_FALSE(bitmap.get_bit(0x1021, MapFlag::SCAN));
    }
    CATCH_SECTION("overlapping maps") {
        bitmap.add_map(0x1000, 0x100);
        bitmap.set_bit_range(0x1000, 0x10, (MapFlag::BLOCK | MapFlag::SWITCH));
        CATCH_REQUIRE(bitmap.get_bit(0x1001, MapFlag::BLOCK));
        CATCH_REQUIRE(bitmap.get_bit(0x1001, MapFlag::SWITCH));
        CATCH_REQUIRE(bitmap.get_bit(0x1001, (MapFlag::BLOCK | MapFlag::SWITCH)));
        CATCH_REQUIRE(bitmap.get_flag(0x1001) == (MapFlag::BLOCK | MapFlag::SWITCH));
    }
    CATCH_SECTION("clearing ranges") {
        bitmap.add_map(0x1000, 0x100);
        bitmap.set_bit_range(0x1000, 0x10, MapFlag::BLOCK);
        CATCH_REQUIRE(bitmap.get_bit(0x1001, MapFlag::BLOCK));
        CATCH_REQUIRE(bitmap.clear_bit_range(0x1000, 0x5));
        CATCH_REQUIRE_FALSE(bitmap.get_bit(0x1000, MapFlag::BLOCK));
        CATCH_REQUIRE(bitmap.get_bit(0x1006, MapFlag::BLOCK));
    }
}
|
/**
* The MIT License (MIT)
*
* Copyright (c) 2014 <NAME>, University of Massachusetts
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#ifndef POMDP_PBVI_H
#define POMDP_PBVI_H
#include "pomdp.h"
#include "../core/policy/policy_alpha_vectors.h"
#include "../core/policy/policy_alpha_vector.h"
#include "../core/states/states_map.h"
#include "../core/actions/actions_map.h"
#include "../core/observations/observations_map.h"
#include "../core/state_transitions/state_transitions_map.h"
#include "../core/observation_transitions/observation_transitions_map.h"
#include "../core/rewards/saso_rewards.h"
#include "../core/horizon.h"
/**
* List the possible expansion rules available while using PBVI.
*/
enum POMDPPBVIExpansionRule {
	NONE,                                     // do not expand the belief set
	RANDOM_BELIEF_SELECTION,                  // random new beliefs, one per current point
	STOCHASTIC_SIMULATION_RANDOM_ACTION,      // simulate belief tree with random actions
	STOCHASTIC_SIMULATION_GREEDY_ACTION,      // simulate with epsilon-greedy actions
	STOCHASTIC_SIMULATION_EXPLORATORY_ACTION, // pick farthest reachable belief per action
	GREEDY_ERROR_REDUCTION,                   // greedy error reduction (not yet implemented)
	NUM_EXPANSION_RULES                       // sentinel: number of expansion rules
};
/**
* Solve an POMDP via Point-Based Value Iteration (PBVI) (finite or infinite horizon). This
* solver has the following requirements:
* - POMDP states must be of type FiniteStates.
* - POMDP actions must be of type FiniteActions.
* - POMDP observations must be of type FiniteObservations.
* - POMDP state transitions must be of type FiniteStateTransitions.
* - POMDP observation transitions must be of type FiniteObservationTransitions.
* - POMDP rewards must be of type SASRewards.
*/
class POMDPPBVI {
public:
	/**
	 * The default constructor for the POMDPPBVI class. Default number of iterations for infinite
	 * horizon POMDPs is 1. The default expansion rule is Random Belief Selection.
	 */
	POMDPPBVI();

	/**
	 * A constructor for the POMDPPBVI class which allows for the specification of the expansion rule,
	 * and the number of iterations (both updates and expansions) to run for infinite horizon.
	 * The default is 1 for both.
	 * @param expansionRule The expansion rule to use.
	 * @param updateIterations The number of update iterations to run for infinite horizon POMDPs.
	 * @param expansionIterations The number of expansion iterations to run for infinite horizon POMDPs.
	 */
	POMDPPBVI(POMDPPBVIExpansionRule expansionRule, unsigned int updateIterations, unsigned int expansionIterations);

	/**
	 * The deconstructor for the POMDPPBVI class. This method frees all the belief state memory.
	 */
	virtual ~POMDPPBVI();

	/**
	 * Add an initial belief state which is used to seed the belief states before computing the optimal policy.
	 * Note: This relinquishes control of the belief state's memory management to this class.
	 * @param b The initial set of belief states before calling 'solve'.
	 */
	virtual void add_initial_belief_state(BeliefState *b);

	/**
	 * Set the initial set of belief states which are used to seed the belief states before computing
	 * the optimal policy. Note: This relinquishes control of the belief states' memory management to this class.
	 * @param initialBeliefStates The initial set of belief states before calling 'solve'.
	 */
	virtual void set_initial_belief_states(const std::vector<BeliefState *> &initialBeliefStates);

	/**
	 * Set the expansion rule to add belief points.
	 * @param expansionRule The expansion rule to use.
	 */
	virtual void set_expansion_rule(POMDPPBVIExpansionRule expansionRule);

	/**
	 * Set the number of update iterations to run for infinite horizon POMDPs.
	 * @param iterations The number of update iterations to run for infinite horizon POMDPs.
	 */
	virtual void set_num_update_iterations(unsigned int iterations);

	/**
	 * Set the number of expansion iterations to run for infinite horizon POMDPs.
	 * @param iterations The number of expansion iterations to run for infinite horizon POMDPs.
	 */
	virtual void set_num_expansion_iterations(unsigned int iterations);

	/**
	 * Get the initial set of belief states which are used to seed the belief states before computing
	 * the optimal policy.
	 * @return The initial set of belief states before calling 'solve'.
	 */
	virtual std::vector<BeliefState *> &get_initial_belief_states();

	/**
	 * Get the set of belief states which were used to compute the optimal policy. This vector is only
	 * populated after calling 'solve'.
	 * @return The final set of belief states after calling 'solve'.
	 */
	virtual std::vector<BeliefState *> &get_belief_states();

	/**
	 * Get the expansion rule used to add belief points.
	 * (The original comment was a copy-paste of the setter's.)
	 * @return The expansion rule currently in use.
	 */
	virtual POMDPPBVIExpansionRule get_expansion_rule() const;

	/**
	 * Get the number of update iterations to run for infinite horizon POMDPs.
	 * @return The number of update iterations to run for infinite horizon POMDPs.
	 */
	virtual unsigned int get_num_update_iterations() const;

	/**
	 * Get the number of expansion iterations to run for infinite horizon POMDPs.
	 * @return The number of expansion iterations to run for infinite horizon POMDPs.
	 */
	virtual unsigned int get_num_expansion_iterations() const;

	/**
	 * Compute the optimal number of update iterations to run for infinite horizon POMDPs, given
	 * the desired tolerance, requiring knowledge of the reward function.
	 * @param pomdp The partially observable Markov decision process to use.
	 * @param epsilon The desired tolerance between value functions to check for convergence.
	 * @throw RewardException The POMDP did not have a SARewards rewards object.
	 */
	virtual void compute_num_update_iterations(POMDP *pomdp, double epsilon);

	/**
	 * Solve the POMDP provided using point-based value iteration.
	 * @param pomdp The partially observable Markov decision process to solve.
	 * @throw CoreException The POMDP was null.
	 * @throw StateException The POMDP did not have a FiniteStates states object.
	 * @throw ActionException The POMDP did not have a FiniteActions actions object.
	 * @throw ObservationException The POMDP did not have a FiniteObservations observations object.
	 * @throw StateTransitionsException The POMDP did not have a FiniteStateTransitions state transitions object.
	 * @throw ObservationTransitionsException The POMDP did not have a FiniteObservationTransitions observation transitions object.
	 * @throw RewardException The POMDP did not have a SASRewards rewards object.
	 * @throw PolicyException An error occurred computing the policy.
	 * @return Return the optimal policy as a finite state controller (infinite horizon) or tree (finite horizon).
	 */
	virtual PolicyAlphaVectors *solve(POMDP *pomdp);

	/**
	 * Reset this POMDP PBVI solver. This method frees all the belief state memory.
	 */
	virtual void reset();

protected:
	/**
	 * Solve a finite horizon POMDP using point-based value iteration.
	 * @param S The finite states.
	 * @param A The finite actions.
	 * @param Z The finite observations.
	 * @param T The finite state transition function.
	 * @param O The finite observation transition function.
	 * @param R The state-action-state-observation rewards.
	 * @param h The horizon.
	 * @throw PolicyException An error occurred computing the policy.
	 * @return Return the optimal policy as a collection of alpha vectors.
	 */
	virtual PolicyAlphaVectors *solve_finite_horizon(StatesMap *S, ActionsMap *A, ObservationsMap *Z,
			StateTransitions *T, ObservationTransitions *O, Rewards *R,
			Horizon *h);

	/**
	 * Solve an infinite horizon POMDP using point-based value iteration.
	 * @param S The finite states.
	 * @param A The finite actions.
	 * @param Z The finite observations.
	 * @param T The finite state transition function.
	 * @param O The finite observation transition function.
	 * @param R The state-action-state-observation rewards.
	 * @param h The horizon.
	 * @throw PolicyException An error occurred computing the policy.
	 * @return Return the optimal policy as a collection of alpha vectors.
	 */
	virtual PolicyAlphaVectors *solve_infinite_horizon(StatesMap *S, ActionsMap *A, ObservationsMap *Z,
			StateTransitions *T, ObservationTransitions *O, Rewards *R,
			Horizon *h);

	/**
	 * Expand the set of beliefs following Random Belief Selection. This works by randomly selecting a set of new
	 * belief points at each expansion. One new point is selected for each current belief point, doubling the total
	 * quantity each time.
	 * @param S The finite states.
	 */
	virtual void expand_random_belief_selection(StatesMap *S);

	/**
	 * Expand the set of beliefs following Stochastic Simulation with Random Actions. "Stochastic Simulation" means it
	 * generates belief points which are reachable given the initial set of belief points, i.e., it traverses the belief
	 * tree. In this case, for each belief point it randomly selects a state, proportional to the belief, then randomly
	 * selects an action (uniformly), then randomly selects a next state and next observation. The result is a new belief
	 * point.
	 * @param S The finite states.
	 * @param A The finite actions.
	 * @param Z The finite observations.
	 * @param T The finite state transition function.
	 * @param O The finite observation transition function.
	 */
	virtual void expand_stochastic_simulation_random_actions(StatesMap *S, ActionsMap *A, ObservationsMap *Z,
			StateTransitions *T, ObservationTransitions *O);

	/**
	 * Expand the set of beliefs following Stochastic Simulation with Greedy Action. "Stochastic Simulation" means it
	 * generates belief points which are reachable given the initial set of belief points, i.e., it traverses the belief
	 * tree. In this case, for each belief point it randomly selects a state, proportional to the belief, then randomly
	 * rolls a die. If it is less than some epsilon, it randomly selects an action (uniformly); otherwise, it selects the
	 * optimal action. Next, it randomly selects a next state and next observation. The result is a new belief point.
	 * @param S The finite states.
	 * @param A The finite actions.
	 * @param Z The finite observations.
	 * @param T The finite state transition function.
	 * @param O The finite observation transition function.
	 * @param gamma The current set of alpha vectors.
	 */
	virtual void expand_stochastic_simulation_greedy_action(StatesMap *S, ActionsMap *A, ObservationsMap *Z,
			StateTransitions *T, ObservationTransitions *O, std::vector<PolicyAlphaVector *> &gamma);

	/**
	 * Expand the set of beliefs following Stochastic Simulation with Exploratory Action. "Stochastic Simulation" means it
	 * generates belief points which are reachable given the initial set of belief points, i.e., it traverses the belief
	 * tree. In this case, for each belief point it adds a new belief point which maximizes over the actions, given a randomly
	 * selected next belief point following this action, selecting the point which is farthest away from the closest belief point.
	 * @param S The finite states.
	 * @param A The finite actions.
	 * @param Z The finite observations.
	 * @param T The finite state transition function.
	 * @param O The finite observation transition function.
	 */
	virtual void expand_stochastic_simulation_exploratory_action(StatesMap *S, ActionsMap *A, ObservationsMap *Z,
			StateTransitions *T, ObservationTransitions *O);

	/**
	 * Expand the set of beliefs following Greedy Error Reduction. TODO: Implement.
	 */
	virtual void expand_greedy_error_reduction();

	/**
	 * The expansion rule to use which adds belief points.
	 */
	POMDPPBVIExpansionRule rule;

	/**
	 * The number of update iterations until the solver stops for infinite horizon POMDPs.
	 */
	unsigned int updates;

	/**
	 * The number of expansion iterations until the solver stops for infinite horizon POMDPs.
	 */
	unsigned int expansions;

	/**
	 * The initial set of belief points.
	 */
	std::vector<BeliefState *> initialB;

	/**
	 * The final set of belief points after the solver's iterations.
	 */
	std::vector<BeliefState *> B;

};
#endif // POMDP_PBVI_H
|
#!/bin/bash
# AUTHORS
#
# The Veracruz Development Team.
#
# COPYRIGHT
#
# See the `LICENSE.markdown` file in the Veracruz root directory for licensing
# and copyright information.

# Boot the OP-TEE QEMU (ARMv8) image used for Veracruz testing.
#
# Abort on the first failing command: without this, a failed `cd` would let
# the symlink creation and the QEMU invocation below run against whatever
# directory the shell happened to be in.
set -e

OPTEE_DIR=/work/rust-optee-trustzone-sdk/optee-qemuv8-3.7.0

cd "${OPTEE_DIR}/build"
# Expose the buildroot rootfs image next to the firmware binaries.
ln -sf "${OPTEE_DIR}/out-br/images/rootfs.cpio.gz" ../out/bin
# -m 1057: guest memory in MiB (unusual value kept from the original config);
# serial 0 goes to stdio, serial 1 is captured in /tmp/serial.log.
cd "${OPTEE_DIR}/out/bin" && "${OPTEE_DIR}/qemu/aarch64-softmmu/qemu-system-aarch64" \
    -nodefaults \
    -nographic \
    -serial stdio -serial file:/tmp/serial.log \
    -smp 2 \
    -s -machine virt,secure=on -cpu cortex-a57 \
    -d unimp -semihosting-config enable,target=native \
    -m 1057 \
    -bios bl1.bin \
    -initrd rootfs.cpio.gz \
    -kernel Image -no-acpi \
    -append 'console=ttyAMA0,38400 keep_bootcon root=/dev/vda2' \
    -fsdev local,id=fsdev0,path=/tmp/vc_test/shared,security_model=none \
    -device virtio-9p-device,fsdev=fsdev0,mount_tag=host \
    -netdev user,id=vmnic \
    -device virtio-net-device,netdev=vmnic
|
#!/bin/sh
# Slurm batch job: train the LSTM-DQN TextWorld baseline on a GPU node.
#
# BUG FIX: the --partition and --gres lines below were missing the leading
# '#'. '#SBATCH' is a directive Slurm parses from comment lines; without the
# '#' the shell tried to execute a nonexistent 'SBATCH' command and Slurm
# never saw the partition/GPU request.
#SBATCH -N 1	  # nodes requested
#SBATCH -n 1	  # tasks requested
#SBATCH --partition=standard
#SBATCH --gres=gpu:8
#SBATCH --mem=12000  # memory in Mb
#SBATCH --time=0-08:00:00

# CUDA/cuDNN toolchain paths for the cluster's shared installs.
export CUDA_HOME=/opt/cuda-9.0.176.1/
export CUDNN_HOME=/opt/cuDNN-7.0/
export STUDENT_ID=$(whoami)
export LD_LIBRARY_PATH=${CUDNN_HOME}/lib64:${CUDA_HOME}/lib64:$LD_LIBRARY_PATH
export LIBRARY_PATH=${CUDNN_HOME}/lib64:$LIBRARY_PATH
export CPATH=${CUDNN_HOME}/include:$CPATH
export PATH=${CUDA_HOME}/bin:${PATH}
# NOTE(review): PYTHON_PATH is assigned from $PATH — this looks like it was
# meant to be PYTHONPATH; confirm intent before changing.
export PYTHON_PATH=$PATH

# Per-user scratch space on the node-local disk.
mkdir -p /disk/scratch/${STUDENT_ID}
export TMPDIR=/disk/scratch/${STUDENT_ID}/
export TMP=/disk/scratch/${STUDENT_ID}/
mkdir -p ${TMP}/datasets/
export DATASET_DIR=${TMP}/datasets/

# Activate the relevant virtual environment:
source /home/${STUDENT_ID}/miniconda3/bin/activate textworld
cd baselines/sample_submission_lstm-dqn
python train.py ../../experiments/banana_baseline/games/ws-2_ql-2_no-4_seed-1234.ulx -c ../../experiments/banana_baseline/config/ws-2_ql-2_no-4_seed-1234.yaml
|
// Navigation index for the documentation page of a C++ test source file:
// each entry maps a BOOST_FIXTURE_TEST_CASE to its anchor in
// _maximum_for_leaky_relu_8cpp.xhtml. This file appears to be
// Doxygen-generated — regenerate it rather than editing by hand (TODO confirm).
var _maximum_for_leaky_relu_8cpp =
[
    [ "BOOST_FIXTURE_TEST_CASE", "_maximum_for_leaky_relu_8cpp.xhtml#a40c4197840ac14d9cccab5585285ad37", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_maximum_for_leaky_relu_8cpp.xhtml#a89f2f61aa18c4c179f26e8a0d328b12c", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_maximum_for_leaky_relu_8cpp.xhtml#ab9a81bbe6f6f26726e1f1ca82de30f90", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_maximum_for_leaky_relu_8cpp.xhtml#a340395ee6c0f467fcacdb0ee0068b251", null ],
    [ "BOOST_FIXTURE_TEST_CASE", "_maximum_for_leaky_relu_8cpp.xhtml#ab7430cd4a19a02d8b56e2e47adbebad3", null ]
];
// from: https://gitlab.com/mpapp-public/prosemirror-recreate-steps/blob/master/src/recreate.js
import {
Transform, ReplaceStep
} from 'prosemirror-transform'
import {
applyPatch, createPatch
} from 'rfc6902'
import {diffWordsWithSpace, diffChars} from 'diff'
// Builds a single ReplaceStep that rewrites fromDoc into toDoc, or returns
// false when the two documents have identical content.
function getReplaceStep (fromDoc, toDoc) {
  let diffStart = toDoc.content.findDiffStart(fromDoc.content)
  if (diffStart === null) {
    return false
  }
  // `a` is the diff end within toDoc, `b` the diff end within fromDoc.
  let {a: endInTo, b: endInFrom} = toDoc.content.findDiffEnd(fromDoc.content)
  const overlap = diffStart - Math.min(endInTo, endInFrom)
  if (overlap > 0) {
    // If the changed regions overlap, there is some freedom of choice in how
    // to place the start/end boundary of the inserted/removed slice. We pick
    // the extreme with the lowest depth value.
    const preferFront =
      fromDoc.resolve(diffStart - overlap).depth < toDoc.resolve(endInTo + overlap).depth
    if (preferFront) {
      diffStart -= overlap
    } else {
      endInTo += overlap
      endInFrom += overlap
    }
  }
  return new ReplaceStep(diffStart, endInFrom, toDoc.slice(diffStart, endInTo))
}
// Rebuilds a ProseMirror Transform that converts fromDoc into toDoc by
// diffing the documents' JSON representations (rfc6902 patches) and replaying
// the resulting operations as ProseMirror steps.
class RecreateTransform {
  constructor(fromDoc, toDoc, complexSteps, wordDiffs) {
    this.fromDoc = fromDoc
    this.toDoc = toDoc
    this.complexSteps = complexSteps // Whether to return steps other than ReplaceSteps
    this.wordDiffs = wordDiffs // Whether to make text diffs cover entire words
    this.schema = fromDoc.type.schema
    this.tr = new Transform(fromDoc) // accumulates the recreated steps
  }

  // Runs the recreation and returns the accumulated Transform.
  init() {
    if (this.complexSteps) {
      // For First steps: we create versions of the documents without marks as
      // these will only confuse the diffing mechanism and marks won't cause
      // any mapping changes anyway.
      this.currentJSON = this.marklessDoc(this.fromDoc).toJSON()
      this.finalJSON = this.marklessDoc(this.toDoc).toJSON()
      this.ops = createPatch(this.currentJSON, this.finalJSON)
      this.recreateChangeContentSteps()
      this.recreateChangeMarkSteps()
    } else {
      // We don't differentiate between mark changes and other changes.
      this.currentJSON = this.fromDoc.toJSON()
      this.finalJSON = this.toDoc.toJSON()
      this.ops = createPatch(this.currentJSON, this.finalJSON)
      this.recreateChangeContentSteps()
    }
    this.simplifyTr()
    return this.tr
  }

  // First step: find content changing steps. Consumes this.ops, grouping
  // patch operations until applying the group yields a schema-valid document,
  // then converts each group into ProseMirror steps.
  recreateChangeContentSteps() {
    let ops = []
    while (this.ops.length) {
      let op = this.ops.shift(),
        toDoc = false
      // pathParts is derived from the FIRST op of the group and is used below
      // to classify the whole group.
      const afterStepJSON = JSON.parse(JSON.stringify(this.currentJSON)),
        pathParts = op.path.split('/')
      ops.push(op)
      // Keep pulling ops until the patched JSON parses into a valid document.
      while (!toDoc) {
        applyPatch(afterStepJSON, [op])
        try {
          toDoc = this.schema.nodeFromJSON(afterStepJSON)
          toDoc.check()
        } catch (error) {
          toDoc = false
          if (this.ops.length) {
            op = this.ops.shift()
            ops.push(op)
          } else {
            throw new Error('No valid diff possible!')
          }
        }
      }
      if (this.complexSteps && ops.length === 1 && (pathParts.includes('attrs') || pathParts.includes('type'))) {
        // Node markup is changing
        this.addSetNodeMarkup()
        ops = []
      } else if (ops.length === 1 && op.op === 'replace' && pathParts[pathParts.length - 1] === 'text') {
        // Text is being replaced, we apply text diffing to find the smallest possible diffs.
        this.addReplaceTextSteps(op, afterStepJSON)
        ops = []
      } else {
        if (this.addReplaceStep(toDoc, afterStepJSON)) {
          ops = []
        }
      }
    }
  }

  recreateChangeMarkSteps() {
    // Now the documents should be the same, except their marks, so everything should map 1:1.
    // Second step: Iterate through the toDoc and make sure all marks are the same in tr.doc
    this.toDoc.descendants((tNode, tPos) => {
      if (!tNode.isInline) {
        return true
      }
      this.tr.doc.nodesBetween(tPos, tPos + tNode.nodeSize, (fNode, fPos) => {
        if (!fNode.isInline) {
          return true
        }
        // Overlapping range between the two inline nodes.
        const from = Math.max(tPos, fPos),
          to = Math.min(tPos + tNode.nodeSize, fPos + fNode.nodeSize)
        // Remove marks present in tr.doc but absent in toDoc ...
        fNode.marks.forEach(nodeMark => {
          if (!nodeMark.isInSet(tNode.marks)) {
            this.tr.removeMark(from, to, nodeMark)
          }
        })
        // ... and add marks present in toDoc but absent in tr.doc.
        tNode.marks.forEach(nodeMark => {
          if (!nodeMark.isInSet(fNode.marks)) {
            this.tr.addMark(from, to, nodeMark)
          }
        })
      })
    })
  }

  // Returns a copy of doc with all marks removed. `doc.nodeSize - 2` spans
  // the full content inside the outer node's open/close tokens.
  marklessDoc(doc) {
    const tr = new Transform(doc)
    tr.removeMark(0, doc.nodeSize - 2)
    return tr.doc
  }

  // From http://prosemirror.net/examples/footnote/
  // Applies one ReplaceStep bridging this.currentJSON and toDoc.
  // NOTE(review): returns false when no step is needed, but returns undefined
  // (also falsy) after SUCCESSFULLY applying a step. The caller in
  // recreateChangeContentSteps() tests this return value truthily, so its
  // `ops = []` reset never runs on the success path — confirm against upstream.
  addReplaceStep(toDoc, afterStepJSON) {
    const fromDoc = this.schema.nodeFromJSON(this.currentJSON),
      step = getReplaceStep(fromDoc, toDoc)
    if (!step) {
      return false
    } else if (!this.tr.maybeStep(step).failed) {
      this.currentJSON = afterStepJSON
    } else {
      throw new Error('No valid step found.')
    }
  }

  // Handles a single op that changes a node's attrs/type: finds the first
  // differing node and applies setNodeMarkup, then recomputes the patch since
  // the markup change may have invalidated the remaining ops.
  addSetNodeMarkup() {
    const fromDoc = this.schema.nodeFromJSON(this.currentJSON),
      toDoc = this.schema.nodeFromJSON(this.finalJSON),
      start = toDoc.content.findDiffStart(fromDoc.content),
      fromNode = fromDoc.nodeAt(start),
      toNode = toDoc.nodeAt(start)
    if (start != null) {
      this.tr.setNodeMarkup(start, fromNode.type === toNode.type ? null : toNode.type, toNode.attrs, toNode.marks)
      this.currentJSON = this.marklessDoc(this.tr.doc).toJSON()
      // Setting the node markup may have invalidated more ops, so we calculate them again.
      this.ops = createPatch(this.currentJSON, this.finalJSON)
    }
  }

  // Replaces a text node's value using fine-grained (char or word) text diffs.
  addReplaceTextSteps(op, afterStepJSON) {
    // We find the position number of the first character in the string by
    // applying two different sentinel values ('xx'/'yy') at the op's path and
    // diffing the two resulting documents.
    const op1 = Object.assign({}, op, {value: 'xx'}),
      op2 = Object.assign({}, op, {value: 'yy'})
    const afterOP1JSON = JSON.parse(JSON.stringify(this.currentJSON)),
      afterOP2JSON = JSON.parse(JSON.stringify(this.currentJSON)),
      pathParts = op.path.split('/')
    let obj = this.currentJSON
    applyPatch(afterOP1JSON, [op1])
    applyPatch(afterOP2JSON, [op2])
    const op1Doc = this.schema.nodeFromJSON(afterOP1JSON),
      op2Doc = this.schema.nodeFromJSON(afterOP2JSON)
    let offset = op1Doc.content.findDiffStart(op2Doc.content)
    const marks = op1Doc.resolve(offset + 1).marks()
    // Walk the JSON path to the current text value.
    pathParts.shift()
    while (pathParts.length) {
      const pathPart = pathParts.shift()
      obj = obj[pathPart]
    }
    const finalText = op.value,
      currentText = obj
    const textDiffs = this.wordDiffs ? diffWordsWithSpace(currentText, finalText) : diffChars(currentText, finalText)
    while (textDiffs.length) {
      const diff = textDiffs.shift()
      if (diff.added) {
        // Adjacent add+remove pairs become a single replaceWith.
        if (textDiffs.length && textDiffs[0].removed) {
          const nextDiff = textDiffs.shift()
          this.tr.replaceWith(
            offset,
            offset + nextDiff.value.length,
            this.schema.nodeFromJSON({type: 'text', text: diff.value}).mark(marks)
          )
        } else {
          this.tr.insert(
            offset,
            this.schema.nodeFromJSON({type: 'text', text: diff.value}).mark(marks)
          )
        }
        offset += diff.value.length
      } else if (diff.removed) {
        if (textDiffs.length && textDiffs[0].added) {
          const nextDiff = textDiffs.shift()
          this.tr.replaceWith(
            offset,
            offset + diff.value.length,
            this.schema.nodeFromJSON({type: 'text', text: nextDiff.value}).mark(marks)
          )
          offset += nextDiff.value.length
        } else {
          this.tr.delete(offset, offset + diff.value.length)
        }
      } else {
        // Unchanged segment: just advance the offset.
        offset += diff.value.length
      }
    }
    this.currentJSON = afterStepJSON
  }

  // join adjacent ReplaceSteps
  simplifyTr() {
    if (!this.tr.steps.length) {
      return
    }
    const newTr = new Transform(this.tr.docs[0]),
      oldSteps = this.tr.steps.slice()
    while (oldSteps.length) {
      let step = oldSteps.shift()
      // step.merge() is used both as a mergeability test and, for non-Replace
      // steps, as the actual merge.
      while (oldSteps.length && step.merge(oldSteps[0])) {
        const addedStep = oldSteps.shift()
        if (step instanceof ReplaceStep && addedStep instanceof ReplaceStep) {
          // Recompute a single ReplaceStep spanning both applications.
          step = getReplaceStep(newTr.doc, addedStep.apply(step.apply(newTr.doc).doc).doc)
        } else {
          step = step.merge(addedStep)
        }
      }
      newTr.step(step)
    }
    this.tr = newTr
  }
}
// Convenience entry point: builds a RecreateTransform for the two documents
// and runs it, returning the Transform that rewrites fromDoc into toDoc.
export function recreateTransform(fromDoc, toDoc, complexSteps = true, wordDiffs = false) {
  return new RecreateTransform(fromDoc, toDoc, complexSteps, wordDiffs).init()
}
|
<gh_stars>0
import {
async,
inject,
TestBed
} from '@angular/core/testing';
import {
MockBackend
} from '@angular/http/testing';
import {
XHRBackend,
Response,
HttpModule
} from '@angular/http';
import { OAuthService } from './oauth.service';
import { AUTH_CONFIG } from '../models/opAuthConfig';
import 'rxjs/add/observable/of';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/do';
import 'rxjs/add/operator/toPromise';
// Minimal stand-in for the AUTH_CONFIG injection token used by the specs
// below; all values are arbitrary placeholders, not real credentials or
// endpoints.
const AuthConfigMock = {
  clientId: '1',
  clientSecret: '2',
  baseUrl: 'games.com',
  frontUrl: 'google.com',
  grantPath: '/oauth/v2/token',
  revokePath: '/oauth/v2/revoke',
  googleMapsApiKey: 'A3'
};
// Unit tests for OAuthService wired against Angular's MockBackend.
describe('OAuthService', () => {
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      imports: [HttpModule],
      providers: [
        OAuthService,
        { provide: XHRBackend, useClass: MockBackend },
        { provide: AUTH_CONFIG, useValue: AuthConfigMock }
      ]
    });
  }));

  // BUG FIX: these two specs previously called inject(...) inside the `it`
  // callback without invoking the function it returns, so their expectations
  // never executed. Passing the injector-wrapped function directly to `it`
  // (the pattern the XHRBackend spec below already used) makes them run.
  it('can instantiate service when inject service',
    inject([OAuthService], (service: OAuthService) => {
      expect(service instanceof OAuthService).toBe(true);
    })
  );
  it('can instantiate service with "new"',
    inject([OAuthService], (service: OAuthService) => {
      expect(service instanceof OAuthService).toBe(true, 'new service should be ok');
    })
  );
  it('can provide the mockBackend as XHRBackend',
    inject([XHRBackend], (backend: MockBackend) => {
      expect(backend).not.toBeNull('backend should be provided');
    })
  );

  describe('OAuthService', () => {
    let service: OAuthService;
    beforeEach(inject([OAuthService], (ser: OAuthService) => {
      service = ser;
    }));

    // BUG FIX: the descriptions of the next two specs were swapped relative
    // to the methods they exercise (getClientToken fetches the client token,
    // getAccessToken the access token).
    it('should have expected client token', async(inject([], () => {
      service.getClientToken().subscribe(
        (data: Response) => expect(data).toBeTruthy()
      );
    })));
    it('should have expected access token', async(inject([], () => {
      let username = '<EMAIL>';
      let password = '<PASSWORD>!';
      service.getAccessToken(username, password).subscribe(
        (data: Response) => expect(data).toBeTruthy()
      );
    })));
    it('should not have expected access token', async(inject([], () => {
      let username = '<EMAIL>';
      let password = '<PASSWORD>!';
      service.getAccessToken(username, password).subscribe(
        data => {},
        error => {
          expect(error.status).toEqual(400);
        }
      );
    })));
    it('should have refresh token', async(inject([], () => {
      service.getRefreshToken().subscribe(
        (data: Response) => {
          expect(data).toBeTruthy();
        }
      );
    })));
  });
});
|
/*
* Copyright (c) 2018-2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.grons.metrics4.scala
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
// Behavioural spec for StringUtils: dot-collapsing used when building metric
// names, and the custom replace helper.
class StringUtilsSpec extends AnyFunSpec {
  describe("collapseDots") {
    it("strips leading dot") {
      StringUtils.collapseDots(".foo.bar") should equal ("foo.bar")
    }
    it("strips trailing dot") {
      StringUtils.collapseDots("foo.bar.") should equal ("foo.bar")
    }
    it("collapses dots at the beginning of the String") {
      StringUtils.collapseDots("....foo.bar") should equal ("foo.bar")
    }
    it("collapses dots at the end of the String") {
      StringUtils.collapseDots("foo.bar....") should equal ("foo.bar")
    }
    it("collapses dots in the middle of the String") {
      StringUtils.collapseDots("foo....bar...baz") should equal ("foo.bar.baz")
    }
    it("works on empty string") {
      StringUtils.collapseDots("") should equal ("")
    }
    it("works on just dots") {
      StringUtils.collapseDots(".") should equal ("")
      StringUtils.collapseDots("..") should equal ("")
      StringUtils.collapseDots(".....") should equal ("")
    }
    it("doesn't modify an already valid String") {
      // Reference equality: a no-op input must be returned as-is, not copied.
      val s = "foo.bar.baz"
      StringUtils.collapseDots(s) should be theSameInstanceAs (s)
    }
  }
  describe("replace") {
    it("doesn't replace anything in empty Strings") {
      StringUtils.replace("", "foo", "bar") should equal ("")
    }
    it("replaces repeated occurrences") {
      StringUtils.replace("queued", "ue", "") should equal ("qd")
    }
    it("doesn't replace non-matching String") {
      StringUtils.replace("queued", "zz", "") should equal ("queued")
    }
    it("can replace with a longer String") {
      StringUtils.replace("abXYab", "ab", "foobar") should equal ("foobarXYfoobar")
    }
    it("will not allow replacing an empty string") {
      // Empty search string is rejected rather than looping forever.
      an[IllegalArgumentException] shouldBe thrownBy {
        StringUtils.replace("abXYab", "", "a")
      }
    }
  }
}
|
<filename>nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/cluster/clusternodegroup.py
#
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class clusternodegroup(base_resource) :
    """ Configuration for Node group object type resource. """
    def __init__(self) :
        # Writable configuration attributes (sent to the appliance).
        self._name = None
        self._strict = None
        self._sticky = None
        self._state = None
        self._priority = None
        # Read-only attributes reported back by the appliance.
        self._currentnodemask = None
        self._backupnodemask = None
        self._boundedentitiescntfrompe = None
        self._activelist = None
        self._backuplist = None
        # Result-count placeholder; count()/count_filtered() read it back via
        # __dict__['___count'] (note: three underscores).
        self.___count = None

    # --- writable properties -------------------------------------------------

    @property
    def name(self) :
        r"""Name of the nodegroup. The name uniquely identifies the nodegroup on the cluster.<br/>Minimum length = 1.
        """
        try :
            return self._name
        except Exception as e:
            raise e

    @name.setter
    def name(self, name) :
        r"""Name of the nodegroup. The name uniquely identifies the nodegroup on the cluster.<br/>Minimum length = 1
        """
        try :
            self._name = name
        except Exception as e:
            raise e

    @property
    def strict(self) :
        r"""Specifies whether cluster nodes, that are not part of the nodegroup, will be used as backup for the nodegroup.
        * Enabled - When one of the nodes goes down, no other cluster node is picked up to replace it. When the node comes up, it will continue being part of the nodegroup.
        * Disabled - When one of the nodes goes down, a non-nodegroup cluster node is picked up and acts as part of the nodegroup. When the original node of the nodegroup comes up, the backup node will be replaced.<br/>Default value: NO<br/>Possible values = YES, NO.
        """
        try :
            return self._strict
        except Exception as e:
            raise e

    @strict.setter
    def strict(self, strict) :
        r"""Specifies whether cluster nodes, that are not part of the nodegroup, will be used as backup for the nodegroup.
        * Enabled - When one of the nodes goes down, no other cluster node is picked up to replace it. When the node comes up, it will continue being part of the nodegroup.
        * Disabled - When one of the nodes goes down, a non-nodegroup cluster node is picked up and acts as part of the nodegroup. When the original node of the nodegroup comes up, the backup node will be replaced.<br/>Default value: NO<br/>Possible values = YES, NO
        """
        try :
            self._strict = strict
        except Exception as e:
            raise e

    @property
    def sticky(self) :
        r"""Only one node can be bound to nodegroup with this option enabled. It specifies whether to prempt the traffic for the entities bound to nodegroup when owner node goes down and rejoins the cluster.
        * Enabled - When owner node goes down, backup node will become the owner node and takes the traffic for the entities bound to the nodegroup. When bound node rejoins the cluster, traffic for the entities bound to nodegroup will not be steered back to this bound node. Current owner will have the ownership till it goes down.
        * Disabled - When one of the nodes goes down, a non-nodegroup cluster node is picked up and acts as part of the nodegroup. When the original node of the nodegroup comes up, the backup node will be replaced.<br/>Default value: NO<br/>Possible values = YES, NO.
        """
        try :
            return self._sticky
        except Exception as e:
            raise e

    @sticky.setter
    def sticky(self, sticky) :
        r"""Only one node can be bound to nodegroup with this option enabled. It specifies whether to prempt the traffic for the entities bound to nodegroup when owner node goes down and rejoins the cluster.
        * Enabled - When owner node goes down, backup node will become the owner node and takes the traffic for the entities bound to the nodegroup. When bound node rejoins the cluster, traffic for the entities bound to nodegroup will not be steered back to this bound node. Current owner will have the ownership till it goes down.
        * Disabled - When one of the nodes goes down, a non-nodegroup cluster node is picked up and acts as part of the nodegroup. When the original node of the nodegroup comes up, the backup node will be replaced.<br/>Default value: NO<br/>Possible values = YES, NO
        """
        try :
            self._sticky = sticky
        except Exception as e:
            raise e

    @property
    def state(self) :
        r"""State of the nodegroup. All the nodes binding to this nodegroup must have the same state. ACTIVE/SPARE/PASSIVE.<br/>Possible values = ACTIVE, SPARE, PASSIVE.
        """
        try :
            return self._state
        except Exception as e:
            raise e

    @state.setter
    def state(self, state) :
        r"""State of the nodegroup. All the nodes binding to this nodegroup must have the same state. ACTIVE/SPARE/PASSIVE.<br/>Possible values = ACTIVE, SPARE, PASSIVE
        """
        try :
            self._state = state
        except Exception as e:
            raise e

    @property
    def priority(self) :
        r"""Priority of Nodegroup. This priority is used for all the nodes bound to the nodegroup for Nodegroup selection.<br/>Maximum length = 31.
        """
        try :
            return self._priority
        except Exception as e:
            raise e

    @priority.setter
    def priority(self, priority) :
        r"""Priority of Nodegroup. This priority is used for all the nodes bound to the nodegroup for Nodegroup selection.<br/>Maximum length = 31
        """
        try :
            self._priority = priority
        except Exception as e:
            raise e

    # --- read-only properties ------------------------------------------------

    @property
    def currentnodemask(self) :
        r"""Bitmap of current nodes in this nodegroup.
        """
        try :
            return self._currentnodemask
        except Exception as e:
            raise e

    @property
    def backupnodemask(self) :
        r"""Bitmap of backup nodes in this nodegroup.
        """
        try :
            return self._backupnodemask
        except Exception as e:
            raise e

    @property
    def boundedentitiescntfrompe(self) :
        r"""Count of bounded entities to this nodegroup accoding to PE.
        """
        try :
            return self._boundedentitiescntfrompe
        except Exception as e:
            raise e

    @property
    def activelist(self) :
        r"""Active node list of this nodegroup.
        """
        try :
            return self._activelist
        except Exception as e:
            raise e

    @property
    def backuplist(self) :
        r"""Backup node list of this nodegroup.
        """
        try :
            return self._backuplist
        except Exception as e:
            raise e

    # --- framework plumbing --------------------------------------------------

    def _get_nitro_response(self, service, response) :
        r""" converts nitro response into object and returns the object array in case of get request.
        """
        try :
            result = service.payload_formatter.string_to_resource(clusternodegroup_response, response, self.__class__.__name__)
            if(result.errorcode != 0) :
                # Error code 444 means the session expired; drop it before raising.
                if (result.errorcode == 444) :
                    service.clear_session(self)
                if result.severity :
                    if (result.severity == "ERROR") :
                        raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
                else :
                    raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
            return result.clusternodegroup
        except Exception as e :
            raise e

    def _get_object_name(self) :
        r""" Returns the value of object identifier argument
        """
        try :
            if self.name is not None :
                return str(self.name)
            return None
        except Exception as e :
            raise e

    # --- CRUD classmethods: each accepts a single resource or a list ---------

    @classmethod
    def add(cls, client, resource) :
        r""" Use this API to add clusternodegroup.
        """
        try :
            if type(resource) is not list :
                addresource = clusternodegroup()
                addresource.name = resource.name
                addresource.strict = resource.strict
                addresource.sticky = resource.sticky
                addresource.state = resource.state
                addresource.priority = resource.priority
                return addresource.add_resource(client)
            else :
                if (resource and len(resource) > 0) :
                    addresources = [ clusternodegroup() for _ in range(len(resource))]
                    for i in range(len(resource)) :
                        addresources[i].name = resource[i].name
                        addresources[i].strict = resource[i].strict
                        addresources[i].sticky = resource[i].sticky
                        addresources[i].state = resource[i].state
                        addresources[i].priority = resource[i].priority
                result = cls.add_bulk_request(client, addresources)
                return result
        except Exception as e :
            raise e

    @classmethod
    def update(cls, client, resource) :
        r""" Use this API to update clusternodegroup.
        """
        try :
            if type(resource) is not list :
                updateresource = clusternodegroup()
                # Note: sticky is intentionally not copied here — unlike add(),
                # update does not carry the sticky attribute.
                updateresource.name = resource.name
                updateresource.strict = resource.strict
                updateresource.state = resource.state
                updateresource.priority = resource.priority
                return updateresource.update_resource(client)
            else :
                if (resource and len(resource) > 0) :
                    updateresources = [ clusternodegroup() for _ in range(len(resource))]
                    for i in range(len(resource)) :
                        updateresources[i].name = resource[i].name
                        updateresources[i].strict = resource[i].strict
                        updateresources[i].state = resource[i].state
                        updateresources[i].priority = resource[i].priority
                result = cls.update_bulk_request(client, updateresources)
                return result
        except Exception as e :
            raise e

    @classmethod
    def unset(cls, client, resource, args) :
        r""" Use this API to unset the properties of clusternodegroup resource.
        Properties that need to be unset are specified in args array.
        """
        try :
            if type(resource) is not list :
                unsetresource = clusternodegroup()
                # `resource` may be either a bare name or a clusternodegroup object.
                if type(resource) != type(unsetresource):
                    unsetresource.name = resource
                else :
                    unsetresource.name = resource.name
                return unsetresource.unset_resource(client, args)
            else :
                if type(resource[0]) != cls :
                    if (resource and len(resource) > 0) :
                        unsetresources = [ clusternodegroup() for _ in range(len(resource))]
                        for i in range(len(resource)) :
                            unsetresources[i].name = resource[i]
                else :
                    if (resource and len(resource) > 0) :
                        unsetresources = [ clusternodegroup() for _ in range(len(resource))]
                        for i in range(len(resource)) :
                            unsetresources[i].name = resource[i].name
                result = cls.unset_bulk_request(client, unsetresources, args)
                return result
        except Exception as e :
            raise e

    @classmethod
    def delete(cls, client, resource) :
        r""" Use this API to delete clusternodegroup.
        """
        try :
            if type(resource) is not list :
                deleteresource = clusternodegroup()
                # `resource` may be either a bare name or a clusternodegroup object.
                if type(resource) != type(deleteresource):
                    deleteresource.name = resource
                else :
                    deleteresource.name = resource.name
                return deleteresource.delete_resource(client)
            else :
                if type(resource[0]) != cls :
                    if (resource and len(resource) > 0) :
                        deleteresources = [ clusternodegroup() for _ in range(len(resource))]
                        for i in range(len(resource)) :
                            deleteresources[i].name = resource[i]
                else :
                    if (resource and len(resource) > 0) :
                        deleteresources = [ clusternodegroup() for _ in range(len(resource))]
                        for i in range(len(resource)) :
                            deleteresources[i].name = resource[i].name
                result = cls.delete_bulk_request(client, deleteresources)
                return result
        except Exception as e :
            raise e

    @classmethod
    def get(cls, client, name="", option_="") :
        r""" Use this API to fetch all the clusternodegroup resources that are configured on netscaler.
        """
        try :
            if not name :
                # No name given: fetch every configured nodegroup.
                obj = clusternodegroup()
                response = obj.get_resources(client, option_)
            else :
                if type(name) != cls :
                    if type(name) is not list :
                        obj = clusternodegroup()
                        obj.name = name
                        response = obj.get_resource(client, option_)
                    else :
                        # List of names: one request per name.
                        if name and len(name) > 0 :
                            response = [clusternodegroup() for _ in range(len(name))]
                            obj = [clusternodegroup() for _ in range(len(name))]
                            for i in range(len(name)) :
                                obj[i] = clusternodegroup()
                                obj[i].name = name[i]
                                response[i] = obj[i].get_resource(client, option_)
            return response
        except Exception as e :
            raise e

    @classmethod
    def get_filtered(cls, client, filter_) :
        r""" Use this API to fetch filtered set of clusternodegroup resources.
        filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
        """
        try :
            obj = clusternodegroup()
            option_ = options()
            option_.filter = filter_
            response = obj.getfiltered(client, option_)
            return response
        except Exception as e :
            raise e

    @classmethod
    def count(cls, client) :
        r""" Use this API to count the clusternodegroup resources configured on NetScaler.
        """
        try :
            obj = clusternodegroup()
            option_ = options()
            option_.count = True
            response = obj.get_resources(client, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e :
            raise e

    @classmethod
    def count_filtered(cls, client, filter_) :
        r""" Use this API to count filtered the set of clusternodegroup resources.
        Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
        """
        try :
            obj = clusternodegroup()
            option_ = options()
            option_.count = True
            option_.filter = filter_
            response = obj.getfiltered(client, option_)
            if response :
                return response[0].__dict__['___count']
            return 0
        except Exception as e :
            raise e

    # Enumerations of the allowed values for the corresponding attributes.
    class State:
        ACTIVE = "ACTIVE"
        SPARE = "SPARE"
        PASSIVE = "PASSIVE"

    class Strict:
        YES = "YES"
        NO = "NO"

    class Sticky:
        YES = "YES"
        NO = "NO"
class clusternodegroup_response(base_response) :
    """Response envelope for clusternodegroup NITRO API calls.

    Holds the status fields filled in by the payload formatter plus one
    pre-allocated clusternodegroup object per expected result.
    """
    def __init__(self, length=1) :
        # Fix: the original assigned self.clusternodegroup = [] and then
        # immediately overwrote it below; the dead store is removed.
        self.errorcode = 0
        self.message = ""
        self.severity = ""
        self.sessionid = ""
        # Pre-allocate `length` resource objects for the formatter to populate.
        self.clusternodegroup = [clusternodegroup() for _ in range(length)]
|
package clientAPI.impl;
import javax.smartcardio.Card;
import javax.smartcardio.CardChannel;
import javax.smartcardio.CardException;
import javax.smartcardio.CommandAPDU;
import javax.smartcardio.ResponseAPDU;
/**
* Klasse zur Kapselung der direkten Kommunikation mit der Smartcard.
*
*/
/**
 * Encapsulates the direct, low-level communication with the smart card.
 */
public final class CardConnection {

    /**
     * Open channel to the smart card.
     */
    private CardChannel mChannel;

    public CardConnection(Card card) {
        mChannel = card.getBasicChannel();
    }

    /**
     * Sends a SELECT command to the applet with the given AID.
     *
     * @param aid
     *            AID of the applet
     * @return response APDU
     * @throws CardException
     */
    public ResponseAPDU select(byte[] aid) throws CardException {
        // ISO 7816-4 SELECT: CLA=0x00, INS=0xA4, P1=0x04 (select by AID), P2=0x00.
        return sendAPDU(new CommandAPDU(0x00, 0xA4, 0x04, 0x00, aid));
    }

    /**
     * Sends an APDU given directly as a command object.
     *
     * @param cmd
     *            command
     * @return response APDU
     * @throws CardException
     */
    public ResponseAPDU sendAPDU(CommandAPDU cmd) throws CardException {
        try {
            return mChannel.transmit(cmd);
        } catch (IllegalStateException e) {
            // "Not connected to the card" — message kept in German (runtime string).
            throw new CardException("Nicht zur Karte verbunden");
        } catch (NullPointerException e) {
            // "No answer received" — message kept in German (runtime string).
            throw new CardException("Keine Antwort erhalten");
        }
    }

    /**
     * Sends an APDU, with an explicit LE, whose command is derived from the
     * given header. Validates the arguments against the header's declared
     * LC/LE before sending.
     *
     * @param header
     *            command header
     * @param data
     *            data to send
     * @param expectedResponseLength
     *            expected response length (LE)
     * @return response APDU
     * @throws IllegalArgumentException
     *             if data length or LE conflict with the header's definition
     * @throws CardException
     */
    public ResponseAPDU sendAPDU(CommandHeader header, byte[] data, short expectedResponseLength)
            throws IllegalArgumentException, CardException {
        if (header.definedLC.isPresent()) {
            if (header.definedLC.get() != data.length)
                throw new IllegalArgumentException("data length deviates from defined LC");
        }
        if (header.definedLE.isPresent()) {
            if (header.definedLE.get() != expectedResponseLength)
                throw new IllegalArgumentException("expected LE deviates from defined LE");
        }
        // Choose the CommandAPDU constructor matching the header's LC/LE shape.
        switch (header.type) {
        case NoLC_NoLE:
            return sendAPDU(new CommandAPDU(header.CLA, header.INS, header.P1, header.P2));
        case NoLC_LE:
            return sendAPDU(new CommandAPDU(header.CLA, header.INS, header.P1, header.P2, expectedResponseLength));
        case LC_NoLE:
            return sendAPDU(new CommandAPDU(header.CLA, header.INS, header.P1, header.P2, data));
        case LC_LE:
            return sendAPDU(
                    new CommandAPDU(header.CLA, header.INS, header.P1, header.P2, data, expectedResponseLength));
        default:
            return null;
        }
    }

    /**
     * Sends an APDU whose command is derived from the given header, using the
     * header's own LE if one is defined.
     *
     * @param header
     *            command header
     * @param data
     *            data to send
     * @return response APDU
     * @throws IllegalArgumentException
     *             if data length conflicts with the header's defined LC
     * @throws CardException
     */
    public ResponseAPDU sendAPDU(CommandHeader header, byte[] data) throws IllegalArgumentException, CardException {
        if (header.definedLC.isPresent()) {
            if (header.definedLC.get() != data.length)
                throw new IllegalArgumentException("data length deviates from defined LC");
        }
        switch (header.type) {
        case NoLC_LE:
            if (header.definedLE.isPresent()) {
                return sendAPDU(new CommandAPDU(header.CLA, header.INS, header.P1, header.P2, header.definedLE.get()));
            }
            // intentional fall-through: no LE defined, use the no-LE form
        case NoLC_NoLE:
            return sendAPDU(new CommandAPDU(header.CLA, header.INS, header.P1, header.P2));
        case LC_LE:
            if (header.definedLE.isPresent()) {
                return sendAPDU(
                        new CommandAPDU(header.CLA, header.INS, header.P1, header.P2, data, header.definedLE.get()));
            }
            // intentional fall-through: no LE defined, use the no-LE form
        case LC_NoLE:
            return sendAPDU(new CommandAPDU(header.CLA, header.INS, header.P1, header.P2, data));
        default:
            return null;
        }
    }
}
|
<filename>admin/vue2/element-admin-v3/node_modules/@antv/g-canvas/esm/util/util.js
/**
 * Returns the device pixel ratio of the current environment, falling back to
 * 1 when no `window` exists (e.g. server-side rendering / tests) or when
 * `window.devicePixelRatio` is unset.
 */
export function getPixelRatio() {
  // `typeof` guard: a bare `window` reference throws a ReferenceError in
  // non-browser environments, so the original truthiness check never worked
  // there. Also fall back to 1 when devicePixelRatio is undefined/0.
  return typeof window !== 'undefined' && window.devicePixelRatio ? window.devicePixelRatio : 1;
}
/**
 * Euclidean distance between two points.
 * @param {number} x1 x of the start point
 * @param {number} y1 y of the start point
 * @param {number} x2 x of the end point
 * @param {number} y2 y of the end point
 */
export function distance(x1, y1, x2, y2) {
  const deltaX = x2 - x1;
  const deltaY = y2 - y1;
  return Math.sqrt(deltaX * deltaX + deltaY * deltaY);
}
/**
 * Whether the point (x, y) lies inside the axis-aligned bounding box
 * (edges inclusive).
 * @param {number} minX box origin x
 * @param {number} minY box origin y
 * @param {number} width box width
 * @param {number} height box height
 * @param {number} x x of the point to test
 * @param {number} y y of the point to test
 */
export function inBox(minX, minY, width, height, x, y) {
  const withinX = x >= minX && x <= minX + width;
  const withinY = y >= minY && y <= minY + height;
  return withinX && withinY;
}
/**
 * Whether two axis-aligned boxes overlap (touching edges count as overlapping).
 */
export function intersectRect(box1, box2) {
  // The boxes are disjoint exactly when one lies entirely on one side of the other.
  const separated =
    box2.minX > box1.maxX ||
    box2.maxX < box1.minX ||
    box2.minY > box1.maxY ||
    box2.maxY < box1.minY;
  return !separated;
}
// Merge two regions into their common bounding region.
export function mergeRegion(region1, region2) {
  // When either region is missing, the merge is simply the other one
  // (or undefined/null when both are missing).
  if (!region1) {
    return region2;
  }
  if (!region2) {
    return region1;
  }
  return {
    minX: Math.min(region1.minX, region2.minX),
    minY: Math.min(region1.minY, region2.minY),
    maxX: Math.max(region1.maxX, region2.maxX),
    maxY: Math.max(region1.maxY, region2.maxY),
  };
}
/**
 * Whether two points coincide; a point is given as [x, y].
 * @param {Array} point1 the first point
 * @param {Array} point2 the second point
 */
export function isSamePoint(point1, point2) {
  const [x1, y1] = point1;
  const [x2, y2] = point2;
  return x1 === x2 && y1 === y2;
}
export { default as isNil } from '@antv/util/lib/is-nil';
export { default as isString } from '@antv/util/lib/is-string';
export { default as isFunction } from '@antv/util/lib/is-function';
export { default as isArray } from '@antv/util/lib/is-array';
export { default as each } from '@antv/util/lib/each';
export { default as toRadian } from '@antv/util/lib/to-radian';
export { default as mod } from '@antv/util/lib/mod';
export { default as isNumberEqual } from '@antv/util/lib/is-number-equal';
export { default as requestAnimationFrame } from '@antv/util/lib/request-animation-frame';
export { default as clearAnimationFrame } from '@antv/util/lib/clear-animation-frame';
//# sourceMappingURL=util.js.map |
<filename>src/main/java/chylex/hee/world/feature/blobs/populators/BlobPopulatorLiquidPool.java
package chylex.hee.world.feature.blobs.populators;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import net.minecraft.block.Block;
import net.minecraft.init.Blocks;
import chylex.hee.system.abstractions.Pos;
import chylex.hee.system.abstractions.facing.Facing4;
import chylex.hee.world.feature.blobs.StructureWorldBlob;
import chylex.hee.world.util.IRangeGenerator;
import chylex.hee.world.util.IRangeGenerator.RangeGenerator;
import chylex.hee.world.util.RandomAmount;
public class BlobPopulatorLiquidPool extends BlobPopulator{
    /** Block used to fill generated pools; defaults to air, which makes filling a no-op. */
    private Block block = Blocks.air;
    /** Generator for the amount of pools to attempt; null disables the populator. */
    private IRangeGenerator amount;
    /** Minimum amount of connected fillable positions required to accept a pool. */
    private int minBlocks;

    public BlobPopulatorLiquidPool(int weight){
        super(weight);
    }

    /** Sets the block the pools are filled with. */
    public BlobPopulatorLiquidPool setBlock(Block block){
        this.block = block;
        return this;
    }

    /** Sets a linear random range for the amount of pools per blob. */
    public BlobPopulatorLiquidPool setAmount(int min, int max){
        this.amount = new RangeGenerator(min, max, RandomAmount.linear);
        return this;
    }

    /** Sets a custom generator for the amount of pools per blob. */
    public BlobPopulatorLiquidPool setAmount(IRangeGenerator amount){
        this.amount = amount;
        return this;
    }

    /** Sets the minimum connected-area size a pool must reach to be placed. */
    public BlobPopulatorLiquidPool setMinBlocks(int minBlocks){
        this.minBlocks = minBlocks;
        return this;
    }

    /**
     * Tries to place liquid pools on suitable end stone surfaces.
     * Each attempt picks a random end stone position, flood-fills the connected
     * fillable area and replaces it with {@link #block} when it is large enough.
     */
    @Override
    public void populate(StructureWorldBlob world, Random rand){
        int poolsLeft = (amount == null ? 0 : amount.next(rand));
        if (poolsLeft <= 0)return;

        List<Pos> endStone = world.getEndStoneBlocks();
        // Limit work to a third of the candidate positions; each attempt consumes one.
        int attempts = endStone.size()/3;

        while(--attempts >= 0 && poolsLeft > 0){
            Pos pos = endStone.remove(rand.nextInt(endStone.size()));

            if (canBeChanged(world, pos)){
                Set<Pos> fillBlocks = new HashSet<>();
                fillRecursive(world, pos, fillBlocks);

                // Reject puddles that are too small to look like a pool.
                if (fillBlocks.size() >= minBlocks){
                    for(Pos toFill:fillBlocks){
                        world.setBlock(toFill, block);
                    }

                    --poolsLeft;
                }
            }
        }
    }

    /**
     * Collects all horizontally connected fillable positions starting at {@code pos}.
     * Implemented iteratively with an explicit stack to avoid a potential
     * StackOverflowError on large pools (the previous version recursed once per
     * block); the resulting set is identical to the recursive flood fill.
     */
    private void fillRecursive(StructureWorldBlob world, Pos pos, Set<Pos> fill){
        Deque<Pos> pending = new ArrayDeque<>();
        pending.push(pos);

        while(!pending.isEmpty()){
            Pos current = pending.pop();
            if (!canBeChanged(world, current) || !fill.add(current))continue;

            for(Facing4 facing:Facing4.list){
                pending.push(current.offset(facing));
            }
        }
    }

    /**
     * A position is fillable when it is end stone, sits on end stone, has air
     * above and is horizontally surrounded by end stone on all four sides.
     */
    private boolean canBeChanged(StructureWorldBlob world, Pos pos){
        if (world.getBlock(pos) != Blocks.end_stone)return false;
        if (world.getBlock(pos.getDown()) != Blocks.end_stone)return false;
        if (!world.isAir(pos.getUp()))return false;

        for(Facing4 facing:Facing4.list){
            if (world.getBlock(pos.offset(facing)) != Blocks.end_stone)return false;
        }

        return true;
    }
}
|
#!/usr/bin/env python
# ===========================================================================
# Copyright 2017 `<NAME>`
# Email: ttungl at gmail dot com
#
# Heterogeneous Architecture Configurations Generator for Multi2Sim simulator
# (aka, `HeteroArchGen4M2S`)
# `HeteroArchGen4M2S` is free software, which is freely to be
# redistributed and modified it under the terms of
# the GNU General Public License as published by
# the Free Software Foundation.
# For more details `http://www.gnu.org/licenses`
# `HeteroArchGen4M2S` is written to help you configure M2S
# easily, but non-warranty and non-mechantability.
# ============================================================================
# from file import function
from create_cpu_config import create_cpuconfig
from create_southern_islands_gpuconfig import create_southern_islands_gpuconfig
from create_memconfig import create_memconfig
from create_netconfig import create_netconfig
from create_shell_script import create_shell_script
from create_xml_for_mcpat import create_xml_for_mcpat
# [cpu gpu mc]
# [16 16 4] : 16 nodes/switches on the interconnect
# [48 96 16] : 64 nodes/switches
## CPU Parameters
#### Note: a set of CPU includes two cores.
#### Each core in the set can have its own L1$ (Data&Instr),
#### or it can share the Instruction-L1$ with the other core in that set
#### by enabling the `L1_Inst_shared` flag in the CPU Memory Parameters settings.
num_of_cpu_cores = 16
cpu_frequency = 3400 ## in MHz (3.4GHz)
num_of_threads = 1  # hardware threads per core
ROB_size = 128  # reorder buffer entries
pipelines_size = 4  # pipeline width
bimod_size = 4*1024  # bimodal branch predictor table size
bpred_size = 1*1024  # branch predictor size
x86_max_inst = 100000000  # max number of x86 instructions to simulate
## GPU Parameters
#### Note: a set of GPU includes four compute units.
#### Each two units share one L1$.
#### Each two L1$ share one L2$.
num_of_gpu_cores = 16 ## the number of compute units of GPUs. (each GPU has 4 units.)
type_of_gpu = 'SouthernIslands' ## Note, multi2sim-5.0 does support different types of GPUs, see in Manual.
## CPU Memory Parameters
num_of_MC = 4 # number of memory controllers; [2, 4, 8, 16]
L1_Inst_shared = 0 # enable/disable (1/0) shared Instruction L1$
L1_size = 32 # size of L1$ (kB); [16, 32, 64]
L1_assoc = 1 # associativity of L1$ (#-way) full-assoc
L2_size = 512 # size of L2$ (kB); [256, 512, 1024]
L2_assoc = 8 # associativity of L2$ (#-way); [4, 8, 16]
L1_latency = 1 # latency of L1$ (cycles)
L2_latency = 4 # latency of L2$ (cycles)
L1_blocksize = 64 # blocksize of L1$ (Bytes)
L2_blocksize = 64 # blocksize of L2$ (Bytes)
Memory_latency = 100 # latency of DRAM main memory
## GPU Memory Parameters (iiswc16: <NAME>)
GPU_L1_size = 64 # size of L1$ (kB)
GPU_L1_assoc = 4 # associativity of L1$ (#-way)
GPU_L2_size = 512 # size of L2$ (kB)
GPU_L2_assoc = 16 # associativity of L2$ (#-way)
GPU_L1_latency = 22 # latency of L1$ (ns)
GPU_L2_latency = 63 # latency of L2$ (ns)
GPU_L1_blocksize = 64 # blocksize of L1$ (Bytes)
GPU_L2_blocksize = 64 # blocksize of L2$ (Bytes)
## NOTICE: L3$ shared caches for CPUs and GPUs can be extended if needed. (need a little more work!)
# ```
# ------- An Example of a Heterogeneous CPU-GPU Architecture --------
# |~~CPU~~|..|~~CPU~~~||~~~~~~~~GPU~~~~~~~|..|~~~~~~~~~~GPU~~~~~~~~~|
# |-------| |--------||------------------| |----------------------|
# ||c0||c1|..|c14||c15||cu0||cu1||cu2||cu3|..|cu12||cu13||cu14||cu15|
# |-------| |--------||------------------| |----------------------|
# | | | | \ / \ / \ / \ /
# ---- ---- ---- ---- ----- ----- ----- -----
# |L1$||L1$| |L1$||L1$| |L1$| |L1$| |L1$| |L1$|
# |D/I||D/I| |D/I||D/I| ----- ----- ----- -----
# ---- ---- ---- ---- | | | |
# | | | | ------------net_g0 -------------net_g3
# ------- -------net_c7 | |
# | net_c0 | | |
# ----- ----- ----- -----
# |L2$| |L2$| |L2$| |L2$|
# ----- ----- ----- -----
# | sw0 ... | sw7 | sw8 ... | sw11
# -------------------------------------------------------
# | 2D-Mesh network |
# -------------------------------------------------------
# | sw12 | sw13 | sw14 | sw15 net-l2-mm
# ----- ----- ----- -----
# |MM0| |MM1| |MM2| |MM3|
# ----- ----- ----- -----
# * net_c0: net-cpu-l1-l2-0
# * net_c7: net-cpu-l1-l2-7
# * net_g0: net-gpu-l1-l2-0
# * net_g3: net-gpu-l1-l2-3
# ```
## Calculate the number of L1 caches and L2 caches
# Each core of CPU has one separate L1$.
# Each two-compute units of GPU have one shared L1$.
# Each two L1$ of CPU have one shared L2$.
# Each two L1$ of GPU have one shared L2$.
# All of L2$ connect to the NOC.
# NOTE: floor division (//) keeps these counts integers on both Python 2 and
# Python 3. With plain '/', Python 3 would produce floats (e.g. 8.0) and break
# consumers that expect integer cache counts; under Python 2 the result is
# unchanged.
numL1cpu = num_of_cpu_cores
numL1gpu = num_of_gpu_cores // 2
numL1 = numL1cpu + numL1gpu
##
numL2cpu = numL1cpu // 2
numL2gpu = numL1gpu // 2
numL2 = numL2cpu + numL2gpu
##
num_cache_levels = 2
## Supported technology nodes (nm): [65, 45, 32, 22, 16]
core_tech_node = 45
## List of benchmarks:
# splash2-benchmark = ['radix', 'fmm', 'barnes', 'cholesky', 'fft', 'lu', 'ocean', 'radiosity', 'raytrace', 'water-nsquared', 'water-spatial']
# hetero-mark-benchmark = ['aes', 'fir', 'histogram', 'kmeans', 'page_rank']
# amdsdk2.5-benchmark = ['BinarySearch']
benchmark = 'radix'
if benchmark == '':
    benchmark = 'default_mm'  # fall back to the default workload when none is selected
## [0] disable synthetic workload (using benchmarks),
## [1] enable synthetic workload (not using benchmarks).
# synthetic_workload = 1
## injection rate for synthetic traffic
injection_rate = '0.1'
# numThreads_benchmark = [8, 16, 32, 48, 56, 64, 128, 256]
numThreads_benchmark = 32
## Network Parameters
#### Notice: source-destination nodes' id from the input files should start at 1, not zero.
## For a customized 2D-Mesh network
# HYBRIDLINKS_PATH = 'results_hybrid_local_links/test_topoA_hybridlinks_4x4.txt'
# LOCALLINKS_PATH = 'results_hybrid_local_links/test_topoA_locallinks_4x4.txt'
HYBRIDLINKS_PATH = 'results_hybrid_local_links/topoA_hybridlinks_sync_025_size8x8_normalize_cplex.txt'
# HYBRIDLINKS_PATH = 'results_hybrid_local_links/topoA_hybridlinks_sync_025_size8x8_normalize_Regression.txt'
LOCALLINKS_PATH = 'results_hybrid_local_links/topoA_locallinks_8x8.txt'
## network_mode:
# [0] default 2D-mesh;
# [1] Customized 2D-Mesh Network;
# [2] Torus;
# (optional) [3] Ring
network_mode = 0
net_max_inst = 100000
network_only = 0 ## [1] for network-only, else [0] (full-system).
#### Base conversion
# link_width = 8 Bytes per cycle
# frequency = 1 GHz
# bandwidth = link_width * frequency = 8 GBps
# For example: If you need a link bandwidth capacity = 40 GBps, you need to update the link_width appropriately.
# link_width = bandwidth/frequency = 40GBps/1GHz = 40 Bytes
# Linkwidth's Range: [8, 16, 32, 40, 48, 56, 64, ...]
## for 64-node
HYBRID_LINKWIDTH = 32 ## Bytes per cycle (frequency*bandwidth(= 2.4GHz * 32Bytes/cyc ~ 80GBps))
LOCAL_LINKWIDTH = 16 ## Bytes per cycle (frequency*bandwidth(= 2.4GHz * 16Bytes/cyc ~ 40GBps))
## main()
def main():
    """Generate every Multi2Sim configuration file from the settings above."""
    # Cache and memory latencies feed straight into the memory configuration;
    # for more accurate numbers a user might run CACTI offline and plug the
    # results into the parameter section at the top of this script.
    create_cpuconfig(num_of_cpu_cores, cpu_frequency, num_of_threads, ROB_size,
                     pipelines_size, bimod_size, bpred_size)

    if type_of_gpu == 'SouthernIslands':
        create_southern_islands_gpuconfig(num_of_gpu_cores)

    num_nodes = create_memconfig(
        num_of_cpu_cores, num_of_gpu_cores, type_of_gpu, num_of_MC,
        L1_Inst_shared, L1_size, L1_assoc, L2_size, L2_assoc,
        L1_latency, L2_latency, L1_blocksize, L2_blocksize, Memory_latency,
        GPU_L1_size, GPU_L1_assoc, GPU_L2_size, GPU_L2_assoc,
        GPU_L1_latency, GPU_L2_latency, GPU_L1_blocksize, GPU_L2_blocksize)

    create_netconfig(num_nodes, L2_blocksize, network_mode,
                     LOCALLINKS_PATH, HYBRIDLINKS_PATH,
                     LOCAL_LINKWIDTH, HYBRID_LINKWIDTH)

    create_shell_script(num_of_cpu_cores, num_of_gpu_cores, type_of_gpu,
                        x86_max_inst, benchmark, net_max_inst, network_only,
                        numThreads_benchmark)

    create_xml_for_mcpat(num_nodes, numL1, numL2, num_cache_levels, num_of_MC,
                         ROB_size, benchmark, core_tech_node, cpu_frequency)


if __name__ == "__main__":
    main()
|
package com.github.teocci.android.bluetoothrecycleview.adapters;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.github.teocci.android.bluetoothrecycleview.R;
import com.github.teocci.android.bluetoothrecycleview.holders.DeviceHolder;
import com.github.teocci.android.bluetoothrecycleview.interfaces.BluetoothRequestListener;
import com.github.teocci.android.bluetoothrecycleview.model.Device;
import java.util.List;
/**
* Created by teocci.
*
* @author <EMAIL> on 2018-Sep-06
*/
public class DeviceAdapter extends RecyclerView.Adapter<DeviceHolder>
{
public List<Device> deviceList;
private BluetoothRequestListener listener;
private Context context;
public DeviceAdapter(List<Device> deviceList)
{
this.deviceList = deviceList;
}
public DeviceAdapter(List<Device> deviceList, BluetoothRequestListener listener)
{
this.deviceList = deviceList;
this.listener = listener;
}
@Override
public DeviceHolder onCreateViewHolder(ViewGroup parent, int viewType)
{
context = parent.getContext().getApplicationContext();
View view = LayoutInflater.from(context).inflate(R.layout.bt_device_item, parent, false);
return new DeviceHolder(view , listener);
}
@Override
public void onBindViewHolder(DeviceHolder holder, int position)
{
Device device = deviceList.get(position);
if (device != null) {
holder.setDevice(device);
holder.getTextDeviceName().setText((device.getName() == null || device.getName().isEmpty()) ? "Unknown Device" : device.getName());
holder.getTextDeviceAddress().setText(device.getAddress());
holder.getTextDevicePaired().setText(device.isPaired() ? "Paired" : "Not paired");
holder.getTextDeviceSignal().setText(context.getString(R.string.bt_device_signal, device.getSignal()));
}
}
@Override
public int getItemCount()
{
return deviceList.size();
}
} |
<gh_stars>1-10
"""Leetcode 547. Friend Circles
Medium
URL: https://leetcode.com/problems/friend-circles/
There are N students in a class. Some of them are friends, while some are not.
Their friendship is transitive in nature. For example, if A is a direct friend
of B, and B is a direct friend of C, then A is an indirect friend of C.
And we defined a friend circle is a group of students who are direct or indirect
friends.
Given a N*N matrix M representing the friend relationship between students in
the class. If M[i][j] = 1, then the ith and jth students are direct friends
with each other, otherwise not. And you have to output the total number of
friend circles among all the students.
Example 1:
Input:
[[1,1,0],
[1,1,0],
[0,0,1]]
Output: 2
Explanation:The 0th and 1st students are direct friends, so they are in a friend
circle.
The 2nd student himself is in a friend circle. So return 2.
Example 2:
Input:
[[1,1,0],
[1,1,1],
[0,1,1]]
Output: 1
Explanation:The 0th and 1st students are direct friends, the 1st and 2nd students
are direct friends,
so the 0th and 2nd students are indirect friends. All of them are in the same
friend circle, so return 1.
Note:
- N is in range [1,200].
- M[i][i] = 1 for all students.
- If M[i][j] = 1, then M[j][i] = 1.
"""
class SolutionDFS(object):
    """Count friend circles (connected components) in an adjacency matrix."""

    def _dfs(self, i, M, visited):
        """Mark every student reachable from student i as visited."""
        for friend in range(len(M[i])):
            if M[i][friend] == 1 and friend not in visited:
                visited.add(friend)
                self._dfs(friend, M, visited)

    def findCircleNum(self, M):
        """
        :type M: List[List[int]]
        :rtype: int

        Time complexity: O(n^2) for an n x n friendship matrix, since every
        cell is inspected at most once across all DFS calls.
        Space complexity: O(n) for the visited set and recursion stack.
        """
        n = len(M)
        if n == 1:
            return 1

        visited = set()
        circles = 0
        for student in range(n):
            if student in visited:
                continue
            # An unvisited student starts a brand-new circle; DFS absorbs
            # every direct and indirect friend into the visited set.
            circles += 1
            self._dfs(student, M, visited)
        return circles
def main():
    """Run the two examples from the problem statement."""
    # NOTE: the original used Python 2 `print X` statements, which are a
    # SyntaxError on Python 3; `print(x)` with a single argument behaves
    # identically on both Python 2 and Python 3.
    # Output: 2.
    M = [[1, 1, 0],
         [1, 1, 0],
         [0, 0, 1]]
    print(SolutionDFS().findCircleNum(M))

    # Output: 1.
    M = [[1, 1, 0],
         [1, 1, 1],
         [0, 1, 1]]
    print(SolutionDFS().findCircleNum(M))


if __name__ == '__main__':
    main()
|
#!/bin/bash
# Compile the example with gcov instrumentation (-fprofile-arcs/-ftest-coverage),
# run it to produce the .gcda profile data, then report branch coverage with gcovr.
# CXX defaults to g++ but can be overridden from the environment.
${CXX:-g++} -fprofile-arcs -ftest-coverage -fPIC -O0 example.cpp -o program
./program
#BEGIN gcovr
gcovr -r . --branches
#END gcovr
# Clean up the binary and the generated coverage artifacts (.gcno/.gcda).
rm -f program *.gc*
|
<reponame>jab142/tasktimer<filename>app/src/main/java/com/ergdyne/tasktimer/ExportFragment.java
package com.ergdyne.tasktimer;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Fragment;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.os.Bundle;
import android.os.Environment;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import com.ergdyne.lib.AppConstants;
import com.ergdyne.lib.DBMap;
import com.ergdyne.lib.EpicDate;
import com.ergdyne.lib.ErgAlert;
import com.ergdyne.lib.ErgFormats;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by j on 3/23/17.
*/
//The export and import functionality are just bandaids to give a basic backup functionality.
//It would be much nicer to have the SQLite database on the user's phone syncing with a server.
public class ExportFragment extends Fragment {
    /**********************/
    //variable definitions
    /**********************/

    /** Starts the export of all events to a tab-separated text file. */
    private Button exportButton;
    /** Starts the (destructive) re-import of a previously exported file. */
    private Button importButton;
    /** Hosting activity; used for dialogs, permission checks and resources. */
    private Activity mainActivityConnection;
    /** Name of the backup file in the external storage root directory. */
    public static String filename = "ERGDATA.txt";
    /** Database helper; re-created by each export/import operation. */
    private DBHelper db;

    /**********************/
    //Fragment lifecycle
    /**********************/

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_export, container, false);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        mainActivityConnection = getActivity();
        exportButton = (Button) view.findViewById(R.id.button_export);
        importButton = (Button) view.findViewById(R.id.button_import);
        checkWritePermission();

        //todo move text to the string file since this export has moved from temporary to fixture.
        exportButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (!isExternalStorageAvailable() || !isExternalStorageWritable()) {
                    ErgAlert.alert(mainActivityConnection, getResources().getString(R.string.err_export_blaim_usb));
                } else if (exportData()) {
                    ErgAlert.alert(mainActivityConnection, getResources().getString(R.string.warn_success));
                } else {
                    ErgAlert.alert(mainActivityConnection, getResources().getString(R.string.warn_permission_write));
                }
            }
        });

        importButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                //Importing drops the whole database, so make the user confirm first.
                AlertDialog.Builder b = new AlertDialog.Builder(mainActivityConnection);
                b.setTitle("Ummm!?");
                b.setMessage(getResources().getString(R.string.warn_import_crazy));
                b.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        if (importData()) {
                            ErgAlert.alert(mainActivityConnection, getResources().getString(R.string.warn_success));
                        } else {
                            ErgAlert.alert(mainActivityConnection, getResources().getString(R.string.err_import_failure));
                        }
                    }
                });
                b.setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        //User backed out; nothing to do.
                    }
                });
                b.setIcon(android.R.drawable.ic_dialog_alert);
                b.show();
            }
        });
    }

    /**********************/
    //functions used in lifecycle
    /**********************/

    /** Requests WRITE_EXTERNAL_STORAGE at runtime when it has not been granted yet. */
    private void checkWritePermission() {
        //Check for permissions and get if not available.
        if (ContextCompat.checkSelfPermission(mainActivityConnection,
                android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
                != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(mainActivityConnection,
                    new String[]{android.Manifest.permission.WRITE_EXTERNAL_STORAGE},
                    AppConstants.PERMISSION_WRITE_EXTERNAL
            );
        }
    }

    //These could probably be moved somewhere else.
    private static boolean isExternalStorageWritable() {
        String storageState = Environment.getExternalStorageState();
        return !Environment.MEDIA_MOUNTED_READ_ONLY.equals(storageState);
    }

    private static boolean isExternalStorageAvailable() {
        String storageState = Environment.getExternalStorageState();
        return Environment.MEDIA_MOUNTED.equals(storageState);
    }

    /**********************/
    //export handling
    /**********************/

    /**
     * Writes every event as one tab-separated line (plus a header row) to
     * {@link #filename} in the external storage root.
     *
     * @return true when the file was written completely, false on I/O failure.
     */
    private boolean exportData() {
        //TODO at least convert this to CSV
        String s = "\t"; //Column separator
        String n = "\n"; //Row separator
        String seconds = "(seconds since epoch)";

        db = new DBHelper(mainActivityConnection);
        Cursor events = db.getAllEvents();
        List<String> rows;
        try {
            rows = new ArrayList<>(events.getCount() + 1); //Add 1 for the header.
            //Add column headers.
            rows.add(DBMap.TaskTable.name + s + DBMap.EventTable.start + s + DBMap.EventTable.end + s + DBMap.EventTable.start + seconds + s + DBMap.EventTable.end + seconds + n);
            //Load up the list with one line per event.
            while (events.moveToNext()) {
                String taskName = events.getString(events.getColumnIndex(DBMap.TaskTable.name));
                long start = events.getLong(events.getColumnIndex(DBMap.EventTable.start));
                long end = events.getLong(events.getColumnIndex(DBMap.EventTable.end));
                rows.add(taskName + s + EpicDate.toDateTimeString(start) + s + EpicDate.toDateTimeString(end) + s + start + s + end + n);
            }
        } finally {
            //Cursors hold database resources and were previously leaked; always close.
            events.close();
        }

        //Try to write the data.
        boolean works;
        FileOutputStream outputStream = null;
        try {
            File file = new File(Environment.getExternalStorageDirectory(), filename);
            outputStream = new FileOutputStream(file);
            for (String row : rows) {
                outputStream.write(row.getBytes());
            }
            //Closing inside the try keeps the old behavior: a failed close/flush
            //still reports the export as failed.
            outputStream.close();
            outputStream = null;
            works = true;
        } catch (IOException e) {
            e.printStackTrace();
            works = false;
        } finally {
            if (outputStream != null) {
                try {
                    outputStream.close();
                } catch (IOException ignored) {
                    //Already reporting failure; nothing more to do.
                }
            }
        }
        return works;
    }

    //TODO this would be nicer if it didn't delete everything,
    // but I was in a hurry to get my copy of the app back to running after the need suddenly appeared
    // when my Nexus 5x power button went bad.
    /**
     * Drops and rebuilds the database from the exported file, then restores the
     * settings row from what should be the most recent event.
     *
     * @return true when the file was read completely, false on I/O failure.
     */
    private boolean importData() {
        boolean works;
        db = new DBHelper(mainActivityConnection);
        BufferedReader bufferedReader = null;
        try {
            File file = new File(Environment.getExternalStorageDirectory(), filename);
            bufferedReader = new BufferedReader(new FileReader(file));
            //Drop and Rebuild the database.
            db.resetDB();
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                String[] columns = line.split("\t");
                //Only rows with exactly 5 columns whose epoch columns parse as
                //longs are events; everything else (e.g. the header row) is skipped.
                if (columns.length == 5 && isLong(columns[3]) && isLong(columns[4])) {
                    String name = columns[0];
                    EpicDate start = new EpicDate(Long.parseLong(columns[3]));
                    EpicDate end = new EpicDate(Long.parseLong(columns[4]));
                    insertImport(name, start, end);
                }
            }
            bufferedReader.close();
            bufferedReader = null;
            works = true;
        } catch (IOException e) {
            e.printStackTrace();
            works = false;
        } finally {
            //Previously the reader leaked when readLine threw mid-file.
            if (bufferedReader != null) {
                try {
                    bufferedReader.close();
                } catch (IOException ignored) {
                    //Already reporting failure; nothing more to do.
                }
            }
        }

        //Find the latest event and use its end time as start for settings.
        //Based on the way export is done, the latest event should be the first event.
        //So it should be id 1.
        //It would be more stable to actually look for the latest event.
        Cursor firstEvent = db.getByID(DBMap.EventTable.table, 1);
        try {
            while (firstEvent.moveToNext()) {
                db.updateSettings(firstEvent.getLong(firstEvent.getColumnIndex(DBMap.EventTable.end)), DBMap.TaskTable.defaultID);
            }
        } finally {
            firstEvent.close();
        }
        //Return value is to inform dialogs.
        return works;
    }

    /**
     * Inserts one imported event, discarding rows whose times are not strictly
     * ordered or lie in the future.
     */
    private void insertImport(String name, EpicDate start, EpicDate end) {
        if (start.sEpoch < end.sEpoch && end.sEpoch < db.rightNow()) {
            long taskID = db.findOrInsertTask(name);
            long duration = end.sEpoch - start.sEpoch;
            String durationDisplay = ErgFormats.durationHMS(duration);
            db.insertEvent(start.sEpoch, end.sEpoch, taskID, duration, durationDisplay);
        }
    }

    //TODO move this to a better location. Like in a library.
    /** Whether the input parses as a long. */
    public boolean isLong(String input) {
        try {
            Long.parseLong(input);
            return true;
        } catch (Exception e) {
            return false;
        }
    }
}
|
import React, { Component } from "react";
import PropTypes from "prop-types";
/**
 * Quiz result screen.
 *
 * In kiosk mode it shows the score and the answers that were given. After a
 * post-quiz (for non-child sessions) it additionally collects demographic
 * information about the participant and hands it back to the App component
 * via `submitParticipantInfo`.
 */
class Result extends Component {
  constructor(props) {
    super(props);
    // participant* fields mirror the form inputs; disabled* flags lock a
    // field when the participant record already contains that value.
    this.state = {
      participantFirstName: "",
      participantLastName: "",
      participantGender: "",
      participantOtherGender: "",
      participantRace: "",
      participantOtherRace: "",
      participantAge: "",
      participantZipcode: "",
      participantEmail: "",
      disabledFirstName: false,
      disabledLastName: false,
      disabledGender: false,
      disabledRace: false,
      disabledAge: false,
      disabledZipcode: false,
      disabledEmail: false,
      filledInfo: false,
      score: null
    };
  }

  componentDidMount = () => {
    // Build the kiosk score summary; score is null outside kiosk mode.
    // NOTE(review): `score` is stored in state but is not referenced by
    // render()/renderResult() in this file — confirm it is still needed.
    const answers = this.props.answers.map((answer, key) => (
      <li key={key}>{answer}</li>
    ));
    var score = this.props.kiosk ? (
      <div>
        <h2>
          You scored{" "}
          <strong>
            {this.props.quizResult}/{this.props.total}
          </strong>
          !
        </h2>
        <h5>You answered:</h5>
        <ul>{answers}</ul>
      </div>
    ) : null;
    this.setState({ score });
    this.fetchParticipantData();
  };

  /**
   * Fetches participant data. If data already exists, make those inputs disabled.
   */
  fetchParticipantData = () => {
    let p = this.props.participant;
    if (p) {
      if (p.firstname) {
        this.setState({
          disabledFirstName: true,
          participantFirstName: p.firstname
        });
      }
      if (p.lastname) {
        this.setState({
          disabledLastName: true,
          participantLastName: p.lastname
        });
      }
      if (p.gender) {
        // Check if gender is self-specified
        if (
          p.gender !== "male" &&
          p.gender !== "female" &&
          p.gender !== "nonbinary"
        ) {
          this.setState({
            disabledGender: true,
            participantGender: "other",
            participantOtherGender: p.gender
          });
        } else {
          this.setState({
            disabledGender: true,
            participantGender: p.gender
          });
        }
      }
      if (p.race) {
        // Check if race is self-specified
        if (
          p.race !== "white" &&
          p.race !== "black" &&
          p.race !== "hispanic" &&
          p.race !== "latinx" &&
          p.race !== "east asian" &&
          p.race !== "south asian" &&
          p.race !== "american indian" &&
          p.race !== "pacific islander"
        ) {
          this.setState({
            disabledRace: true,
            participantRace: "other",
            participantOtherRace: p.race
          });
        } else {
          this.setState({
            disabledRace: true,
            participantRace: p.race
          });
        }
      }
      if (p.age) {
        this.setState({
          disabledAge: true,
          participantAge: p.age
        });
      }
      if (p.zipcode) {
        this.setState({
          disabledZipcode: true,
          participantZipcode: p.zipcode
        });
      }
      if (p.email) {
        this.setState({
          disabledEmail: true,
          participantEmail: p.email
        });
      }
    }
  };

  /**
   * Event handler that updates state with the participant's first name.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantFirstName = event => {
    this.setState({ participantFirstName: event.target.value });
  };

  /**
   * Event handler that updates state with the participant's last name.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantLastName = event => {
    this.setState({ participantLastName: event.target.value });
  };

  /**
   * Event handler that updates state with the participant's gender.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantGender = event => {
    this.setState({ participantGender: event.target.value });
  };

  /**
   * Event handler that updates state with the participant's gender if an "other" value was inputted.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantOtherGender = event => {
    this.setState({ participantOtherGender: event.target.value });
  };

  /**
   * Event handler that updates state with the participant's race.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantRace = event => {
    this.setState({ participantRace: event.target.value });
  };

  /**
   * Event handler that updates state with the participant's race if an "other" value was inputted.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantOtherRace = event => {
    this.setState({ participantOtherRace: event.target.value });
  };

  /**
   * Event handler that updates state with the participant's age.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantAge = event => {
    this.setState({ participantAge: event.target.value });
  };

  /**
   * Event handler that updates state with the participant's zipcode.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantZipcode = event => {
    this.setState({ participantZipcode: event.target.value });
  };

  /**
   * Event handler that updates state with the participant's email.
   * @param {Object} event - The DOM event object used to get the value of the trigger element.
   */
  setParticipantEmail = event => {
    this.setState({ participantEmail: event.target.value });
  };

  /**
   * Passing all participant data back to the App component to post to Firebase.
   */
  submitParticipantInfo = () => {
    // Getting other gender input if there is a need to specify
    let gender =
      this.state.participantGender === "other"
        ? this.state.participantOtherGender
        : this.state.participantGender;
    // Getting other race input if there is a need to specify
    let race =
      this.state.participantRace === "other"
        ? this.state.participantOtherRace
        : this.state.participantRace;
    let newParticipant = {
      id: this.props.participant.id,
      firstname: this.state.participantFirstName,
      lastname: this.state.participantLastName,
      email: this.state.participantEmail,
      age: this.state.participantAge,
      gender: gender,
      race: race,
      zipcode: this.state.participantZipcode
    };
    this.props.submitParticipantInfo(newParticipant);
  };

  /**
   * Determines whether to show an end screen or a form to input user data.
   * NOTE(review): returns undefined when quizType is neither "pre" nor "post"
   * and the end-screen conditions do not match — React renders nothing in
   * that case; confirm that is intended.
   * @returns {JSX} The result component.
   */
  renderResult() {
    // Pre-quiz: show end screen
    if (
      this.props.quizType === "pre" ||
      this.props.session.type === "child" ||
      this.state.filledInfo
    ) {
      return (
        <div>
          <h2>You've finished the quiz!</h2>
          <button className="button" onClick={this.props.restartQuiz}>
            Return to starting screen
          </button>
        </div>
      );
    }
    // Post-quiz: participant can input info
    else if (this.props.quizType === "post") {
      return (
        <div>
          <h2>Please complete your participant information</h2>
          <div className="form-field-container">
            <div className="form-left secondary">
              <h5 className="form-label">First Name</h5>
              <input
                type="text"
                value={this.state.participantFirstName}
                onChange={this.setParticipantFirstName}
                disabled={this.state.disabledFirstName}
              />
            </div>
            <div className="form-left secondary">
              <h5 className="form-label">Last Name</h5>
              <input
                type="text"
                value={this.state.participantLastName}
                onChange={this.setParticipantLastName}
                disabled={this.state.disabledLastName}
              />
            </div>
          </div>
          <div className="form-field-container">
            <div className="form-left secondary">
              <h5 className="form-label">Gender</h5>
              <select
                name="type"
                value={this.state.participantGender}
                onChange={this.setParticipantGender}
                disabled={this.state.disabledGender}
              >
                <option value="" style={{ display: "none" }} />
                <option value="male">Male</option>
                <option value="female">Female</option>
                <option value="nonbinary">Nonbinary</option>
                <option value="other">
                  Different identity (please specify)
                </option>
              </select>
            </div>
            {this.state.participantGender === "other" ? (
              <div className="form-left secondary other">
                <h5 className="form-label">Other gender</h5>
                <input
                  type="text"
                  value={this.state.participantOtherGender}
                  onChange={this.setParticipantOtherGender}
                  disabled={this.state.disabledGender}
                />
              </div>
            ) : (
              <div />
            )}
          </div>
          <div className="form-field-container">
            <div className="form-left secondary">
              <h5 className="form-label">Race/Ethnicity</h5>
              <select
                name="type"
                value={this.state.participantRace}
                onChange={this.setParticipantRace}
                disabled={this.state.disabledRace}
              >
                <option value="" style={{ display: "none" }} />
                <option value="white">White/Caucasian</option>
                <option value="black">Black/African-American</option>
                <option value="hispanic">Hispanic</option>
                <option value="latinx">Latino/a</option>
                <option value="east asian">
                  East Asian (e.g., Chinese, Japanese, Vietnamese)
                </option>
                <option value="south asian">
                  South Asian (e.g., Indian, Pakistani, Burmese)
                </option>
                <option value="american indian">
                  American Indian or Alaskan Native
                </option>
                <option value="pacific islander">
                  Pacific Islander or Native Hawaiian
                </option>
                <option value="other">
                  Not listed here or prefer to self-describe
                </option>
              </select>
            </div>
            {this.state.participantRace === "other" ? (
              <div className="form-left secondary other">
                <h5 className="form-label">Other race</h5>
                <input
                  type="text"
                  value={this.state.participantOtherRace}
                  onChange={this.setParticipantOtherRace}
                  disabled={this.state.disabledRace}
                />
              </div>
            ) : (
              <div />
            )}
          </div>
          <div className="form-field-container">
            <div className="form-left secondary">
              <h5 className="form-label">Age</h5>
              <input
                type="number"
                value={this.state.participantAge}
                onChange={this.setParticipantAge}
                disabled={this.state.disabledAge}
              />
            </div>
            <div className="form-left secondary">
              <h5 className="form-label">Zip code</h5>
              <input
                type="text"
                value={this.state.participantZipcode}
                onChange={this.setParticipantZipcode}
                disabled={this.state.disabledZipcode}
              />
            </div>
          </div>
          <div className="form-field-container">
            <div className="form-left secondary">
              <h5 className="form-label">Email</h5>
              <input
                type="email"
                value={this.state.participantEmail}
                onChange={this.setParticipantEmail}
                disabled={this.state.disabledEmail}
              />
            </div>
          </div>
          <button className="button" onClick={this.submitParticipantInfo}>
            Submit
          </button>
        </div>
      );
    }
  }

  /**
   * A wrapper for renderResult.
   * @returns {JSX} The result component.
   */
  render() {
    return (
      <div id="result" className="card-form">
        {this.renderResult()}
        <div className="credits">
          Major support provided by the Andrew W. Mellon Foundation. Generous
          support provided by <NAME>, <NAME> and <NAME>,
          <NAME>, John and <NAME>, <NAME> and <NAME>, Jr., Richard and <NAME>, Ken and <NAME>, Alfred
          and <NAME>, Leni and <NAME>, Jr., <NAME>,
          John and <NAME>, Joan and <NAME>, and donors to the 2018 Mia
          Gala.
        </div>
      </div>
    );
  }
}

Result.propTypes = {
  quizResult: PropTypes.number.isRequired,
  total: PropTypes.number.isRequired,
  restartQuiz: PropTypes.func.isRequired,
  answers: PropTypes.array.isRequired,
  kiosk: PropTypes.bool.isRequired,
  quizType: PropTypes.string.isRequired,
  participant: PropTypes.object.isRequired,
  submitParticipantInfo: PropTypes.func.isRequired,
  session: PropTypes.object.isRequired
};

export default Result;
|
<filename>src/dijkstra/Boj11900.java
package dijkstra;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.PriorityQueue;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 11900번: 차이 그래프
*
* @see https://www.acmicpc.net/problem/11900/
*
*/
/**
 * BOJ 11900 (difference graph): for each query (s, e), answers the shortest
 * path cost between positions whose index difference is (e - s) mod N, or -1
 * when s and e were never connected by any relaxation (tracked via union-find).
 */
public class Boj11900 {
    private static final String NEW_LINE = "\n";
    private static final int INF = 2_000_000_000;

    private static int N;        // number of positions
    private static int[] A;      // edge weight by index difference; A[0] unused (read N-1 values)
    private static int[] dist;   // shortest cost to reach each index difference
    private static int[] parent; // union-find: negative = set size, otherwise parent index

    /** Priority-queue entry: a vertex and the cost to reach it. */
    private static class Node implements Comparable<Node> {
        int node;
        int cost;

        public Node(int node, int cost) {
            this.node = node;
            this.cost = cost;
        }

        @Override
        public int compareTo(Node n) {
            // BUGFIX: the previous "cost < n.cost ? -1 : 1" never returned 0
            // for equal costs, breaking the Comparable contract (and sgn
            // antisymmetry). Integer.compare is a correct total order.
            return Integer.compare(this.cost, n.cost);
        }
    }

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));

        N = Integer.parseInt(br.readLine());
        A = new int[N];
        parent = new int[N];
        parent[0] = -1; // every index starts as its own singleton set

        StringTokenizer st = new StringTokenizer(br.readLine());
        // The input line carries N-1 weights for differences 1..N-1.
        for (int i = 1; i < N; i++) {
            A[i] = Integer.parseInt(st.nextToken());
            parent[i] = -1;
        }
        makeGraph();

        StringBuilder sb = new StringBuilder();
        int Q = Integer.parseInt(br.readLine());
        while (Q-- > 0) {
            st = new StringTokenizer(br.readLine());
            int s = Integer.parseInt(st.nextToken());
            int e = Integer.parseInt(st.nextToken());

            // The answer depends only on the cyclic difference (e - s) mod N.
            int idx = e - s;
            if (idx < 0) idx += N;

            s = find(s);
            e = find(e);
            sb.append(e != s ? -1 : dist[idx]).append(NEW_LINE); // shortest path
        }
        System.out.println(sb.toString());
    }

    /** Runs Dijkstra from every index not yet reached, filling dist[]. */
    private static void makeGraph() {
        dist = new int[N];
        Arrays.fill(dist, INF);

        for (int i = 0; i < N; i++) {
            if (dist[i] != INF) continue; // already covered by an earlier run
            dijkstra(i);
        }
    }

    /**
     * Dijkstra over the implicit complete graph where the edge from u to v
     * costs A[(u - v) mod N] (zero-weight entries mean "no edge").
     */
    private static void dijkstra(int start) {
        PriorityQueue<Node> pq = new PriorityQueue<>();
        pq.offer(new Node(start, 0));
        dist[start] = 0;

        while (!pq.isEmpty()) {
            Node current = pq.poll();
            if (current.cost > dist[current.node]) continue; // stale entry

            for (int next = 0; next < N; next++) {
                if (current.node == next) continue;

                int diff = current.node - next;
                if (diff < 0) diff += N;

                if (A[diff] == 0) continue; // no edge for this difference
                if (dist[next] <= dist[current.node] + A[diff]) continue;

                dist[next] = dist[current.node] + A[diff];
                merge(start, next); // make path set
                pq.offer(new Node(next, dist[next]));
            }
        }
    }

    /** Union-find root lookup with path compression. */
    private static int find(int x) {
        if (parent[x] < 0) return x;
        return parent[x] = find(parent[x]);
    }

    /** Union by size: the larger set (more negative root) absorbs the smaller. */
    private static void merge(int x, int y) {
        x = find(x);
        y = find(y);
        if (x == y) return;

        if (parent[x] < parent[y]) {
            parent[x] += parent[y];
            parent[y] = x;
        }
        else {
            parent[y] += parent[x];
            parent[x] = y;
        }
    }
}
|
// +build !unit
package eventstore_test
import (
"context"
"errors"
"sync"
"testing"
"time"
"github.com/GabrielCarpr/cqrs/bus"
"github.com/GabrielCarpr/cqrs/eventstore"
"github.com/GabrielCarpr/cqrs/eventstore/memory"
"github.com/GabrielCarpr/cqrs/eventstore/postgres"
"github.com/GabrielCarpr/cqrs/log"
"github.com/google/uuid"
"github.com/stretchr/testify/suite"
"golang.org/x/sync/errgroup"
)
// TestEvent is a minimal concrete event used to exercise the stores under test.
type TestEvent struct {
	bus.EventType
	Name string
	Age  int // varied per test case so deliveries can be told apart
}
// Buffer builds a fresh event buffer for the "testEntity" stream with the
// given aggregate id.
func Buffer(id uuid.UUID) bus.EventBuffer {
	buf := bus.NewEventBuffer(id, "testEntity")
	return buf
}
// Event implements bus.Event by returning the event's type identifier.
func (TestEvent) Event() string {
	return "test.event"
}
func TestMemoryEventStore(t *testing.T) {
s := &EventStoreBlackboxTest{factory: func() bus.EventStore {
return &memory.MemoryEventStore{}
}}
suite.Run(t, s)
}
// TestPostgresEventStore runs the blackbox suite against the Postgres-backed
// store: the schema is reset before every test, and the suite is handed a
// hook for overriding the store's clock.
func TestPostgresEventStore(t *testing.T) {
	c := postgres.Config{
		DBName: "cqrs",
		DBPass: "<PASSWORD>", // placeholder credential — TODO confirm how it is injected
		DBHost: "db",
		DBUser: "cqrs",
	}
	s := &EventStoreBlackboxTest{
		factory: func() bus.EventStore {
			return postgres.New(c)
		},
	}
	s.setupHook = func() error {
		// Drop and recreate the event tables so every test starts clean.
		schema := postgres.PostgreSQLSchema{Config: c}
		schema.Reset()
		return nil
	}
	s.changeNow = func(t time.Time) {
		postgres.Now = func() time.Time {
			return t
		}
	}
	suite.Run(t, s)
}
/**
Test Suite
*/

// EventStoreBlackboxTest exercises any bus.EventStore implementation purely
// through its public interface; the concrete store is supplied by factory.
type EventStoreBlackboxTest struct {
	suite.Suite
	factory     func() bus.EventStore // builds the store under test
	setupHook   func() error          // optional per-test preparation (may be nil)
	changeNow   func(time.Time)       // optional clock override (may be nil)
	entity      uuid.UUID             // aggregate id backing s.buffer
	buffer      bus.EventBuffer       // buffer for the primary test entity
	otherBuffer bus.EventBuffer       // buffer for an unrelated entity
	store       bus.EventStore        // instance created in SetupTest
}
// SetupTest builds a fresh store and buffers for each test, registers the
// test event type, and runs the optional store-specific setup hook.
func (s *EventStoreBlackboxTest) SetupTest() {
	s.store = s.factory()
	s.entity = uuid.New()
	s.buffer = Buffer(s.entity)
	s.otherBuffer = Buffer(uuid.New())
	bus.RegisterMessage(&TestEvent{})
	log.SetLevel(log.WARN) // keep test output quiet
	if s.setupHook != nil {
		err := s.setupHook()
		if err != nil {
			panic(err)
		}
	}
}
// TearDownTest closes the store built for the test, failing hard on error.
func (s *EventStoreBlackboxTest) TearDownTest() {
	if err := s.store.Close(); err != nil {
		panic(err)
	}
}
// TestAppendsEventsAndStreams appends one event to each of two streams and
// verifies that streaming the first entity's stream yields only its event.
func (s EventStoreBlackboxTest) TestAppendsEventsAndStreams() {
	e := &TestEvent{Name: "Gabriel", Age: 24}
	s.buffer.Buffer(true, e)
	evs := s.buffer.Events(context.Background())
	err := s.store.Append(
		context.Background(),
		bus.ExpectedVersion(s.buffer.CurrentVersion()),
		evs...,
	)
	s.NoError(err)
	// An event on an unrelated stream must not leak into the query below.
	e2 := &TestEvent{Name: "Gabriel", Age: 24}
	s.otherBuffer.Buffer(true, e2)
	evs2 := s.otherBuffer.Events(context.Background())
	err = s.store.Append(
		context.Background(),
		bus.ExpectedVersion(s.otherBuffer.CurrentVersion()),
		evs2...,
	)
	s.NoError(err)
	query := bus.Select{
		StreamID: bus.StreamID{Type: "testEntity", ID: s.entity.String()},
		From:     0,
	}
	stream := make(chan bus.Event)
	results := make([]bus.Event, 0)
	// The timeout bounds the test if the store never closes the channel.
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*20)
	defer cancel()
	group, ctx := errgroup.WithContext(ctx)
	group.Go(func() error {
		return s.store.Stream(ctx, stream, query)
	})
loop:
	for {
		select {
		case <-ctx.Done():
			break loop
		case ev, ok := <-stream:
			if !ok {
				break loop // store closed the channel: stream finished
			}
			results = append(results, ev)
		}
	}
	err = group.Wait()
	s.Require().NoError(err)
	s.Len(results, 1)
}
// TestAppendsEventAndStreamsFrom appends ten events and streams the entity's
// stream from version 6, expecting the last five events back.
func (s EventStoreBlackboxTest) TestAppendsEventAndStreamsFrom() {
	events := make([]bus.Event, 10)
	for i := 0; i < 10; i++ {
		events[i] = &TestEvent{Name: "Gabriel", Age: 20 + i}
	}
	s.buffer.Buffer(true, events...)
	err := s.store.Append(context.Background(), bus.ExpectedVersion(s.buffer.Version), s.buffer.Events(context.Background())...)
	s.NoError(err)
	var result []bus.Event
	stream := make(chan bus.Event)
	group, ctx := errgroup.WithContext(context.Background())
	group.Go(func() error {
		// From: 6 over 10 stored events yields 5 — presumably versions 6..10
		// inclusive; TODO confirm the store's From semantics.
		return s.store.Stream(ctx, stream, bus.Select{
			StreamID: bus.StreamID{ID: s.entity.String(),
				Type: "testEntity"},
			From: 6,
		})
	})
	for event := range stream {
		result = append(result, event)
	}
	err = group.Wait()
	s.NoError(err)
	s.Len(result, 5)
}
// TestStreamsAll appends one event to each of two different streams and
// expects an unfiltered Select to stream both.
func (s EventStoreBlackboxTest) TestStreamsAll() {
	e := &TestEvent{Name: "Gabriel", Age: 24}
	s.buffer.Buffer(true, e)
	evs := s.buffer.Events(context.Background())
	err := s.store.Append(
		context.Background(),
		bus.ExpectedVersion(s.buffer.CurrentVersion()),
		evs...,
	)
	s.NoError(err)
	e2 := &TestEvent{Name: "Gabriel", Age: 24}
	s.otherBuffer.Buffer(true, e2)
	evs2 := s.otherBuffer.Events(context.Background())
	err = s.store.Append(
		context.Background(),
		bus.ExpectedVersion(s.otherBuffer.CurrentVersion()),
		evs2...,
	)
	s.NoError(err)
	results := make([]bus.Event, 0)
	stream := make(chan bus.Event)
	group, ctx := errgroup.WithContext(context.Background())
	group.Go(func() error {
		// A zero-value Select selects every stream.
		return s.store.Stream(ctx, stream, bus.Select{})
	})
	for event := range stream {
		results = append(results, event)
	}
	err = group.Wait()
	s.NoError(err)
	s.Len(results, 2)
}
// TestOptimisticLocking appends at the current version, then rewinds the
// buffer to simulate a concurrent writer appending at a stale version and
// expects ErrConcurrencyViolation.
func (s EventStoreBlackboxTest) TestOptimisticLocking() {
	e := &TestEvent{Name: "Gabriel", Age: 24}
	s.buffer.Buffer(true, e)
	err := s.store.Append(
		context.Background(),
		bus.ExpectedVersion(s.buffer.CurrentVersion()),
		s.buffer.Events(context.Background())...,
	)
	s.NoError(err)
	// A concurrent append will now attempt to store
	s.buffer.ForceVersion(0) // rewind: this append now claims a stale version
	e = &TestEvent{Name: "Gabriel", Age: 27}
	s.buffer.Buffer(true, e)
	err = s.store.Append(
		context.Background(),
		bus.ExpectedVersion(s.buffer.CurrentVersion()),
		s.buffer.Events(context.Background())...,
	)
	s.Require().Error(err)
	s.EqualError(err, eventstore.ErrConcurrencyViolation.Error())
}
// TestEnforcesSameStreamAppends asserts that a single Append carrying events
// for two different aggregates is rejected with ErrConsistencyViolation.
func (s EventStoreBlackboxTest) TestEnforcesSameStreamAppends() {
	e := &TestEvent{Name: "Gabriel", Age: 23}
	e.ForAggregate("lol")
	e.OwnedBy(uuid.New().String())
	e2 := &TestEvent{Name: "Giddian", Age: 99}
	e2.ForAggregate("yomp")
	e2.OwnedBy(uuid.New().String())
	err := s.store.Append(context.Background(), bus.ExpectedVersion(0), e, e2)
	s.Require().Error(err)
	s.EqualError(err, eventstore.ErrConsistencyViolation.Error())
}
// TestSubscribesAll appends five events, subscribes, and expects all five
// delivered with the first at version 1 and in append order.
func (s EventStoreBlackboxTest) TestSubscribesAll() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*100)
	defer cancel()
	for i := 0; i < 5; i++ {
		event := &TestEvent{Name: "Gabriel", Age: 25 + i}
		s.buffer.Buffer(true, event)
		err := s.store.Append(ctx, bus.ExpectedVersion(s.buffer.Version), s.buffer.Events(context.Background())...)
		s.buffer.Commit()
		s.Require().NoError(err)
	}
	var result []bus.Event
	// Subscribe runs until the context times out, collecting deliveries.
	err := s.store.Subscribe(ctx, func(e bus.Event) error {
		result = append(result, e)
		return nil
	})
	s.Require().NoError(err)
	s.Require().Len(result, 5)
	s.Equal("Gabriel", result[0].(*TestEvent).Name)
	s.Equal(25, result[0].(*TestEvent).Age)
	s.Equal(int64(1), result[0].Versioned())
}
// TestNoEventsDoesntCallBack subscribes against an empty store and fails the
// test if the callback ever fires before the context times out.
func (s EventStoreBlackboxTest) TestNoEventsDoesntCallBack() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*20)
	defer cancel()
	err := s.store.Subscribe(ctx, func(e bus.Event) error {
		s.FailNow("Called back")
		cancel()
		return nil
	})
	s.Require().NoError(err)
}
// TestSubscribesConcurrentlyOnceOnly appends 100 events, then races 25
// competing subscribers and asserts each event is delivered exactly once
// (no duplicate Age and no duplicate version across all subscribers).
func (s EventStoreBlackboxTest) TestSubscribesConcurrentlyOnceOnly() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*500)
	defer cancel()
	for i := 0; i < 100; i++ {
		event := &TestEvent{Name: "Gabriel", Age: 25 + i}
		s.buffer.Buffer(true, event)
		err := s.store.Append(ctx, bus.ExpectedVersion(s.buffer.Version), s.buffer.Events(context.Background())...)
		s.buffer.Commit()
		s.Require().NoError(err, "on append #%d", i)
	}
	start := make(chan struct{})
	results := make(chan bus.Event, 100)
	group, ctx := errgroup.WithContext(ctx)
	for i := 0; i < 25; i++ {
		group.Go(func() error {
			<-start // line the subscribers up so they genuinely race
			return s.store.Subscribe(ctx, func(e bus.Event) error {
				results <- e
				return nil
			})
		})
	}
	close(start)
	err := group.Wait()
	s.Require().NoError(err)
	end := make([]bus.Event, 0)
	// NOTE(review): results is closed from inside this loop once its buffer is
	// drained; if fewer than 100 events were delivered the range would block
	// indefinitely — confirm that is acceptable for the harness.
	for event := range results {
		end = append(end, event)
		if len(results) == 0 {
			close(results)
			break
		}
	}
	ages := make(map[int]struct{})
	versions := make(map[int64]struct{})
	s.Require().Len(end, 100)
	for _, event := range end {
		if _, ok := ages[event.(*TestEvent).Age]; ok {
			s.FailNow("Age already delivered")
		}
		if _, ok := versions[event.Versioned()]; ok {
			s.FailNow("Version already delivered")
		}
		ages[event.(*TestEvent).Age] = struct{}{}
		versions[event.Versioned()] = struct{}{}
	}
}
// TestSubscribeErrorNacks returns an error from the subscriber callback and
// asserts the same first event (Age 25) is redelivered at least three times:
// an erroring handler nacks instead of advancing the subscription.
func (s EventStoreBlackboxTest) TestSubscribeErrorNacks() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*500)
	defer cancel()
	for i := 0; i < 3; i++ {
		event := &TestEvent{Name: "Gabriel", Age: 25 + i}
		s.buffer.Buffer(true, event)
		err := s.store.Append(ctx, bus.ExpectedVersion(s.buffer.Version), s.buffer.Events(context.Background())...)
		s.buffer.Commit()
		s.Require().NoError(err, "on append #%d", i)
	}
	received := 0
	err := s.store.Subscribe(ctx, func(e bus.Event) error {
		s.Equal(25, e.(*TestEvent).Age) // always the first event: never acked
		received++
		if received >= 3 {
			cancel()
		}
		return errors.New("test error")
	})
	s.Require().NoError(err)
	s.Require().GreaterOrEqual(received, 3)
}
// TestConcurrentAppends fires 100 concurrent appends at version 0 of the same
// stream; exactly one may win and the other 99 must fail with
// ErrConcurrencyViolation.
func (s EventStoreBlackboxTest) TestConcurrentAppends() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*3000)
	defer cancel()
	s.buffer.Buffer(true, &TestEvent{Name: "Gabriel"})
	event := s.buffer.Events(context.Background())[0]
	start := make(chan struct{})
	m := sync.Mutex{}
	group, ctx := errgroup.WithContext(ctx)
	// Renamed from "errors": the old name shadowed the imported errors package.
	errCount := 0
	for i := 0; i < 100; i++ {
		i := i // capture loop variable for the closure (pre-Go 1.22 semantics)
		group.Go(func() error {
			<-start // line all goroutines up before racing the append
			err := s.store.Append(ctx, bus.ExpectedVersion(0), event)
			if err != nil {
				s.Require().EqualError(err, eventstore.ErrConcurrencyViolation.Error(), "Error'd on: %d", i)
				m.Lock()
				errCount++
				m.Unlock()
			}
			return nil
		})
	}
	close(start)
	// Workers only ever return nil; surface any unexpected group error anyway
	// instead of silently discarding it as before.
	s.Require().NoError(group.Wait())
	s.T().Log(s.entity)
	s.Equal(99, errCount)
}
// TestSubscribesInOrder appends 500 events and asserts they are delivered in
// append order (ages 25..524 consecutively).
func (s EventStoreBlackboxTest) TestSubscribesInOrder() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*2000)
	defer cancel()
	for i := 0; i < 500; i++ {
		s.buffer.Buffer(true, &TestEvent{Name: "Gabriel", Age: 25 + i})
		event := s.buffer.Events(ctx)[0]
		err := s.store.Append(ctx, bus.ExpectedVersion(s.buffer.Version), event)
		s.buffer.Commit()
		s.Require().NoError(err)
	}
	results := []int{}
	// NOTE(review): Subscribe's error return is ignored here — cancelling the
	// context is the expected exit path; confirm no other error matters.
	s.store.Subscribe(ctx, func(e bus.Event) error {
		age := e.(*TestEvent).Age
		results = append(results, age)
		if age == 524 {
			cancel() // last event seen: stop the subscription
		}
		return nil
	})
	starting := 25
	for _, age := range results {
		s.Require().Equal(starting, age)
		starting++
	}
}
// TestHandlesPanic subscribes with a callback that always panics, then
// verifies the store survives: a second subscription still receives all
// three events at least once each.
func (s EventStoreBlackboxTest) TestHandlesPanic() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*400)
	defer cancel()
	for i := 0; i < 3; i++ {
		s.buffer.Buffer(true, &TestEvent{Name: "Gabriel", Age: 25 + i})
		event := s.buffer.Events(ctx)[0]
		err := s.store.Append(ctx, bus.ExpectedVersion(s.buffer.Version), event)
		s.buffer.Commit()
		s.Require().NoError(err)
	}
	results := make(map[int]int)
	// First subscriber panics on every delivery; the store must recover.
	s.store.Subscribe(ctx, func(e bus.Event) error {
		panic("error")
	})
	// A fresh context for the second, well-behaved subscriber.
	ctx, cancel = context.WithTimeout(context.Background(), time.Millisecond*400)
	defer cancel()
	err := s.store.Subscribe(ctx, func(e bus.Event) error {
		results[e.(*TestEvent).Age] += 1
		return nil
	})
	s.NoError(err)
	s.GreaterOrEqual(results[25], 1)
	s.GreaterOrEqual(results[26], 1)
	s.GreaterOrEqual(results[27], 1)
}
// TODO: Add further test to simulate crashing of
// subscribe callback: https://stackoverflow.com/questions/26225513/how-to-test-os-exit-scenarios-in-go
|
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

# Starts the react-native packager (cli.js start) from the project root.

# scripts directory
# readlink resolves a symlinked invocation so THIS_DIR is the real location
THIS_DIR=$(cd -P "$(dirname "$(readlink "${BASH_SOURCE[0]}" || echo "${BASH_SOURCE[0]}")")" && pwd)
REACT_NATIVE_ROOT="$THIS_DIR/.."
# Application root directory - General use case: react-native is a dependency
PROJECT_ROOT="$THIS_DIR/../../.."
# shellcheck source=/dev/null
source "${THIS_DIR}/.packager.env"
# check and assign NODE_BINARY env
# shellcheck disable=SC1091
source "${THIS_DIR}/node-binary.sh"
# When running react-native tests, react-native doesn't live in node_modules but in the PROJECT_ROOT
if [ ! -d "$PROJECT_ROOT/node_modules/react-native" ];
then
  PROJECT_ROOT="$THIS_DIR/.."
fi
# Start packager from PROJECT_ROOT
cd "$PROJECT_ROOT" || exit
"$NODE_BINARY" "$REACT_NATIVE_ROOT/cli.js" start "$@"
|
#include "game.h"
#include <iostream>
// Builds a race: loads the track bitmap and tile-layout file, spawns the
// player's car and the AI opponents at the start tile, and fences the world
// with four static edge bodies.
Game::Game(b2World& world, int okrazenia, int liczbaprzeciwnikow, Image& img, RenderWindow& window, int trasa, string strNazwaPng, char* chNazwaTxt) : view(window.GetDefaultView())
{
    double pospoc[2] = {0.0, 0.0}; // start position, filled from the 'M' tile
    nOkrazenia = okrazenia;
    nLiczbaPrzeciwnikow = liczbaprzeciwnikow;
    pos[0] = 0;
    pos[1] = 0;
    indeks = 0;
    nOkrazenie = 0;
    fCzasyOkrazen = new float[nOkrazenia]; // one entry per lap
    NajlepszeOkrazenie = 0.0f;
    LacznyCzas = 0.0f;
    dt = 0;
    // HUD strings drawn at half scale.
    str.SetScale(0.5, 0.5);
    str2.SetScale(0.5, 0.5);
    str3.SetScale(0.5, 0.5);
    str4.SetScale(0.5, 0.5);
    str5.SetScale(0.5, 0.5);
    nTrasa = trasa;
    tTrasa.img.LoadFromFile(strNazwaPng);
    tTrasa.fTrasa.open(chNazwaTxt);
    if (tTrasa.fTrasa.good())
    {
        // Layout file header: grid size, tile size, number of checkpoints.
        tTrasa.fTrasa >> tTrasa.nRozmiary[0] >> tTrasa.nRozmiary[1];
        tTrasa.fTrasa >> tTrasa.nRozmiaryFragmentu[0] >> tTrasa.nRozmiaryFragmentu[1];
        tTrasa.fTrasa >> tTrasa.nLiczbaPunktowKontrolnych;
        tTrasa.aPunktyKontrolne = new bool[tTrasa.nLiczbaPunktowKontrolnych];
        for (int i = 0; i < tTrasa.nLiczbaPunktowKontrolnych; ++i)
            tTrasa.aPunktyKontrolne[i] = false;
        tTrasa.zakrety.resize(tTrasa.nLiczbaPunktowKontrolnych);
        tTrasa.aTablicaFragmentow = new fragment[tTrasa.nRozmiary[0] * tTrasa.nRozmiary[1]];
        // One record per tile: position, type, marker ('P' = checkpoint,
        // 'M' = start/finish), plus extra data per marker.
        for (int i = 0; i < tTrasa.nRozmiary[0] * tTrasa.nRozmiary[1]; ++i)
            if (!tTrasa.fTrasa.eof())
            {
                tTrasa.fTrasa >> tTrasa.aTablicaFragmentow[i].nPozycja[0] >> tTrasa.aTablicaFragmentow[i].nPozycja[1] >> tTrasa.aTablicaFragmentow[i].nTyp >> tTrasa.aTablicaFragmentow[i].chInf;
                if (tTrasa.aTablicaFragmentow[i].chInf == 'P')
                {
                    tTrasa.fTrasa >> tTrasa.aTablicaFragmentow[i].nKtoryPunkt;
                    --tTrasa.aTablicaFragmentow[i].nKtoryPunkt; // file is 1-based
                }
                else if (tTrasa.aTablicaFragmentow[i].chInf == 'M')
                {
                    // Derive the world-space start position from the tile.
                    pospoc[0] = tTrasa.aTablicaFragmentow[i].nPozycja[0]/10 + 4;
                    pospoc[1] = (tTrasa.aTablicaFragmentow[i].nPozycja[1]+tTrasa.nRozmiaryFragmentu[1])/10;
                }
                tTrasa.fTrasa >> tTrasa.aTablicaFragmentow[i].siInf;
                if (tTrasa.aTablicaFragmentow[i].chInf == 'P')
                    tTrasa.zakrety[tTrasa.aTablicaFragmentow[i].nKtoryPunkt] = zakret(pt::PointMake(tTrasa.aTablicaFragmentow[i].nPozycja[0]/tTrasa.nRozmiaryFragmentu[0], tTrasa.aTablicaFragmentow[i].nPozycja[1]/tTrasa.nRozmiaryFragmentu[1]), tTrasa.aTablicaFragmentow[i].siInf);
            }
    }
    str4.SetText("Najlepsze okrazenie");
    tTrasa.trasa.SetImage(tTrasa.img);
    SIcars = new SI*[nLiczbaPrzeciwnikow];
    car1 = new Car(img, &world, pt::Point(pospoc[0]+6, pospoc[1]));
    // NOTE(review): debug print of the start position left in place.
    std::cout << pospoc[0] << ' ' << pospoc[1] << std::endl;
    // Stagger the AI cars in two columns behind the start line.
    for (int i = 0; i < nLiczbaPrzeciwnikow; ++i)
    {
        if (i % 2 == 0)
            SIcars[i] = new SI(img, &world, &tTrasa, pt::Point(pospoc[0], pospoc[1] + ((i+1)*3)));
        else
            SIcars[i] = new SI(img, &world, &tTrasa, pt::Point(pospoc[0]+6, pospoc[1] + ((i+1)*3)));
        //SIcars[i]->car.SetRotation(0.0f);
    }
    // Four static edges fencing the playable area (world units = pixels/10).
    b2Vec2 vs[4];
    vs[0].Set(0, 0);
    vs[3].Set((tTrasa.nRozmiary[0]*tTrasa.nRozmiaryFragmentu[0])/10, 0);
    vs[2].Set((tTrasa.nRozmiary[0]*tTrasa.nRozmiaryFragmentu[0])/10, (tTrasa.nRozmiary[1]*tTrasa.nRozmiaryFragmentu[1])/10);
    vs[1].Set(0, (tTrasa.nRozmiary[1]*tTrasa.nRozmiaryFragmentu[1])/10);
    /*b2ChainShape chain;
    chain.CreateLoop(vs, 4);*/
    b2EdgeShape e1;
    e1.Set(vs[0], vs[1]);
    b2EdgeShape e2;
    e2.Set(vs[1], vs[2]);
    b2EdgeShape e3;
    e3.Set(vs[2], vs[3]);
    b2EdgeShape e4;
    e4.Set(vs[3], vs[0]);
    // NOTE(review): m_body is overwritten for each edge, so only the last
    // body remains reachable (see the destructor) — confirm against game.h.
    b2BodyDef bodyDef2;
    bodyDef2.type = b2_staticBody;
    m_body = world.CreateBody(&bodyDef2);
    b2Fixture* fixtur2 = m_body->CreateFixture(&e1, 1.0f);
    b2BodyDef bodyDef3;
    bodyDef3.type = b2_staticBody;
    m_body = world.CreateBody(&bodyDef3);
    b2Fixture* fixtur3 = m_body->CreateFixture(&e2, 1.0f);
    b2BodyDef bodyDef4;
    bodyDef4.type = b2_staticBody;
    m_body = world.CreateBody(&bodyDef4);
    b2Fixture* fixtur4 = m_body->CreateFixture(&e3, 1.0f);
    b2BodyDef bodyDef5;
    bodyDef5.type = b2_staticBody;
    m_body = world.CreateBody(&bodyDef5);
    b2Fixture* fixtur5 = m_body->CreateFixture(&e4, 1.0f);
    /*b2BodyDef bodyDef2;
    bodyDef2.type = b2_staticBody;
    m_body = world.CreateBody(&bodyDef2);
    b2PolygonShape polygonShape2;
    polygonShape2.SetAsBox((tTrasa.nRozmiary[0]*tTrasa.nRozmiaryFragmentu[0])/10, 0.1);
    b2Fixture* fixture = m_body->CreateFixture(&polygonShape2, 1.0f);
    m_body->SetTransform(b2Vec2(0, 0), 0);
    b2BodyDef bodyDef3;
    bodyDef3.type = b2_staticBody;
    m_body1 = world.CreateBody(&bodyDef3);
    b2PolygonShape polygonShape3;
    polygonShape3.SetAsBox(0.1, (tTrasa.nRozmiary[1]*tTrasa.nRozmiaryFragmentu[1])/10);
    b2Fixture* fixture1 = m_body1->CreateFixture(&polygonShape3, 1.0f);
    m_body1->SetTransform(b2Vec2(0, 0), 0);
    b2BodyDef bodyDef4;
    bodyDef4.type = b2_staticBody;
    m_body2 = world.CreateBody(&bodyDef4);
    b2PolygonShape polygonShape4;
    polygonShape4.SetAsBox((tTrasa.nRozmiary[0]*tTrasa.nRozmiaryFragmentu[0])/10, 0.1);
    b2Fixture* fixture2 = m_body2->CreateFixture(&polygonShape4, 1.0f);
    m_body2->SetTransform(b2Vec2(0, (tTrasa.nRozmiary[1]*tTrasa.nRozmiaryFragmentu[1])/10), 0);
    b2BodyDef bodyDef5;
    bodyDef5.type = b2_staticBody;
    m_body3 = world.CreateBody(&bodyDef5);
    b2PolygonShape polygonShape5;
    polygonShape5.SetAsBox(0.1, (tTrasa.nRozmiary[1]*tTrasa.nRozmiaryFragmentu[1])/10);
    b2Fixture* fixture3 = m_body3->CreateFixture(&polygonShape5, 1.0f);
    m_body3->SetTransform(b2Vec2((tTrasa.nRozmiary[0]*tTrasa.nRozmiaryFragmentu[0])/10, 0), 0);*/
}
// Releases the cars, the lap-time array, and the boundary bodies.
// NOTE(review): the constructor overwrites m_body while creating the four
// boundary edges, so only the last edge body is destroyed here, and
// m_body1..m_body3 are only assigned by the commented-out variant — the
// first three edge bodies appear to leak; confirm against game.h.
Game::~Game()
{
    if (car1)
        delete car1;
    for (int i = 0; i < nLiczbaPrzeciwnikow; ++i)
        if (SIcars[i])
            delete SIcars[i];
    if (SIcars)
        delete[] SIcars;
    if (fCzasyOkrazen)
        delete[] fCzasyOkrazen;
    if (m_body)
        m_body->GetWorld()->DestroyBody(m_body);
    if (m_body1)
        m_body1->GetWorld()->DestroyBody(m_body1);
    if (m_body2)
        m_body2->GetWorld()->DestroyBody(m_body2);
    if (m_body3)
        m_body3->GetWorld()->DestroyBody(m_body3);
}
// Runs the race main loop: input, physics, viewport scrolling, HUD refresh,
// and checkpoint/lap accounting. Returns 1 when the race finishes (writing
// total time to *u1 and best lap to *u2), 0 to return to the menu, and -1
// when the window was closed.
int Game::Gra(b2World& world, const Input& wejscie, Event event, RenderWindow& window, float* u1, float* u2)
{
    // Centre the initial viewport on the car, clamped to the track origin.
    FloatRect rec;
    rec.left = (car1->GetPosition().x > 320 ? car1->GetPosition().x-320 : 0);
    // BUGFIX: the top edge must follow the car's y coordinate; the original
    // used x here, mis-placing the viewport vertically.
    rec.top = (car1->GetPosition().y > 240 ? car1->GetPosition().y-240 : 0);
    rec.width = 640;
    rec.height = 480;
    view.SetFromRect(rec);
    // Pin the HUD strings to the viewport corners.
    str.SetPosition(view.GetRect().Left, view.GetRect().Top);
    str2.SetPosition(view.GetRect().Left + 320, view.GetRect().Top);
    str3.SetPosition(view.GetRect().Right - 70, view.GetRect().Top);
    str4.SetPosition(view.GetRect().Right - str4.GetRect().Right, view.GetRect().Top + 20);
    str5.SetPosition(view.GetRect().Right - 70, view.GetRect().Top + 40);
    if (!Odliczanie(wejscie, event, window))
        return -1;
    czas.Reset();
    while (window.IsOpened())
    {
        dt = double(window.GetFrameTime());
        while (window.GetEvent(event))
        {
            if (event.Type == Event::Closed)
            {
                window.Close();
                return -1;
            }
            if (wejscie.IsKeyDown(Key::Escape))
            {
                window.Close();
                return -1;
            }
            // Ignore contradictory steering input.
            if (wejscie.IsKeyDown(Key::Right) && wejscie.IsKeyDown(Key::Left))continue;
            if (wejscie.IsKeyDown(Key::P))
            {
                int r = MenuPauza(wejscie, event, window);
                if (r == 0)
                    return 0;
                else if (r == -1)
                    return -1;
            }
        }
        if (wejscie.IsKeyDown(Key::Down))
            car1->SetKey(0);
        if (wejscie.IsKeyDown(Key::Left))
            car1->SetKey(2);
        if (wejscie.IsKeyDown(Key::Right))
            car1->SetKey(3);
        if (wejscie.IsKeyDown(Key::Up))
            car1->SetKey(1);
        car1->Update();
        for (int i = 0; i < nLiczbaPrzeciwnikow; ++i)
        {
            SIcars[i]->AktualizujPozycje(&tTrasa);
        }
        // Scroll the view (and the HUD pinned to it) to follow the car,
        // without leaving the track bounds.
        if (view.GetRect().Top + car1->GetShift().y >= 0 && view.GetRect().Bottom + car1->GetShift().y <= tTrasa.nRozmiary[1] * tTrasa.nRozmiaryFragmentu[1])
            if ((car1->GetShift().y > 0 && view.GetRect().Bottom - car1->GetSprPos().y <= 240) || (car1->GetShift().y < 0 && car1->GetSprPos().y - view.GetRect().Top <= 240))
            {
                view.Move(0, car1->GetShift().y);
                str.Move(0, car1->GetShift().y);
                str2.Move(0, car1->GetShift().y);
                str3.Move(0, car1->GetShift().y);
                str4.Move(0, car1->GetShift().y);
                str5.Move(0, car1->GetShift().y);
            }
        if (view.GetRect().Left + car1->GetShift().x >= 0 && view.GetRect().Right + car1->GetShift().x <= tTrasa.nRozmiary[0] * tTrasa.nRozmiaryFragmentu[0])
            if ((car1->GetShift().x > 0 && view.GetRect().Right - car1->GetSprPos().x <= 320) || (car1->GetShift().x < 0 && car1->GetSprPos().x - view.GetRect().Left <= 320))
            {
                view.Move(car1->GetShift().x, 0);
                str.Move(car1->GetShift().x, 0);
                str2.Move(car1->GetShift().x, 0);
                str3.Move(car1->GetShift().x, 0);
                str4.Move(car1->GetShift().x, 0);
                str5.Move(car1->GetShift().x, 0);
            }
        if (car1->GetPosition().x > 0 && car1->GetPosition().x < tTrasa.trasa.GetSize().x && car1->GetPosition().y > 0 && car1->GetPosition().y < tTrasa.trasa.GetSize().y)
        {
            // Map the car's sprite position onto the tile grid.
            pos[0] = (car1->GetSprPos().x - (static_cast<int>(car1->GetSprPos().x) % tTrasa.nRozmiaryFragmentu[0])) / tTrasa.nRozmiaryFragmentu[0];
            pos[1] = (car1->GetSprPos().y - (static_cast<int>(car1->GetSprPos().y) % tTrasa.nRozmiaryFragmentu[1])) / tTrasa.nRozmiaryFragmentu[1];
            if (pos[1] == 1)
                indeks = tTrasa.nRozmiary[0] + pos[0];
            else if (pos[1] > 1)
                indeks = (pos[1] * tTrasa.nRozmiary[0]) + pos[0];
            else
                indeks = pos[0];
            // 'P' tiles are checkpoints; 'M' is the start/finish line.
            if (tTrasa.aTablicaFragmentow[indeks].chInf == 'P')
                tTrasa.aPunktyKontrolne[tTrasa.aTablicaFragmentow[indeks].nKtoryPunkt] = true;
            if (tTrasa.aTablicaFragmentow[indeks].chInf == 'M')
            {
                int n = 0;
                for (int i = 0; i < tTrasa.nLiczbaPunktowKontrolnych; ++i)
                    if (tTrasa.aPunktyKontrolne[i] == true)
                        ++n;
                // A lap only counts once every checkpoint has been visited.
                if (n == tTrasa.nLiczbaPunktowKontrolnych)
                {
                    fCzasyOkrazen[nOkrazenie] = czas.GetElapsedTime();
                    cout << fCzasyOkrazen[nOkrazenie] << endl;
                    if (nOkrazenie > 0)
                    {
                        // BUGFIX: compare against the best lap so far, not the
                        // previous lap; the old test could overwrite the best
                        // time with a slower lap.
                        if (fCzasyOkrazen[nOkrazenie] < NajlepszeOkrazenie)
                            NajlepszeOkrazenie = fCzasyOkrazen[nOkrazenie];
                    }
                    else
                        NajlepszeOkrazenie = fCzasyOkrazen[nOkrazenie];
                    LacznyCzas += fCzasyOkrazen[nOkrazenie];
                    ++nOkrazenie;
                    czas.Reset();
                    cout << NajlepszeOkrazenie << ' ' << LacznyCzas << endl;
                    if (nOkrazenie >= nOkrazenia)
                    {
                        // Race over: hand the totals back to the caller.
                        *u1 = LacznyCzas;
                        *u2 = NajlepszeOkrazenie;
                        return 1;
                    }
                    else
                        for (int i = 0; i < tTrasa.nLiczbaPunktowKontrolnych; ++i)
                            tTrasa.aPunktyKontrolne[i] = false;
                }
            }
        }
        // Refresh the HUD texts.
        sprintf(ch, "%f", czas.GetElapsedTime());
        str.SetText(ch);
        // BUGFIX: "%f" with an int argument is undefined behaviour; the speed
        // is cast to int, so print it with "%i".
        sprintf(ch, "%i", static_cast<int>(car1->GetSpeed()/2));
        str2.SetText(ch);
        sprintf(ch, "%i", nOkrazenie+1);
        str3.SetText(ch);
        sprintf(ch, "%f", NajlepszeOkrazenie);
        str5.SetText(ch);
        window.Draw(tTrasa.trasa);
        window.Draw(car1->GetCar());
        for (int i = 0; i < nLiczbaPrzeciwnikow; ++i)
            window.Draw(SIcars[i]->GetCar());
        window.Draw(str);
        window.Draw(str2);
        window.Draw(str3);
        window.Draw(str4);
        window.Draw(str5);
        window.Display();
        window.Clear();
        world.Step(static_cast<float>(dt), 6, 2);
    }
    // BUGFIX: control could previously flow off the end of this non-void
    // function (UB) if the window closed; treat it like a quit.
    return -1;
}
// Pre-race countdown ("3..2..1..Go!"). Returns false when the window was
// closed during the countdown, true otherwise.
bool Game::Odliczanie(const Input& wejscie, Event event, RenderWindow& window)
{
    if (window.IsOpened())
    {
        Clock czas2;
        // NOTE(review): this local str3 shadows the member HUD string str3.
        String str3;
        str3.SetColor(Color::White);
        int nOdliczanie = 4;
        while (nOdliczanie >= -1)
        {
            // Tick down once per second.
            if (czas2.GetElapsedTime() >= 1)
            {
                --nOdliczanie;
                czas2.Reset();
            }
            str3.SetPosition(view.GetRect().Left + 320, view.GetRect().Top + 240);
            if (nOdliczanie >= 0)
            {
                sprintf(ch, "%i", nOdliczanie);
                str3.SetText(ch);
            }
            else
                str3.SetText("Go!");
            while (window.GetEvent(event))
            {
                if (event.Type == Event::Closed)
                {
                    window.Close();
                    return false;
                }
                if (wejscie.IsKeyDown(Key::Escape))
                {
                    window.Close();
                    return false;
                }
            }
            car1->Update();
            window.Draw(tTrasa.trasa);
            window.Draw(car1->GetCar());
            for (int i = 0; i < nLiczbaPrzeciwnikow; ++i)
                window.Draw(SIcars[i]->GetCar());
            window.Draw(str3);
            window.Display();
            window.Clear();
        }
    }
    return true;
}
int Game::MenuPauza(const Input& wejscie, Event event, RenderWindow& window)
{
int p[2] = {view.GetRect().Left, view.GetRect().Top};
view.Move(-p[0], -p[1]);
String str1;
String str21;
String str31;
str1.SetText("Kontynuuj");
str21.SetText("Menu");
str31.SetText("Wyjscie");
str1.SetCenter(str1.GetRect().Right/2, str1.GetRect().Bottom/2);
str21.SetCenter(str21.GetRect().Right/2, str21.GetRect().Bottom/2);
str31.SetCenter(str31.GetRect().Right/2, str31.GetRect().Bottom/2);
str1.SetPosition(window.GetWidth()/2, window.GetHeight()*0.25);
str21.SetPosition(window.GetWidth()/2, window.GetHeight()*0.50);
str31.SetPosition(window.GetWidth()/2, window.GetHeight()*0.75);
str1.SetColor(Color::Black);
str21.SetColor(Color::Black);
str31.SetColor(Color::Black);
float p1[4] = {str1.GetRect().Left, str1.GetRect().Top, str1.GetRect().Right, str1.GetRect().Bottom};
float p2[4] = {str21.GetRect().Left, str21.GetRect().Top, str21.GetRect().Right, str21.GetRect().Bottom};
float p3[4] = {str31.GetRect().Left, str31.GetRect().Top, str31.GetRect().Right, str31.GetRect().Bottom};
while (window.IsOpened())
{
while (window.GetEvent(event))
{
if (event.Type == Event::Closed)
{
window.Close();
return -1;
}
if (wejscie.IsKeyDown(Key::Escape))
{
window.Close();
return -1;
}
if (wejscie.IsMouseButtonDown(Mouse::Left))
{
if (wejscie.GetMouseX() >= p1[0] && wejscie.GetMouseX() <= p1[2] && wejscie.GetMouseY() >= p1[1] && wejscie.GetMouseY() <= p1[3])
{
window.Clear();
view.Move(p[0], p[1]);
return 1;
}
if (wejscie.GetMouseX() >= p2[0] && wejscie.GetMouseX() <= p2[2] && wejscie.GetMouseY() >= p2[1] && wejscie.GetMouseY() <= p2[3])
{
return 0;
}
if (wejscie.GetMouseX() >= p3[0] && wejscie.GetMouseX() <= p3[2] && wejscie.GetMouseY() >= p3[1] && wejscie.GetMouseY() <= p3[3])
{
window.Close();
return -1;
}
}
}
if (wejscie.GetMouseX() >= p1[0] && wejscie.GetMouseX() <= p1[2] && wejscie.GetMouseY() >= p1[1] && wejscie.GetMouseY() <= p1[3])
str.SetColor(Color::Blue);
else
str.SetColor(Color::Black);
if (wejscie.GetMouseX() >= p2[0] && wejscie.GetMouseX() <= p2[2] && wejscie.GetMouseY() >= p2[1] && wejscie.GetMouseY() <= p2[3])
str2.SetColor(Color::Blue);
else
str2.SetColor(Color::Black);
if (wejscie.GetMouseX() >= p3[0] && wejscie.GetMouseX() <= p3[2] && wejscie.GetMouseY() >= p3[1] && wejscie.GetMouseY() <= p3[3])
str3.SetColor(Color::Blue);
else
str3.SetColor(Color::Black);
window.Draw(str1);
window.Draw(str21);
window.Draw(str31);
window.Display();
window.Clear(Color::White);
}
return 0;
}
|
# Copyright (c) [2022] Huawei Technologies Co.,Ltd.ALL rights reserved.
# This program is licensed under Mulan PSL v2.
# You can use it according to the terms and conditions of the Mulan PSL v2.
# http://license.coscl.org.cn/MulanPSL2
# THIS PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
####################################
# @Author :
# @email :
# @Date :
# @License : Mulan PSL v2
#####################################
from flask import request
from flask_restful import Resource
from flask_pydantic import validate
from server.utils.db import Insert, Delete, Edit, Select
from server.utils.response_util import response_collect
from server.utils.auth_util import auth
from server.model.mirroring import IMirroring, QMirroring, Repo
from server.utils.resource_utils import ResourceManager
from server.schema.base import DeleteBaseModel
from server.schema.mirroring import (
IMirroringBase,
IMirroringUpdate,
QMirroringBase,
QMirroringUpdate,
RepoCreate,
RepoUpdate,
)
class IMirroringItemEvent(Resource):
    """REST resource for a single ISO-mirroring record addressed by id."""

    @auth.login_required()
    @response_collect
    @validate()
    def delete(self, i_mirroring_id):
        """Delete the record with the given id."""
        return ResourceManager("i_mirroring").del_single(i_mirroring_id)

    @auth.login_required()
    @response_collect
    @validate()
    def get(self, i_mirroring_id):
        """Return the record with the given id."""
        return Select(IMirroring, {"id": i_mirroring_id}).single()

    @auth.login_required()
    @response_collect
    @validate()
    def put(self, i_mirroring_id, body: IMirroringUpdate):
        """Update the record with the given id from the validated payload."""
        payload = vars(body)
        payload["id"] = i_mirroring_id
        return Edit(IMirroring, payload).single(IMirroring, '/imirroring')
class IMirroringEvent(Resource):
    """Collection endpoint for ISO-mirroring records."""

    @auth.login_required()
    @response_collect
    @validate()
    def post(self, body: IMirroringBase):
        # Persist a new record built from the validated request body.
        return Insert(IMirroring, body.__dict__).single(IMirroring, '/imirroring')
class PreciseGetIMirroring(Resource):
    """Exact-match search over ISO-mirroring records."""

    @auth.login_required()
    @response_collect
    def get(self):
        # Query-string parameters are used verbatim as equality filters.
        body = request.args.to_dict()
        return Select(IMirroring, body).precise()
class QMirroringItemEvent(Resource):
    """REST resource for a single qcow2-mirroring record addressed by id."""

    @auth.login_required()
    @response_collect
    @validate()
    def delete(self, q_mirroring_id):
        """Delete the record with the given id."""
        return ResourceManager("q_mirroring").del_single(q_mirroring_id)

    @auth.login_required()
    @response_collect
    @validate()
    def get(self, q_mirroring_id):
        """Return the record with the given id."""
        return Select(QMirroring, {"id": q_mirroring_id}).single()

    @auth.login_required()
    @response_collect
    @validate()
    def put(self, q_mirroring_id, body: QMirroringUpdate):
        """Update the record with the given id from the validated payload."""
        payload = vars(body)
        payload["id"] = q_mirroring_id
        return Edit(QMirroring, payload).single(QMirroring, '/qmirroring')
class QMirroringEvent(Resource):
    """Collection endpoint for qcow2-mirroring records."""

    @auth.login_required()
    @response_collect
    @validate()
    def post(self, body: QMirroringBase):
        # Persist a new record built from the validated request body.
        return Insert(QMirroring, body.__dict__).single(QMirroring, '/qmirroring')
class PreciseGetQMirroring(Resource):
    """Exact-match search over qcow2-mirroring records."""

    @auth.login_required()
    @response_collect
    def get(self):
        # Query-string parameters are used verbatim as equality filters.
        body = request.args.to_dict()
        return Select(QMirroring, body).precise()
class RepoEvent(Resource):
    """Collection endpoint for repo records: create, batch-delete, update, search."""

    @auth.login_required()
    @response_collect
    @validate()
    def post(self, body: RepoCreate):
        """Create a repo record from the validated payload."""
        return Insert(Repo, vars(body)).single(Repo, '/repo')

    @auth.login_required()
    @response_collect
    @validate()
    def delete(self, body: DeleteBaseModel):
        """Delete the repo records listed in the payload."""
        return Delete(Repo, vars(body)).batch(Repo, '/repo')

    @auth.login_required()
    @response_collect
    @validate()
    def put(self, body: RepoUpdate):
        """Update a repo record from the validated payload."""
        return Edit(Repo, vars(body)).single(Repo, '/repo')

    @auth.login_required()
    @response_collect
    def get(self):
        """Fuzzy-search repo records using query-string filters."""
        filters = request.args.to_dict()
        return Select(Repo, filters).fuzz()
|
<filename>test/server/serverEnvTest.ts
/// <reference path="../../typings/index.d.ts" />
const Code = require('code'); // assertion library
const Lab = require('lab');
const lab = exports.lab = Lab.script();
const Hapi = require('hapi');
// Save and clear the `env` variable before the server module is loaded, so it
// boots with its default (unconfigured) environment; restored in the test.
const env = process.env.env;
delete process.env.env;
import server from "../../src/server";
const expect = Code.expect;
lab.describe('misc/routesTest.ts', function () {
    lab.it('returns the response result', (done) => {
        // With no route registered for "/", the server should respond 404.
        server.inject('/', function (res) {
            expect(res.statusCode).to.equal(404);
            process.env.env = env; // restore the saved environment value
            done();
        });
    });
});
|
/**
 * Build an HTML button that navigates to the given URL when clicked.
 *
 * @param string $label   Button caption (encoding is left to Html::button).
 * @param string $url     Destination URL used in the default onclick handler.
 * @param array  $options Extra HTML attributes; may override the default onclick.
 * @return string Rendered button element.
 */
function generateDynamicButton($label, $url, $options = [])
{
    // json_encode() emits a valid, fully-escaped JS string literal, so a
    // quote or backslash in $url can no longer break out of the onclick
    // handler (the old "'$url'" interpolation was injectable).
    $defaultOptions = ['onclick' => 'window.location.href=' . json_encode($url)];
    $options = array_merge($defaultOptions, $options);
    return Html::button($label, $options);
}
#!/bin/bash
# Provision an Ubuntu host with Ansible from the official PPA, fully
# unattended. Aborts on the first failing step.
set -e
export DEBIAN_FRONTEND=noninteractive
apt-get -y update
apt-get -y dist-upgrade
apt-get -y install software-properties-common
# -y: apt-add-repository prompts for confirmation otherwise, which would
# hang an unattended run.
apt-add-repository -y ppa:ansible/ansible
apt-get -y update
apt-get -y install ansible python-pip
pip install jmespath # needed for Ansible json_query
# Passwordless sudo for the default user; sudoers drop-ins must be mode 0440
# or sudo refuses to read them.
echo 'ubuntu ALL=NOPASSWD:ALL' > /etc/sudoers.d/ubuntu
chmod 0440 /etc/sudoers.d/ubuntu
#!/bin/zsh
# Convert wool_extract.txt into complete.csv:
#   - comma after the first field (first space -> comma)
#   - drop the next remaining space
#   - comma after each year value and after the "Date" header
#   - strip all tabs
# sed applies the expressions left-to-right to every line, which is exactly
# what the former eight-temp-file pipeline did — minus the temp files.
sed -e "s/ /,/" \
    -e "s/ //" \
    -e "s/2016/2016,/" \
    -e "s/2017/2017,/" \
    -e "s/2018/2018,/" \
    -e "s/2019/2019,/" \
    -e "s/2020/2020,/" \
    -e "s/Date/Date,/" \
    -e 's/\t//g' wool_extract.txt > complete.csv
/// Returns the sum of the integers 1 through `n`.
///
/// Uses the closed-form Gauss formula, O(1) instead of the original O(n)
/// loop. For `n < 1` the sum is empty and 0 is returned — the original
/// `1...n` range trapped at runtime for such inputs because a ClosedRange
/// requires lowerBound <= upperBound.
func sum(to n: Int) -> Int {
    guard n > 0 else { return 0 }
    return n * (n + 1) / 2
}
<reponame>mevoly/-<filename>pages/goods_detail/index.js
import{ request } from "../../request/index.js";
import regeneratorRuntime from '../../lib/runtime/runtime';
// Goods-detail page: loads one product, supports carousel image preview,
// add-to-cart, and favourite toggling (cart and favourites are persisted
// in wx local storage).
Page({
    data: {
        goodsObj: {},
        // whether the current goods item has been favourited
        isCollect: false
    },
    // Full detail record as returned by the API; kept outside `data`
    // because only a trimmed subset is bound to the view.
    GoodsInfo: {},
    onShow: function () {
        // onShow receives no options argument, so recover goods_id from the
        // current page's options via the page stack.
        let pages = getCurrentPages();
        let currentPage = pages[pages.length - 1];
        let options = currentPage.options;
        const { goods_id } = options;
        this.getGoodsDetail(goods_id);
    },
    // Fetch the goods detail and derive the favourite flag from storage.
    async getGoodsDetail(goods_id) {
        const goodsObj = await request({ url: "/goods/detail", data: { goods_id } });
        this.GoodsInfo = goodsObj;
        // 1 read the favourites array from local storage
        let collect = wx.getStorageSync("collect") || [];
        // 2 check whether the current goods item is already favourited
        let isCollect = collect.some(v => v.goods_id === this.GoodsInfo.goods_id);
        this.setData({
            goodsObj: {
                goods_name: goodsObj.goods_name,
                goods_price: goodsObj.goods_price,
                // .webp swapped for .jpg — presumably the client cannot
                // render webp in rich text; TODO confirm
                goods_introduce: goodsObj.goods_introduce.replace(/\.webp/g, '.jpg'),
                pics: goodsObj.pics
            },
            isCollect
        })
    },
    // enlarge a carousel image in the full-screen previewer
    handlePrevewImage(e) {
        const urls = this.GoodsInfo.pics.map(v => v.pics_mid);
        const current = e.currentTarget.dataset.url;
        wx.previewImage({
            current,
            urls
        });
    },
    // add the current goods item to the cart stored in local storage
    handleCartAdd() {
        let cart = wx.getStorageSync("cart") || [];
        let index = cart.findIndex(v => v.goods_id === this.GoodsInfo.goods_id);
        if (index === -1) {
            // first time in the cart: quantity 1, pre-selected
            this.GoodsInfo.num = 1;
            this.GoodsInfo.checked = true;
            cart.push(this.GoodsInfo);
        } else {
            // already in the cart: just bump the quantity
            cart[index].num++;
        }
        wx.setStorageSync("cart", cart);
        wx.showToast({
            title: '加入成功',
            icon: 'success',
            mask: true,
        });
    },
    // toggle favourite state for the current goods item
    handleCollect() {
        let isCollect = false;
        // 1 read the favourites array from local storage
        let collect = wx.getStorageSync("collect") || [];
        // 2 check whether this goods item was favourited before
        let index = collect.findIndex(v => v.goods_id === this.GoodsInfo.goods_id);
        // 3 index !== -1 means it is already favourited
        if (index !== -1) {
            // found: remove it from the favourites array
            collect.splice(index, 1);
            isCollect = false;
            wx.showToast({
                title: '取消成功',
                icon: 'success',
                mask: true
            });
        } else {
            // not favourited yet: add the full record
            collect.push(this.GoodsInfo);
            isCollect = true;
            wx.showToast({
                title: '收藏成功',
                icon: 'success',
                mask: true
            });
        }
        // 4 write the array back to local storage
        wx.setStorageSync("collect", collect);
        // 5 update isCollect in data so the view refreshes
        this.setData({
            isCollect
        })
    }
})
def binary_search(X, target):
    """Return the index of `target` in the sorted list `X`, or -1 if absent.

    Iterative binary search, O(log n) comparisons. The original implicitly
    returned None when the target was missing (it simply fell off the end);
    an explicit -1 sentinel is returned instead. The redundant
    single-element special case is removed — the loop already handles it.
    """
    lower = 0
    upper = len(X) - 1
    while lower <= upper:
        # Midpoint of the remaining search window.
        mid = (lower + upper) // 2
        if X[mid] == target:
            return mid
        elif X[mid] > target:
            # Target, if present, lies strictly below mid.
            upper = mid - 1
        else:
            # Target, if present, lies strictly above mid.
            lower = mid + 1
    return -1  # not found
if __name__ == '__main__':
    # Smoke test: 6 lives at index 5 of this sorted list.
    X = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    target = 6
    print(binary_search(X, target))
import vtk
def printme(string):
    """Echo `string` to stdout; returns None like the built-in print."""
    print(string)
# Module-level smoke call: prints "abc" when the script runs.
printme("abc")
# print(a)
def move_actor(name):
    """Construct and configure a vtkAxesActor, then print "123".

    NOTE(review): `name` is never used, and the configured actor is
    discarded — nothing is returned and nothing is added to a renderer.
    Presumably unfinished; confirm the intended behaviour.
    """
    axes = vtk.vtkAxesActor()
    axes.SetTotalLength(10, 10, 10)
    axes.SetShaftType(0)
    axes.SetAxisLabels(0)
    axes.SetCylinderRadius(0.02)
    print("123")
    return


# Module-level smoke call.
move_actor("a")
#!/usr/bin/env bash
# A script to deploy all of the microservices to openshift
# OBS. If you`re no running/using https://github.com/fabric8io/fabric8-maven-plugin
set -e  # stop at the first failed build/deploy step instead of ploughing on

# Build and deploy one service whose image is additionally tagged with the
# SHA-256 of its swarm jar; the digest is substituted into Deployment.yml.
# $1 = service name (directory, image suffix and openshift service name)
deploy_with_jar_sha() {
    local svc=$1
    cd "$svc"
    mvn clean package
    # shasum output is "<digest>  <path>"; keep both words in an array.
    declare -a sha256=($(shasum -a 256 "target/${svc}-swarm.jar"))
    docker build -t "example/${svc}:latest" -t "${sha256[1]}:${sha256[0]}" .
    sed "s/JAR_SHA256_VALUE/${sha256[0]}/" src/main/kubernetes/Deployment.yml > target/Deployment.yml
    oc create -f target/Deployment.yml
    oc create -f src/main/kubernetes/Service.yml
    oc expose service "$svc"
    cd ..
}

# Build and deploy one service with a plain image tag and an unmodified
# Deployment.yml.
# $1 = service name
deploy_simple() {
    local svc=$1
    cd "$svc"
    mvn clean package
    docker build -t "example/${svc}" .
    oc create -f src/main/kubernetes/Deployment.yml
    oc create -f src/main/kubernetes/Service.yml
    oc expose service "$svc"
    cd ..
}

deploy_with_jar_sha microservice-authz
deploy_with_jar_sha microservice-session
deploy_simple microservice-schedule
deploy_simple microservice-speaker
deploy_simple microservice-vote
|
#!/bin/bash
# inspired/adapted from: https://github.com/leucos/ansible-rsync-backup/blob/master/files/rbackup.sh
set -eu
## Logging functions
function prepare_date() {
    # Thin wrapper around date(1); forwards all arguments unchanged so the
    # caller chooses the format string.
    date "$@"
}
function log() {
    # Timestamped log line: appended to LOG_FILE when one is configured,
    # otherwise written to stdout.
    local line
    line="$(prepare_date +%F_%H:%M:%S): ${*}"
    if [ -n "${LOG_FILE}" ]; then
        echo "${line}" >> "${LOG_FILE}"
    else
        echo "${line}"
    fi
}
function loginfo() {
    # Convenience wrapper: log with INFO severity.
    log "INFO: ${*}"
}
# Only used if -v --verbose is passed in
function logverbose() {
    # VERBOSE holds the literal string "true" or "false" and is executed as
    # a command, so the DEBUG line is emitted only in verbose mode.
    if ${VERBOSE}; then
        log "DEBUG: ${*}"
    fi
}
# Pass errors to stderr and count them for the final exit status.
function logerror() {
    log "ERROR: ${*}" >&2
    # Plain arithmetic expansion instead of `let ERROR_COUNT++`: `let`
    # returns a non-zero status when the expression evaluates to 0 (the
    # first post-increment does), which aborts the whole script under the
    # `set -eu` at the top of this file.
    ERROR_COUNT=$((ERROR_COUNT + 1))
}
### Arguments validation
# Check that all globals set by parse() form a usable configuration; each
# failure logs an error and exits immediately with status 1.
function validate() {
    if [ -z "${RSYNC}" ]; then
        logerror "Cannot find rsync utility please make sure it is in the PATH"
        exit 1
    fi
    if [ -z "${REMOTE_SOURCE}" ]; then
        logerror "Remote source (-s) is not set"
        exit 1
    fi
    if [ -z "${LOCAL_DESTINATION}" ]; then
        logerror "Local destination (-d) is not set"
        exit 1
    fi
    if [ ! -w "${LOCAL_DESTINATION}" ]; then
        # Message fixed: it previously read "does exist or is not writable".
        logerror "Destination $LOCAL_DESTINATION does not exist or is not writable"
        exit 1
    fi
    re='^[0-9]+$'
    if ! [[ $KEEP =~ $re ]] ; then
        logerror "Provided keep count ($KEEP) is not a number"
        # Previously fell through with an unusable KEEP, which would later
        # crash purge()'s numeric comparison — fail fast here instead.
        exit 1
    fi
}
## Backup
# Create one timestamped snapshot under LOCAL_DESTINATION and repoint the
# "current" symlink at it. Unchanged files are hard-linked against the
# previous snapshot via --link-dest, so each snapshot looks full but only
# costs the delta.
function backup() {
    DATE=$(date "+%Y-%m-%dT%H:%M:%S")
    loginfo "Creating backup ${DATE} in ${LOCAL_DESTINATION}"
    RSYNC_OPTS="-a ${RSYNC_COMPRESS} ${VERBOSE_RSYNC}"
    # Sets dry run if needed
    # NOTE(review): ${RSYNC_COMPRESS} is appended a second time here —
    # harmless duplication; only -n is actually needed.
    $DRY_RUN && RSYNC_OPTS="${RSYNC_OPTS} ${RSYNC_COMPRESS} -n"
    logverbose "Executing ${RSYNC} ${RSYNC_OPTS} --link-dest ${LOCAL_DESTINATION}/current/ ${REMOTE_SOURCE} ${LOCAL_DESTINATION}/${DATE}/"
    # We check if this is the first backup and skip link-dest
    # ("current" only exists once at least one snapshot has completed).
    if [ ! -L "${LOCAL_DESTINATION}/current" ]; then
        loginfo "First backup - using full mode"
        # shellcheck disable=SC2086
        ${RSYNC} ${RSYNC_OPTS} ${REMOTE_SOURCE} "${LOCAL_DESTINATION}/${DATE}/" >> "${LOG_FILE}" 2>&1
    else
        loginfo "Diff backup - using link-dest"
        # shellcheck disable=SC2086
        ${RSYNC} ${RSYNC_OPTS} --link-dest "${LOCAL_DESTINATION}/current/" ${REMOTE_SOURCE} "${LOCAL_DESTINATION}/${DATE}/" >> "${LOG_FILE}" 2>&1
    fi
    # Flush filesystem buffers before repointing the symlink.
    sync
    logverbose "Symlinking ${LOCAL_DESTINATION}/${DATE}/ to ${LOCAL_DESTINATION}/current/"
    if ! $DRY_RUN; then
        rm -f "${LOCAL_DESTINATION}/current"
        ln -s "${LOCAL_DESTINATION}/${DATE}" "${LOCAL_DESTINATION}/current"
    fi
}
# Purge old backups
# Delete the oldest snapshot directories so that at most KEEP remain
# (KEEP == 0 means keep everything).
function purge() {
    if [ "$KEEP" -eq 0 ]; then
        loginfo "No backups will be purged (-k 0)"
        return
    fi
    # Remove older backups for mysqldump
    # COUNT=$(( $(ls -t1 "${LOCAL_DESTINATION}/" | grep -v current -c) - $KEEP ))
    # nullglob: an empty destination yields an empty array rather than the
    # literal unexpanded pattern.
    shopt -s nullglob
    # NOTE(review): unquoted expansion — a destination path containing
    # whitespace would split into several array entries; confirm paths are
    # whitespace-free.
    file_arr=(${LOCAL_DESTINATION}/*)
    CURRENT="${#file_arr[@]}"
    # Remove "current" symlink and requested keep so we end up with count of directories to remove
    COUNT=$((CURRENT - 1 - KEEP))
    if [ $COUNT -gt 0 ]; then
        loginfo "Erasing $COUNT old backups, keeping ${KEEP}"
        # shellcheck disable=SC2012
        # ls sorts lexicographically; the ISO-8601 snapshot names therefore
        # come out oldest-first, so head -COUNT selects the oldest entries.
        for i in $(ls "${LOCAL_DESTINATION}/" | head -$COUNT); do
            loginfo "Erasing ${i}"
            # :? guards against an empty variable turning this into rm -rf /
            ${DRY_RUN} || rm -rf "${LOCAL_DESTINATION:?}/${i:?}"
        done
    else
        loginfo "No backup to purge ($((CURRENT - 1 )) present, ${KEEP} to keep)"
    fi
}
# Parse arguments
# Initialise all configuration globals to their defaults, translate long
# options into their short equivalents, then run a single getopts pass.
function parse() {
    DRY_RUN=false
    ERROR_COUNT=0
    KEEP=0 # keep everything by default
    LOCAL_DESTINATION=""
    LOG_FILE="/dev/null"
    REMOTE_SOURCE=""
    RSYNC=$(which rsync 2> /dev/null) # find rsync
    RSYNC_COMPRESS="" # do not compress by default
    VERBOSE=false # prints detailed information
    VERBOSE_RSYNC="" # add more detail to rsync when verbose mode is active
    # Map each long option onto its short form so getopts can handle both.
    for arg in "$@"
    do
        shift
        case "$arg" in
            "--purge") set -- "$@" "-p" ;;
            "--source") set -- "$@" "-s" ;;
            "--destination") set -- "$@" "-d" ;;
            "--verbose") set -- "$@" "-v" ;;
            "--Verbose") set -- "$@" "-V" ;;
            "--log") set -- "$@" "-l" ;;
            "--dry-run") set -- "$@" "-n" ;;
            "--keep") set -- "$@" "-k" ;;
            "--compress") set -- "$@" "-c" ;;
            *) set -- "$@" "$arg"
        esac
    done
    # Option string fixed: it previously contained a duplicate "h:" (after
    # "n"), making -h ambiguous/argument-taking depending on the getopts
    # implementation.
    # NOTE(review): -p is declared (with a required argument) but has no
    # handler case below — confirm whether purge selection was ever wired up.
    while getopts 'p:s:d:hvl:nk:Vc' OPTION
    do
        case $OPTION in
            k)
                KEEP="${OPTARG}"
                ;;
            s)
                REMOTE_SOURCE="${OPTARG}"
                ;;
            l)
                LOG_FILE="${OPTARG}"
                ;;
            d)
                LOCAL_DESTINATION="${OPTARG}"
                ;;
            n)
                DRY_RUN=true
                ;;
            v)
                VERBOSE=true
                ;;
            V)
                VERBOSE_RSYNC="-v"
                ;;
            c)
                RSYNC_COMPRESS="-z"
                ;;
            h)
                help
                exit 0
                ;;
        esac
    done
}
# Entry point: parse CLI arguments, validate the configuration, run the
# backup, then prune old snapshots. The exit status is the number of
# errors logged along the way.
parse "$@"
validate
backup
purge
loginfo "Backup completed with ${ERROR_COUNT} errors"
exit $ERROR_COUNT
|
<reponame>bmyte/runelite
import net.runelite.mapping.ObfuscatedName;
/**
 * Deobfuscated RuneLite client interface; the original obfuscated name was
 * "gp". Marker interface that only extends the raw {@link Iterable} type —
 * no members of its own.
 */
@ObfuscatedName("gp")
public interface class194 extends Iterable {
}
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed for reproducibility:
#   -d 201        select one specific test by number
#   -g 7          select the random-number generator by number
#   -S 588971838  fixed seed so the run is repeatable
dieharder -d 201 -g 7 -S 588971838
|
#!/usr/bin/env sh
set -e
#set -x
# Base URL for published linkerd2 release artifacts.
MIRROR=https://github.com/linkerd/linkerd2/releases/download
# Fetch the published .sha256 checksum file for one linkerd2 CLI artifact
# and print it as an "  os: sha256:<digest>" line, preceded by a comment
# line with the URL it came from.
#   $1 = release channel (e.g. stable), $2 = version, $3 = os,
#   $4 = optional filename suffix (e.g. ".exe" for windows)
dl()
{
    local branch=$1
    local ver=$2
    local os=$3
    local suffix=${4:-}
    local url=$MIRROR/${branch}-${ver}/linkerd2-cli-$branch-$ver-$os$suffix.sha256
    printf " # %s\n" $url
    # The checksum file's first whitespace-separated field is the digest.
    printf " %s: sha256:%s\n" $os $(curl -sSL $url | awk '{print $1}')
}
dl_ver() {
    # Print checksum entries for every published platform of one version.
    local branch=$1
    local ver=$2
    printf " '%s':\n" $ver
    # Suffix-less platforms first, then windows with its ".exe" suffix,
    # in the same order as before.
    for os in darwin linux-amd64 linux-arm linux-arm64; do
        dl $branch $ver $os
    done
    dl $branch $ver windows .exe
}
# Emit the stable-channel block; the version defaults to 2.11.0 but can be
# overridden by the first CLI argument.
printf " %s:\n" stable
dl_ver stable ${1:-2.11.0}
#printf " %s:\n" edge
#dl_ver edge 20.1.4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.