text stringlengths 1 1.05M |
|---|
"""
Support for hydrological data from the Federal Office for the Environment FOEN.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.swiss_hydrological_data/
"""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ATTRIBUTION, CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
REQUIREMENTS = ['swisshydrodata==0.0.3']

_LOGGER = logging.getLogger(__name__)

# Attribution string exposed on every sensor, as required by the data source.
ATTRIBUTION = "Data provided by the Swiss Federal Office for the " \
    "Environment FOEN"

# Keys of the per-condition statistics copied into the state attributes.
ATTR_DELTA_24H = 'delta-24h'
ATTR_MAX_1H = 'max-1h'
ATTR_MAX_24H = 'max-24h'
ATTR_MEAN_1H = 'mean-1h'
ATTR_MEAN_24H = 'mean-24h'
ATTR_MIN_1H = 'min-1h'
ATTR_MIN_24H = 'min-24h'
ATTR_PREVIOUS_24H = 'previous-24h'
ATTR_STATION = 'station'
ATTR_STATION_UPDATE = 'station_update'
ATTR_WATER_BODY = 'water_body'
ATTR_WATER_BODY_TYPE = 'water_body_type'

CONF_STATION = 'station'

# Minimum interval between calls to the FOEN web service (enforced via
# the Throttle decorator on HydrologicalData.update below).
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)

SENSOR_DISCHARGE = 'discharge'
SENSOR_LEVEL = 'level'
SENSOR_TEMPERATURE = 'temperature'

# Supported measurement conditions mapped to their frontend icons.
CONDITIONS = {
    SENSOR_DISCHARGE: 'mdi:waves',
    SENSOR_LEVEL: 'mdi:zodiac-aquarius',
    SENSOR_TEMPERATURE: 'mdi:oil-temperature',
}

# Statistic keys exported as state attributes for each monitored condition.
CONDITION_DETAILS = [
    ATTR_DELTA_24H,
    ATTR_MAX_1H,
    ATTR_MAX_24H,
    ATTR_MEAN_1H,
    ATTR_MEAN_24H,
    ATTR_MIN_1H,
    ATTR_MIN_24H,
    ATTR_PREVIOUS_24H,
]

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_STATION): vol.Coerce(int),
    vol.Optional(CONF_MONITORED_CONDITIONS, default=[SENSOR_TEMPERATURE]):
        vol.All(cv.ensure_list, [vol.In(CONDITIONS)]),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Swiss hydrological sensor.

    Performs one synchronous update so an invalid station number is
    detected at setup time, then creates one entity per monitored
    condition.
    """
    station = config.get(CONF_STATION)
    monitored_conditions = config.get(CONF_MONITORED_CONDITIONS)

    hydro_data = HydrologicalData(station)
    hydro_data.update()

    if hydro_data.data is None:
        # Grammar fixed in the log message ("doesn't exists" -> "doesn't exist").
        _LOGGER.error("The station doesn't exist: %s", station)
        return

    add_entities(
        [SwissHydrologicalDataSensor(hydro_data, station, condition)
         for condition in monitored_conditions],
        True)
class SwissHydrologicalDataSensor(Entity):
    """Sensor exposing one measured condition of a FOEN hydrology station."""

    def __init__(self, hydro_data, station, condition):
        """Initialize the Swiss hydrological sensor."""
        self.hydro_data = hydro_data
        self._condition = condition
        # _data is the snapshot taken by the last update(); _state the value.
        self._data = self._state = self._unit_of_measurement = None
        self._icon = CONDITIONS[condition]
        self._station = station

    @property
    def name(self):
        """Return the name of the sensor."""
        return "{0} {1}".format(self._data['water-body-name'], self._condition)

    @property
    def unique_id(self) -> str:
        """Return a unique, friendly identifier for this entity."""
        return '{0}_{1}'.format(self._station, self._condition)

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        if self._state is not None:
            # Fix: read from the snapshot cached by this entity's last
            # update() rather than hydro_data.data, which is shared between
            # entities and may have been refreshed (or cleared) since.
            return self._data['parameters'][self._condition]['unit']
        return None

    @property
    def state(self):
        """Return the state of the sensor, rounded to two decimals."""
        if isinstance(self._state, (int, float)):
            return round(self._state, 2)
        return None

    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        attrs = {}
        if not self._data:
            # No data yet: still expose the mandatory attribution.
            attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
            return attrs
        attrs[ATTR_WATER_BODY_TYPE] = self._data['water-body-type']
        attrs[ATTR_STATION] = self._data['name']
        attrs[ATTR_STATION_UPDATE] = \
            self._data['parameters'][self._condition]['datetime']
        attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
        # Statistic keys use '-' upstream but '_' in Home Assistant attributes.
        for entry in CONDITION_DETAILS:
            attrs[entry.replace('-', '_')] = \
                self._data['parameters'][self._condition][entry]
        return attrs

    @property
    def icon(self):
        """Icon to use in the frontend."""
        return self._icon

    def update(self):
        """Get the latest data and update the state."""
        self.hydro_data.update()
        self._data = self.hydro_data.data
        if self._data is None:
            self._state = None
        else:
            self._state = self._data['parameters'][self._condition]['value']
class HydrologicalData:
    """Fetches and caches the measurements of a single FOEN station."""

    def __init__(self, station):
        """Remember the station number; no data is fetched yet."""
        self.station = station
        self.data = None

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Refresh the cached station data from the FOEN web service."""
        # Imported lazily so the platform can be set up before the
        # requirement is installed by Home Assistant.
        from swisshydrodata import SwissHydroData
        self.data = SwissHydroData().get_station(self.station)
|
#!/bin/bash
# Deletes local git branches that have already been merged into a target
# branch (default origin/master), optionally limited by a grep pattern.
#Define a default pattern for limiting the branches to be removed
pattern=""
#Define a default branch to check against
target="origin/master"
#Define xterm control code variables
green="\033[1;32m"
yellow="\033[1;33m"
red="\033[1;31m"
end="\033[m"
#parse the options
while getopts ":fghp:t:" opt; do
    case $opt in
        #Help Data
        h)
            echo -e "\nusage: ./removeMergedLocalBranches.sh -f -g [-p <pattern>] [-t <targetBranch>]\n"
            echo -e " Options:"
            echo -e "\t-f fetch from all remotes before checking local branches\n"
            echo -e "\t-g output messaging in greyscale\n"
            echo -e "\t-p the grep pattern used to limit the branches to remove"
            echo -e "\t *note* all patterns start from the first character"
            echo -e "\t of the branch name, so in order to match any part"
            echo -e "\t of the branch name, begin your pattern with .*\n"
            echo -e "\t-t target branch to base check on Default: origin/master"
            exit 0
            ;;
        #turn on greyscale mode
        g)
            green=""
            yellow=""
            red=""
            end=""
            ;;
        #Fetch before check
        f)
            git fetch --all
            ;;
        #Set pattern if supplied
        p)
            pattern=$OPTARG
            ;;
        #Set target if supplied
        t)
            target=$OPTARG
            ;;
        #user has supplied an invalid option
        \?)
            echo -e "${red}Invalid option: -$OPTARG${end}" >&2
            exit 1
            ;;
        #user has not supplied a required argument
        :)
            echo -e "${red}Option -$OPTARG requires an argument.${end}" >&2
            exit 1
            ;;
    esac
done
#Fetch the SHA-1 hash for the target branch
# NOTE(review): backticks and unquoted expansions follow the original style;
# $(...) with quoting would be more robust — confirm before changing.
targetsha=`git rev-parse $target 2> /dev/null`
if [ $? -ne 0 ]; then
    echo -e "${red}The target specified cannot be resolved, please double check the reference.${end}" >&2
    exit 1
fi
#Check to see if the current branch has been merged, output a notice if it is
# "^*" matches the currently checked-out branch in `git branch` output.
currentBranchMatch=`git branch --merged $targetsha | grep "^* $pattern" | xargs`
if [ -n "$currentBranchMatch" ]
then
    echo -e "\n${yellow}The current branch has been merged into $target, but cannot be deleted.${end}"
    other=" other"
else
    other=""
fi
#Check for branches that have been merged
# Leading space in the grep pattern excludes the checked-out branch.
branches=`git branch --merged $targetsha | grep "^ $pattern" | xargs`
if [ -z "$branches" ]
then
    if [ -z "$pattern" ]
    then
        echo -e "\n${green}No$other local branches merged into $target found${end}\n"
        exit 0
    else
        echo -e "\n${green}No$other local branches merged into $target match the pattern '$pattern'${end}\n"
        exit 0
    fi
fi
#Remove the branches that have been merged
if [ -z "$pattern" ]
then
    echo -e "\n${green}Deleting local branches merged into $target${end}\n"
else
    echo -e "\n${green}Deleting local branches merged into $target that match the pattern '$pattern'${end}\n"
fi
# NOTE(review): -D force-deletes; since the list only contains merged
# branches, -d would behave identically while guarding against mistakes.
git branch -D $branches
|
import numpy as np
class get_config(object):
    """Dataset-specific configuration: dimensions, padding and data lists.

    Loads the train/validation file lists and labels from
    ``../meta/<dataset>.npz``.

    Raises:
        IOError: if the dataset name is unknown (kept as IOError for
            backward compatibility with existing callers).
    """

    # Datasets that share the identical 800x800-input / 200x200-output
    # counting configuration (previously four duplicated elif branches).
    _CELL_LIKE_DATASETS = ('vgg_cell', 'vgg_cell_new1', 'pipes', 'hela_cell')

    def __init__(self, args):
        self.dataset = args.dataset
        self.datapath = args.data_path
        # data lists (filled in by __set__ from the .npz metadata file)
        self.trn_lst = None
        self.trn_lb = None
        self.val_lst = None
        self.val_lb = None
        # data dimensions
        self.imgdims = None
        self.patchdims = None
        self.outputdims = None
        # amount to pad input images,
        # useful for resolving undesired boundary effects
        self.pad = None
        self.__set__()

    def __set__(self):
        # NOTE(review): naming this method __set__ makes instances act as
        # data descriptors if ever stored as class attributes; a plain name
        # would be safer — kept unchanged for interface stability.
        if self.dataset == 'imagenet':
            # data preprocessing follows:
            # https://github.com/bertinetto/siamese-fc/tree/master/ILSVRC15-curation
            # reference:
            # "Fully-Convolutional Siamese Networks for Object Tracking",
            # In ECCV16 Workshop. The object is always centered.
            self.imgdims = (255, 255, 3)
            self.patchdims = (63, 63, 3)
            self.outputdims = (64, 64, 1)
            self.patch_start = 127 - 32
            self.patch_end = 127 + 31
            self.pad = 0
        elif self.dataset in self._CELL_LIKE_DATASETS:
            self.imgdims = (800, 800, 3)
            self.patchdims = (64, 64, 3)
            self.outputdims = (200, 200, 1)
            self.pad = 0
        elif self.dataset == 'car':
            self.imgdims = (360, 640, 3)
            self.patchdims = (64, 64, 3)
            self.outputdims = (90, 160, 1)
            self.pad = 0
        elif self.dataset == 'crowd':
            self.imgdims = (256, 256, 3)
            self.patchdims = (128, 128, 3)
            self.outputdims = (64, 64, 1)
            self.pad = 0
        else:
            raise IOError('==> unknown data set.')
        with np.load('../meta/{}.npz'.format(self.dataset)) as data:
            self.trn_lst = data['trn_lst']
            self.trn_lb = data['trn_lb']
            self.val_lst = data['val_lst']
            self.val_lb = data['val_lb']
|
# OBSS SAHI Tool
# Code written by <NAME>, 2020.
import glob
import json
import ntpath
import os
import pickle
import re
import urllib.request
import zipfile
from os import path
from pathlib import Path
import numpy as np
def create_dir(_dir):
    """
    Creates given directory (and parents) if it is not present.

    Uses exist_ok=True instead of an exists() check to avoid the
    check-then-create race when several processes start concurrently.
    """
    os.makedirs(_dir, exist_ok=True)
def unzip(file_path: str, dest_dir: str):
    """
    Extract every member of the .zip archive at ``file_path`` into ``dest_dir``.

    Example inputs:
        file_path: 'data/01_alb_id.zip'
        dest_dir: 'data/'
    """
    with zipfile.ZipFile(file_path) as archive:
        archive.extractall(dest_dir)
def save_json(data, save_path):
    """
    Saves json formatted data (given as "data") to save_path.

    Example inputs:
        data: {"image_id": 5}
        save_path: "dirname/coco.json"
    """
    # create dir if not present; dirname is "" for a bare filename and
    # os.makedirs("") raises FileNotFoundError, so only create when a
    # directory part actually exists
    save_dir = os.path.dirname(save_path)
    if save_dir:
        os.makedirs(save_dir, exist_ok=True)
    # export as json; NumpyEncoder converts numpy scalars/arrays to
    # plain Python types
    with open(save_path, "w", encoding="utf-8") as outfile:
        json.dump(data, outfile, separators=(",", ":"), cls=NumpyEncoder)
# type check when save json files
class NumpyEncoder(json.JSONEncoder):
    """JSON encoder that serializes numpy scalar and array types."""

    def default(self, obj):
        # numpy arrays are neither np.integer nor np.floating, so the
        # branch order is interchangeable with the original.
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        return super().default(obj)
def load_json(load_path):
    """
    Load and return JSON data from ``load_path``.

    Example inputs:
        load_path: "dirname/coco.json"
    """
    with open(load_path) as json_file:
        return json.load(json_file)
def list_files(
    directory: str,
    contains: list = [".json"],
    verbose: int = 1,
) -> list:
    """
    Walk given directory (non-recursively) and return paths of matching files.

    Args:
        directory: str
            "data/coco/"
        contains: list
            A list of strings to check if the target file contains them,
            example: ["coco.png", ".jpg", "jpeg"]
        verbose: int
            0: no print
            1: print number of files
    Returns:
        filepath_list : list
            List of file paths
    """
    filepath_list = [
        os.path.join(directory, file)
        for file in os.listdir(directory)
        if any(term in file for term in contains)
    ]
    if verbose:
        folder_name = directory.split(os.sep)[-1]
        print("There are {} listed files in folder {}.".format(
            len(filepath_list), folder_name))
    return filepath_list
def list_files_recursively(directory: str, contains: list = [".json"], verbose: str = True) -> (list, list):
    """
    Walk given directory recursively and return paths of matching files.

    Arguments
    -------
    directory : str
        "data/coco/"
    contains : list
        A list of strings to check if the target file contains them,
        example: ["coco.png", ".jpg", "jpeg"]
    verbose : bool
        If true, prints some results

    Returns
    -------
    relative_filepath_list : list
        List of file paths relative to given directory
    abs_filepath_list : list
        List of absolute file paths
    """
    abs_filepath_list = []
    relative_filepath_list = []
    # walk directories recursively and collect matching files
    for current_dir, _, filenames in os.walk(directory):
        for filename in filenames:
            if not any(term in filename for term in contains):
                continue
            abs_filepath = os.path.join(current_dir, filename)
            abs_filepath_list.append(abs_filepath)
            # relative path is everything after the given directory prefix
            relative_filepath_list.append(abs_filepath.split(directory)[-1])
    if verbose:
        folder_name = directory.split(os.sep)[-1]
        print("There are {} listed files in folder {}.".format(
            len(relative_filepath_list), folder_name))
    return relative_filepath_list, abs_filepath_list
def get_base_filename(path: str):
    """
    Split *path* into (base_filename_with_extension,
    base_filename_without_extension).

    ntpath.basename handles both '/' and '\\' separators, so Windows-style
    paths work on any platform.
    """
    with_extension = ntpath.basename(path)
    without_extension = os.path.splitext(with_extension)[0]
    return with_extension, without_extension
def get_file_extension(path: str):
    """Return the extension of *path* including the dot, or '' if none."""
    return os.path.splitext(path)[1]
def load_pickle(load_path):
    """
    Loads pickle formatted data from load_path.

    Example inputs:
        load_path: "dirname/coco.pickle"
    """
    # Pickle data is binary: the file must be opened in "rb" mode.
    # The original text-mode open made pickle.load fail (save_pickle in
    # this module correctly writes with "wb").
    with open(load_path, "rb") as pickle_file:
        data = pickle.load(pickle_file)
    return data
def save_pickle(data, save_path):
    """
    Saves pickle formatted data (given as "data") to save_path.

    Example inputs:
        data: {"image_id": 5}
        save_path: "dirname/coco.pickle"
    """
    # create dir if not present; dirname is "" for a bare filename and
    # os.makedirs("") raises FileNotFoundError, so only create when a
    # directory part actually exists
    save_dir = os.path.dirname(save_path)
    if save_dir:
        os.makedirs(save_dir, exist_ok=True)
    # export as pickle (binary mode)
    with open(save_path, "wb") as outfile:
        pickle.dump(data, outfile)
def import_class(model_name):
    """
    Imports a predefined detection class by class name.

    Args:
        model_name: str
            Name of the detection model class (example: "MmdetDetectionModel")
    Returns:
        class_: class with given path
    """
    # __import__ with a fromlist returns the sahi.model module itself,
    # from which the requested class is then looked up by name.
    model_module = __import__("sahi.model", fromlist=[model_name])
    return getattr(model_module, model_name)
def increment_path(path, exist_ok=True, sep=""):
    """
    Return *path* unchanged if usable, otherwise the next free numeric
    variant: runs/exp -> runs/exp{sep}2, runs/exp{sep}3, ...

    Args:
        path: base path to (maybe) increment
        exist_ok: when True an existing path is returned unchanged
        sep: separator placed between the stem and the number
    """
    path = Path(path)  # os-agnostic
    if not path.exists() or exist_ok:
        return str(path)
    dirs = glob.glob(f"{path}{sep}*")  # similar paths
    # re.escape keeps stems containing regex metacharacters (e.g. "exp+a")
    # from corrupting the pattern; the original interpolated the raw stem.
    matches = [re.search(rf"{re.escape(path.stem)}{sep}(\d+)", d) for d in dirs]
    indices = [int(m.groups()[0]) for m in matches if m]
    n = max(indices) + 1 if indices else 2  # increment number
    return f"{path}{sep}{n}"  # updated path
def download_from_url(from_url: str, to_path: str):
    """Download *from_url* to *to_path* unless the file already exists."""
    Path(to_path).parent.mkdir(parents=True, exist_ok=True)
    # skip the download when a previous run already fetched the file
    if path.exists(to_path):
        return
    urllib.request.urlretrieve(from_url, to_path)
|
<filename>coca-co/src/main/java/coca/co/ins/VoidCoIns.java
/**
*
*/
package coca.co.ins;
/**
* What?
*
* @author dzh
* @date Sep 2, 2017 5:44:31 PM
* @since 0.0.1
*/
public class VoidCoIns extends BasicCoIns<Void> {

    /** Shared stateless instance; prefer reusing it over allocating new ones. */
    public static final VoidCoIns VOID = new VoidCoIns();

    /** Constructs an instruction carrying no payload (Ins.VOID). */
    public VoidCoIns() {
        super(Ins.VOID);
    }
}
|
#!/bin/sh
# Prepares a Raspberry Pi for Kubernetes: installs Docker, disables swap
# (kubelet refuses to run with swap on), enables cgroups, installs kubeadm.

# Install Docker
# NOTE(review): piping a remote script to sh executes unreviewed code;
# verify get.docker.com is the intended source before running.
curl -sSL get.docker.com | sh && \
sudo usermod pi -aG docker
# Disable Swap
sudo dphys-swapfile swapoff && \
sudo dphys-swapfile uninstall && \
sudo update-rc.d dphys-swapfile remove
echo Adding " cgroup_enable=cpuset cgroup_enable=memory" to /boot/cmdline.txt
# Keep a backup of the original boot command line before rewriting it.
sudo cp /boot/cmdline.txt /boot/cmdline_backup.txt
# if you encounter problems, try changing cgroup_memory=1 to cgroup_enable=memory.
orig="$(head -n1 /boot/cmdline.txt) cgroup_enable=cpuset cgroup_memory=1"
echo $orig | sudo tee /boot/cmdline.txt
# Add repo list and install kubeadm
curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key add - && \
echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" | sudo tee /etc/apt/sources.list.d/kubernetes.list && \
sudo apt-get update -q && \
sudo apt-get install -qy kubeadm
|
package com.report.adapter.persistence.converter;
import com.report.application.entity.FilmCharacter;
import org.modelmapper.AbstractConverter;
public class FilmCharacterValueObjectToEntityConverter extends AbstractConverter<com.report.application.domain.vo.FilmCharacter, FilmCharacter> {
@Override
protected FilmCharacter convert(com.report.application.domain.vo.FilmCharacter filmCharacter) {
if (filmCharacter == null) {
return null;
}
Long filmId = filmCharacter.getFilmId()
.getRaw()
.longValue();
String filmName = filmCharacter.getFilmName()
.getRaw();
Long characterId = filmCharacter.getCharacterId()
.getRaw()
.longValue();
String characterName = filmCharacter.getCharacterName()
.getRaw();
Long planetId = filmCharacter.getPlanetId()
.getRaw()
.longValue();
String planetName = filmCharacter.getPlanetName()
.getRaw();
FilmCharacter entity = new FilmCharacter();
entity.setFilmId(filmId);
entity.setFilmName(filmName);
entity.setCharacterId(characterId);
entity.setCharacterName(characterName);
entity.setPlanetId(planetId);
entity.setPlanetName(planetName);
return entity;
}
} |
<reponame>v-gu/flume<gh_stars>0
/*
Copyright 2013 Vincent.Gu
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.flume.source.syncdir;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import org.apache.flume.ChannelException;
import org.apache.flume.Context;
import org.apache.flume.CounterGroup;
import org.apache.flume.Event;
import org.apache.flume.EventDrivenSource;
import org.apache.flume.conf.Configurable;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.source.AbstractSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* SyncDirSource is a source that will sync files like spool directory but also
* copy the directory's original layout. Also unlike spool directory, this
* source will also track changed files.
* <p/>
* For e.g., a file will be identified as finished and stops reading from it if
* an empty file with suffix ".done" that present in the same directory of the
* same name as of the original file.
*/
public class SyncDirSource extends AbstractSource implements
    Configurable, EventDrivenSource {
  private static final Logger logger = LoggerFactory
      .getLogger(SyncDirSource.class);

  // Delay used when polling for file changes
  private boolean backoff = true;
  private int backoffInterval;
  private int maxBackoffInterval;

  /* Config options */
  private File syncDirectory;
  private String directoryPrefix;
  private String endFileSuffix;
  private String statsFilePrefix;
  private String syncingStatsFileSuffix;
  private String syncedStatsFileSuffix;
  private String ignoredFileRegex;
  private String filenameHeaderKey =
      SyncDirSourceConfigurationConstants.FILENAME_HEADER_KEY;
  private int batchSize;

  private ScheduledExecutorService executor;
  private CounterGroup counterGroup;
  private Runnable runner;
  private SyncDirFileLineReader reader;
  // Flags observed by the test accessors below.
  private boolean hitChannelException;
  private boolean hasFatalError;

  /**
   * Starts a single-threaded scheduler that drains the sync-directory
   * reader, re-running 2 seconds after each pass completes.
   */
  @Override
  public synchronized void start() {
    logger.info("SyncDirSource source starting with directory:{}",
        syncDirectory);
    counterGroup = new CounterGroup();
    reader = new SyncDirFileLineReader(
        syncDirectory, endFileSuffix,
        statsFilePrefix, syncingStatsFileSuffix, syncedStatsFileSuffix, ignoredFileRegex);
    runner = new DirectorySyncRunnable(reader, counterGroup);
    executor = Executors.newSingleThreadScheduledExecutor();
    executor.scheduleWithFixedDelay(runner, 0, 2000, TimeUnit.MILLISECONDS);
    super.start();
    logger.debug("SyncDirSource source started");
  }

  /**
   * Stops the polling scheduler, allowing up to 10 seconds for an
   * in-flight batch to finish before forcing shutdown.
   */
  @Override
  public synchronized void stop() {
    executor.shutdown();
    try {
      executor.awaitTermination(10L, TimeUnit.SECONDS);
    } catch (InterruptedException ex) {
      logger.info("Interrupted while awaiting termination", ex);
    }
    executor.shutdownNow();
    super.stop();
    logger.debug("SyncDirSource source stopped");
  }

  /** Reads all settings from the context, falling back to the defaults. */
  @Override
  public void configure(Context context) {
    String syncDirectoryStr = context.getString(
        SyncDirSourceConfigurationConstants.SYNC_DIRECTORY);
    Preconditions.checkState(syncDirectoryStr != null,
        "Configuration must specify a sync directory");
    syncDirectory = new File(syncDirectoryStr);
    directoryPrefix = context.getString(
        SyncDirSourceConfigurationConstants.DIRECTORY_PREFIX,
        SyncDirSourceConfigurationConstants.DEFAULT_DIRECTORY_PREFIX);
    endFileSuffix = context.getString(
        SyncDirSourceConfigurationConstants.END_FILE_SUFFIX,
        SyncDirSourceConfigurationConstants.DEFAULT_END_FILE_SUFFIX);
    statsFilePrefix = context.getString(
        SyncDirSourceConfigurationConstants.STATS_FILE_PREFIX,
        SyncDirSourceConfigurationConstants.DEFAULT_STATS_FILE_PREFIX);
    syncingStatsFileSuffix = context.getString(
        SyncDirSourceConfigurationConstants.SYNCING_STATS_FILE_SUFFIX,
        SyncDirSourceConfigurationConstants.DEFAULT_SYNCING_STATS_FILE_SUFFIX);
    syncedStatsFileSuffix = context.getString(
        SyncDirSourceConfigurationConstants.SYNCED_STATS_FILE_SUFFIX,
        SyncDirSourceConfigurationConstants.DEFAULT_SYNCED_STATS_FILE_SUFFIX);
    ignoredFileRegex = context.getString(
        SyncDirSourceConfigurationConstants.IGNORED_FILE_REGEX,
        SyncDirSourceConfigurationConstants.DEFAULT_IGNORED_FILE_REGEX);
    batchSize = context.getInteger(
        SyncDirSourceConfigurationConstants.BATCH_SIZE,
        SyncDirSourceConfigurationConstants.DEFAULT_BATCH_SIZE);
    backoffInterval = context.getInteger(
        SyncDirSourceConfigurationConstants.BACKOFF_INTERVAL,
        SyncDirSourceConfigurationConstants.DEFAULT_BACKOFF_INTERVAL);
    maxBackoffInterval = context.getInteger(
        SyncDirSourceConfigurationConstants.MAX_BACKOFF_INTERVAL,
        SyncDirSourceConfigurationConstants.DEFAULT_MAX_BACKOFF_INTERVAL);
  }

  /**
   * Wraps one line of file content in an Event, storing the file's
   * (optionally prefixed) relative path under the filename header key.
   */
  private Event createEvent(byte[] lineEntry, String filename) {
    Event out = EventBuilder.withBody(lineEntry);
    if (directoryPrefix.length() > 0) {
      out.getHeaders().put(filenameHeaderKey,
          directoryPrefix + File.separator + filename);
    } else {
      out.getHeaders().put(filenameHeaderKey, filename);
    }
    return out;
  }

  /** for testing */
  protected boolean hitChannelException() {
    return hitChannelException;
  }

  /** for testing */
  protected void setBackoff(final boolean backoff) {
    this.backoff = backoff;
  }

  /** for testing */
  protected boolean hasFatalError() {
    return hasFatalError;
  }

  /**
   * Drains the reader in batches and forwards each batch to the channel,
   * retrying with exponential backoff (capped at maxBackoffInterval)
   * when the channel rejects a batch.
   */
  private class DirectorySyncRunnable implements Runnable {
    private SyncDirFileLineReader reader;
    private CounterGroup counterGroup;

    public DirectorySyncRunnable(SyncDirFileLineReader reader,
        CounterGroup counterGroup) {
      this.reader = reader;
      this.counterGroup = counterGroup;
    }

    @Override
    public void run() {
      try {
        while (!Thread.interrupted()) {
          List<byte[]> lines = reader.readLines(batchSize);
          if (lines.size() == 0) {
            break;
          }
          // Header value: file path relative to the sync directory.
          String file = syncDirectory.toURI().relativize(
              reader.getLastFileRead().toURI()).getPath();
          List<Event> events = Lists.newArrayList();
          for (byte[] l : lines) {
            counterGroup.incrementAndGet("syncdir.lines.read");
            events.add(createEvent(l, file));
          }
          try {
            getChannelProcessor().processEventBatch(events);
            // Only mark lines consumed once the channel accepted them.
            reader.commit();
          } catch (ChannelException e) {
            hitChannelException = true;
            logger.warn("The channel is full, or this source's batch size is "
                + "lager than channel's transaction capacity. This source will "
                + "try again after " + String.valueOf(backoffInterval)
                + " milliseconds");
            if (backoff) {
              TimeUnit.MILLISECONDS.sleep(backoffInterval);
              // Double the wait each failure, capped at maxBackoffInterval.
              backoffInterval = backoffInterval << 1;
              backoffInterval = backoffInterval >= maxBackoffInterval ?
                  maxBackoffInterval : backoffInterval;
            }
            continue;
          }
        }
      } catch (Throwable t) {
        logger.error("FATAL: " + this.toString() + ": " +
            "Uncaught exception in SpoolDirectorySource thread. " +
            "Restart or reconfigure Flume to continue processing.", t);
        hasFatalError = true;
        Throwables.propagate(t);
      }
    }
  }
}
|
export default function(ngapp, fileHelpers) {
    ngapp.directive('settingsModal', function () {
        return {
            restrict: 'E',
            templateUrl: 'directives/settingsModal.html',
            controller: 'settingsModalController',
            scope: false
        }
    });

    ngapp.controller('settingsModalController', function($scope, formUtils, settingsService) {
        // inherited functions
        $scope.unfocusSettingsModal = formUtils.unfocusModal($scope.saveSettings);

        // initialize scope variables
        $scope.settings = settingsService.settings;
        $scope.profileName = settingsService.currentProfile.name;

        // scope functions
        $scope.saveSettings = function() {
            settingsService.saveSettings($scope.settings);
            $scope.toggleSettingsModal();
        };

        // Builds the display entry for one cached error file.
        $scope.buildFileEntry = function(filename, results) {
            let filePath = 'cache\\' + filename;
            // NOTE(review): loadJsonFile's fallback is {}, whose .length is
            // undefined — confirm cached files always contain arrays.
            let cachedErrors = fileHelpers.loadJsonFile(filePath, {});
            let modified = fileHelpers.getDateModified(filePath);
            return {
                hash: results[2],
                error_count: cachedErrors.length,
                modified: modified
            }
        };

        $scope.addCacheEntry = function(filename) {
            // Matches "<plugin>.esp/.esm-<32-char hash>.json".
            // (fix: the class [p|m] also matched a literal "|" character)
            let fileRegex = /(.+\.es[pm])\-([a-zA-Z0-9]{32})\.json/;
            let results = fileRegex.exec(filename);
            if (!results) return;
            let entry = $scope.errorCache.find(function(entry) {
                return entry.filename === results[1];
            });
            let file = $scope.buildFileEntry(filename, results);
            if (!entry) {
                $scope.errorCache.push({
                    filename: results[1],
                    files: [file]
                });
            } else {
                entry.files.push(file);
            }
        };

        $scope.loadErrorCache = function() {
            $scope.errorCache = [];
            let paths = fileHelpers.appDir.find('cache', { matching: '*.json', files: true, directories: false });
            paths.forEach(function(path) {
                let parts = path.split('\\');
                let filename = parts[parts.length - 1];
                try {
                    $scope.addCacheEntry(filename);
                } catch(x) {
                    console.log('Error adding error cache entry:');
                    console.log(x);
                }
            });
        };

        $scope.deleteFile = function(filename) {
            // fix: the path previously contained the literal text
            // "$(unknown)" instead of interpolating the filename, so cache
            // files were never actually deleted.
            fileHelpers.appDir.remove(`cache\\${filename}`);
        };

        // Deletes one cached file, or the whole cache group when no file
        // is given (or it is the group's last file).
        $scope.deleteCache = function(cache, file) {
            if (!file || cache.files.length == 1) {
                let index = $scope.errorCache.indexOf(cache);
                cache.files.forEach(function(file) {
                    $scope.deleteFile(`${cache.filename}-${file.hash}.json`);
                });
                $scope.errorCache.splice(index, 1);
            } else {
                $scope.deleteFile(`${cache.filename}-${file.hash}.json`);
                let index = cache.files.indexOf(file);
                cache.files.splice(index, 1);
            }
        };

        $scope.clearErrorCache = function() {
            if (!confirm('Clear the entire error cache?')) return;
            $scope.errorCache.forEach(function(cache) {
                $scope.deleteCache(cache);
            });
        };

        $scope.toggleErrorCache = function(visible) {
            // lazy-load the cache listing the first time it is shown
            if (visible && !$scope.errorCache) $scope.loadErrorCache();
            $scope.showErrorCache = visible;
        };
    });
}
|
<gh_stars>0
/* eslint-disable @typescript-eslint/no-var-requires */
const fs = require('fs');

// Euclidean distance between the points (x1, y1) and (x2, y2).
const pythagoras = (x1, x2, y1, y2) =>
  Math.sqrt(Math.pow(x1 - x2, 2) + Math.pow(y1 - y2, 2));

const islands = JSON.parse(fs.readFileSync('./data/islands.json')).islands;

// For every island, record its distance to each LATER island only,
// so each unordered pair appears exactly once.
const distances = islands.slice(0, -1).map((island, i) => ({
  a: island.id,
  islands: islands.slice(i + 1).map((other) => ({
    b: other.id,
    distance: pythagoras(island.x, other.x, island.y, other.y),
  })),
}));

fs.writeFileSync(
  './data/distances.json',
  JSON.stringify({ distances: distances })
);
|
#!/bin/bash
# Launches or attaches gdb (or ddd, depending on the script's name) to the
# B2G process on a device via adb + gdbserver, forwarding a per-user port.
SCRIPT_NAME=$(basename $0)
. load-config.sh

ADB=adb
GDB=${GDB:-prebuilt/$(uname -s | tr "[[:upper:]]" "[[:lower:]]")-x86/toolchain/arm-linux-androideabi-4.4.x/bin/arm-linux-androideabi-gdb}
B2G_BIN=/system/b2g/b2g
GDBINIT=/tmp/b2g.gdbinit.$(whoami)
GONK_OBJDIR=out/target/product/$DEVICE
SYMDIR=$GONK_OBJDIR/symbols
GDBSERVER_PID=$($ADB shell 'toolbox ps gdbserver | (read header; read user pid rest; echo $pid)')
# Derive a per-user port to avoid collisions on shared machines.
GDB_PORT=$((10000 + $(id -u) % 50000))

if [ "$1" = "attach" -a -n "$2" ] ; then
    B2G_PID=$2
    if [ -z "$($ADB ls /proc/$B2G_PID)" ] ; then
        echo Error: PID $B2G_PID is invalid
        exit 1;
    fi
    # Mix in the PID so two attaches from the same user use distinct ports.
    GDB_PORT=$((10000 + ($B2G_PID + $(id -u)) % 50000))
    # cmdline is null separated
    B2G_BIN=$($ADB shell cat /proc/$B2G_PID/cmdline | tr '\0' '\n' | head -1)
else
    B2G_PID=$($ADB shell 'toolbox ps b2g | (read header; read user pid rest; echo -n $pid)')
fi

# Kill any stale gdbserver already bound to our port.
for p in $GDBSERVER_PID ; do
    $ADB shell cat /proc/$p/cmdline | grep -q :$GDB_PORT && ( \
        echo ..killing gdbserver pid $p
        $ADB shell kill $p
    ) || echo ..ignoring gdbserver pid $p
done

$ADB forward tcp:$GDB_PORT tcp:$GDB_PORT

if [ "$1" = "attach" ]; then
    if [ -z "$B2G_PID" ]; then
        echo Error: No PID to attach to. B2G not running?
        exit 1
    fi
    $ADB shell gdbserver :$GDB_PORT --attach $B2G_PID &
else
    # A non-"attach" first argument names an alternate binary to launch.
    if [ -n "$1" ]; then
        B2G_BIN=$1
        shift
    fi
    [ -n "$MOZ_DEBUG_CHILD_PROCESS" ] && GDBSERVER_ENV="$GDBSERVER_ENV MOZ_DEBUG_CHILD_PROCESS=$MOZ_DEBUG_CHILD_PROCESS "
    [ -n "$MOZ_IPC_MESSAGE_LOG" ] && GDBSERVER_ENV="$GDBSERVER_ENV MOZ_IPC_MESSAGE_LOG=$MOZ_IPC_MESSAGE_LOG "
    $ADB shell kill $B2G_PID
    [ "$B2G_BIN" = "/system/b2g/b2g" ] && $ADB shell stop b2g
    $ADB shell LD_LIBRARY_PATH=/system/b2g LD_PRELOAD=/system/b2g/libmozglue.so $GDBSERVER_ENV gdbserver --multi :$GDB_PORT $B2G_BIN $@ &
fi

sleep 1
# Generate a gdbinit pointing gdb at the device symbols and remote target.
echo "set solib-absolute-prefix $SYMDIR" > $GDBINIT
echo "set solib-search-path $GECKO_OBJDIR/dist/bin:$SYMDIR/system/lib:$SYMDIR/system/lib/hw:$SYMDIR/system/lib/egl:$SYMDIR/system/bin:$GONK_OBJDIR/system/lib:$GONK_OBJDIR/system/lib/egl:$GONK_OBJDIR/system/lib/hw:$GONK_OBJDIR/system/vendor/lib:$GONK_OBJDIR/system/vendor/lib/hw:$GONK_OBJDIR/system/vendor/lib/egl" >> $GDBINIT
echo "target extended-remote :$GDB_PORT" >> $GDBINIT

PROG=$GECKO_OBJDIR/dist/bin/$(basename $B2G_BIN)
[ -f $PROG ] || PROG=${SYMDIR}${B2G_BIN}

if [ "$SCRIPT_NAME" == "run-ddd.sh" ]; then
    echo "ddd --debugger \"$GDB -x $GDBINIT\" $PROG"
    ddd --debugger "$GDB -x $GDBINIT" $PROG
else
    echo $GDB -x $GDBINIT $PROG
    $GDB -x $GDBINIT $PROG
fi
|
import android.graphics.Color;
import android.graphics.Bitmap;
public static String detectColor(Bitmap image) {
int color = Color.parseColor(Color.red(image));
String hexColor = String.format("#%06x", (0xFFFFFF & color));
return hexColor;
} |
tell application "Safari"
	-- "open location" expects a full URL; a bare host name such as
	-- "www.example.com" lacks the scheme, so add "http://".
	open location "http://www.example.com"
end tell
/**
 * Returns the arithmetic mean of the numbers in `arr`.
 * An empty array yields NaN (0/0), matching the original behaviour.
 */
function calculateAverage(arr) {
  var sum = 0;
  for (var i = 0; i < arr.length; i++) {
    sum += arr[i];
  }
  return sum / arr.length;
}

var arr = [1, 4, 2, 6];
var average = calculateAverage(arr);
console.log(average); // 3.25  (fix: the old comment claimed 3.5, but 13/4 = 3.25)
# rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: media_ignores
#
# id :integer not null, primary key
# media_type :string indexed => [media_id]
# created_at :datetime not null
# updated_at :datetime not null
# media_id :integer indexed => [media_type]
# user_id :integer indexed
#
# Indexes
#
# index_media_ignores_on_media_type_and_media_id (media_type,media_id)
# index_media_ignores_on_user_id (user_id)
#
# Foreign Keys
#
# fk_rails_ce29fae9fe (user_id => users.id)
#
# rubocop:enable Metrics/LineLength
class MediaIgnore < ApplicationRecord
  # NOTE(review): `required: true` is the pre-Rails-5 way to validate the
  # association's presence; on Rails 5+ belongs_to is required by default.
  belongs_to :media, polymorphic: true, required: true
  belongs_to :user, required: true

  # Restricts the polymorphic association to Media subtypes.
  validates :media, polymorphism: { type: Media }

  scope :for_library_entry, ->(le) { where(media: le.media, user: le.user) }

  # The user's library entry for the ignored media, if one exists.
  def library_entry
    LibraryEntry.find_by(user: user, media: media)
  end

  def media_follow_service
    MediaFollowService.new(user, media)
  end

  # Ignoring media removes the user's follow; un-ignoring restores it.
  after_commit(on: :create) { media_follow_service.destroy }
  after_commit(on: :destroy) { media_follow_service.create }
end
|
def ascending_sum(nums):
    """Return the sum of the numbers in *nums*.

    Addition is order-independent, so the original in-place sort was
    wasted O(n log n) work — and it needlessly mutated the caller's
    list as a side effect. The built-in sum() covers it directly
    (and handles the empty list, returning 0).
    """
    return sum(nums)


print(ascending_sum([1, 2, 3, 4, 5]))
# Read operations for the contact store.
type Query {
  contacts: [Contact]
  contact(id: ID!): Contact
}

# Write operations; each returns the affected contact.
type Mutation {
  addContact(name: String!, phoneNumber: Int!): Contact
  updateContact(id: ID!, name: String!, phoneNumber: Int!): Contact
  deleteContact(id: ID!): Contact
}

type Contact {
  id: ID!
  name: String!
  # NOTE(review): Int cannot represent leading zeros or most international
  # numbers (32-bit signed limit); String is usually safer for phone
  # numbers — confirm before changing, as it would break existing clients.
  phoneNumber: Int!
}
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
#WARNING: must have compiled PyTorch and caffe2
#check if extra argument is passed to the test
if [[ $# == 1 ]]; then
    dlrm_extra_option=$1
else
    dlrm_extra_option=""
fi
#echo $dlrm_extra_option

dlrm_py="python dlrm_s_pytorch.py"
dlrm_c2="python dlrm_s_caffe2.py"

echo "Running commands ..."
#run pytorch
echo $dlrm_py
# Fix: capture each run's output in pppN so the "diff cccN pppN" checks
# below have PyTorch output files to compare — previously nothing was
# redirected and the diffs operated on nonexistent files.
$dlrm_py --mini-batch-size=1 --data-size=1 --nepochs=1 --arch-interaction-op=dot --learning-rate=0.1 --debug-mode $dlrm_extra_option > ppp1
$dlrm_py --mini-batch-size=2 --data-size=4 --nepochs=1 --arch-interaction-op=dot --learning-rate=0.1 --debug-mode $dlrm_extra_option > ppp2
$dlrm_py --mini-batch-size=2 --data-size=5 --nepochs=1 --arch-interaction-op=dot --learning-rate=0.1 --debug-mode $dlrm_extra_option > ppp3
$dlrm_py --mini-batch-size=2 --data-size=5 --nepochs=3 --arch-interaction-op=dot --learning-rate=0.1 --debug-mode $dlrm_extra_option > ppp4

#run caffe2
echo $dlrm_c2
# Same for the Caffe2 runs, captured in cccN.
$dlrm_c2 --mini-batch-size=1 --data-size=1 --nepochs=1 --arch-interaction-op=dot --learning-rate=0.1 --debug-mode $dlrm_extra_option > ccc1
$dlrm_c2 --mini-batch-size=2 --data-size=4 --nepochs=1 --arch-interaction-op=dot --learning-rate=0.1 --debug-mode $dlrm_extra_option > ccc2
$dlrm_c2 --mini-batch-size=2 --data-size=5 --nepochs=1 --arch-interaction-op=dot --learning-rate=0.1 --debug-mode $dlrm_extra_option > ccc3
$dlrm_c2 --mini-batch-size=2 --data-size=5 --nepochs=3 --arch-interaction-op=dot --learning-rate=0.1 --debug-mode $dlrm_extra_option > ccc4

echo "Checking results ..."
#check results
#WARNING: correct test will have no difference in numeric values
#(but might have some verbal difference, e.g. due to warnnings)
#in the output file
echo "diff test1 (no numeric values in the output = SUCCESS)"
diff ccc1 ppp1
echo "diff test2 (no numeric values in the output = SUCCESS)"
diff ccc2 ppp2
echo "diff test3 (no numeric values in the output = SUCCESS)"
diff ccc3 ppp3
echo "diff test4 (no numeric values in the output = SUCCESS)"
diff ccc4 ppp4
|
# oh-my-zsh's keybindings come with such "features" as making up-arrow default to prefix searching. Remove these entirely
|
package main
import "testing"
// BenchmarkCh1Ch2 measures a ping-pong exchange between two goroutines
// over a pair of unbuffered channels.
//
// Fixes vs. the original:
//   - Renamed from Benchmarkch1ch2: a lowercase letter directly after the
//     "Benchmark" prefix makes the name malformed, so the testing package
//     would not recognise/run it as a benchmark.
//   - Deadlock removed: both goroutines originally *started* with a send
//     (ch1 <- 1 and ch2 <- 2) on unbuffered channels, so each blocked
//     forever waiting for a receiver that could never arrive.  The second
//     goroutine now receives first, pairing every send with a receive.
func BenchmarkCh1Ch2(b *testing.B) {
	done := make(chan struct{})
	ch1 := make(chan int)
	ch2 := make(chan int)
	go func() {
		for i := 0; i < b.N; i++ {
			ch1 <- 1 // send, then wait for the reply
			<-ch2
		}
		done <- struct{}{}
	}()
	go func() {
		for i := 0; i < b.N; i++ {
			<-ch1 // receive first so the pair cannot deadlock
			ch2 <- 2
		}
		done <- struct{}{}
	}()
	// Wait for both goroutines before closing: closing a channel that
	// still has a blocked sender would panic.
	<-done
	<-done
	close(done)
	close(ch1)
	close(ch2)
}
|
import React, { PureComponent } from 'react';
import { connect } from 'dva';
import { Card, Form, Icon, Button, Dropdown, Menu, Table, Modal, Input } from 'antd';
import PageHeaderWrapper from '@/components/PageHeaderWrapper';
import styles from './List.less';
import { getCategory, createCategory, deleteCategory } from '@/services/app';
/* eslint react/no-multi-comp:0 */
// Admin page for managing component categories: lists categories in a
// table and supports creating new ones (via a modal form) and deleting
// existing ones.
@connect(({ user, loading }) => ({
  user,
  loading: loading.models.user,
}))
class TableList extends PureComponent {
  // selectedRows: rows chosen for batch actions.
  // NOTE(review): nothing in this class ever writes selectedRows, and the
  // Table below has no rowSelection, so the batch-action toolbar can never
  // appear — confirm whether row selection was intended.
  state = {
    selectedRows: [],
    category: [],
    createVisible: false,
  };

  // Column definitions for the category table; the last column renders a
  // per-row delete button.
  columns = [
    {
      title: 'id',
      dataIndex: 'id',
    },
    {
      title: '名称',
      dataIndex: 'name',
    },
    {
      title: 'icon',
      dataIndex: 'icon',
    },
    {
      title: '操作',
      dataIndex: 'action',
      render: (text, record) => (
        <Button onClick={() => this.handleDeleteCategory(record.id)} type="danger">
          删除
        </Button>
      ),
    },
  ];

  // Load the category list once on mount.
  async componentDidMount() {
    this.queryCategoryList();
  }

  // Fetch all categories and store them in state.
  queryCategoryList = async () => {
    const res = await getCategory();
    if (res.success) {
      this.setState({ category: res.data });
    }
  };

  // Open the "create category" modal.
  showCreate = () => {
    this.setState({ createVisible: true });
  };

  // Close the "create category" modal.
  closeCreate = () => {
    this.setState({ createVisible: false });
  };

  // Delete one category by id, then refresh the list.
  handleDeleteCategory = async id => {
    const res = await deleteCategory({ id });
    if (res.success) {
      this.queryCategoryList();
    }
  };

  // Validate and submit the create form; on success close the modal and
  // refresh the list.
  handleSubmitCreate = async e => {
    e.preventDefault();
    this.props.form.validateFields(async (err, values) => {
      if (!err) {
        console.log('Received values of form: ', values);
        const res = await createCategory(values);
        if (res.success) {
          this.closeCreate();
          this.queryCategoryList();
        }
      }
    });
  };

  render() {
    const { selectedRows, category, createVisible } = this.state;
    const {
      form: { getFieldDecorator },
    } = this.props;
    // NOTE(review): this.handleMenuClick is not defined on this class, so
    // the dropdown menu items do nothing when clicked — confirm intent.
    const menu = (
      <Menu onClick={this.handleMenuClick} selectedKeys={[]}>
        <Menu.Item key="remove">删除</Menu.Item>
        <Menu.Item key="approval">批量审批</Menu.Item>
      </Menu>
    );
    return (
      <PageHeaderWrapper title="组件分类管理">
        <Card
          bordered={false}
          extra={
            <Button type="primary" onClick={this.showCreate}>
              创建
            </Button>
          }
        >
          <div className={styles.tableList}>
            <div className={styles.tableListOperator}>
              {selectedRows.length > 0 && (
                <span>
                  <Button>批量操作</Button>
                  <Dropdown overlay={menu}>
                    <Button>
                      更多操作 <Icon type="down" />
                    </Button>
                  </Dropdown>
                </span>
              )}
            </div>
            <Table rowKey="id" columns={this.columns} dataSource={category} pagination={false} />
          </div>
        </Card>
        <Modal title="创建分类" visible={createVisible} footer={null} onCancel={this.closeCreate}>
          <Form onSubmit={this.handleSubmitCreate}>
            <Form.Item label="名称">
              {getFieldDecorator('name', {
                rules: [{ required: true, message: '请输入分类名称' }],
              })(<Input placeholder="分类名称" />)}
            </Form.Item>
            <Form.Item label="icon">
              {getFieldDecorator('icon', {
                rules: [{ required: true, message: '请输入分类 icon' }],
              })(<Input placeholder="分类 icon" />)}
            </Form.Item>
            <Form.Item>
              <Button type="primary" htmlType="submit" className="login-form-button">
                提交
              </Button>
            </Form.Item>
          </Form>
        </Modal>
      </PageHeaderWrapper>
    );
  }
}

// Wrap with Form.create() so this.props.form (getFieldDecorator, 
// validateFields, ...) is injected.
export default Form.create()(TableList);
|
/**
 * Static helpers for bulk classList manipulation on a DOM element.
 */
class ElementUtils {
  /** Add every name in `classNames` to `element`'s class list, one by one. */
  static classListAdd(element, ...classNames) {
    classNames.forEach((name) => element.classList.add(name));
  }

  /** Remove every name in `classNames` from `element`'s class list, one by one. */
  static classListRemove(element, ...classNames) {
    classNames.forEach((name) => element.classList.remove(name));
  }
}

export default ElementUtils;
|
<filename>src/bus/management/AssignBus.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bus.management;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

import javax.swing.JOptionPane;
/**
*
*
*/
/**
 * Internal frame for assigning a bus to an employee.  Populates two combo
 * boxes (employee first names, bus numbers) from the database and inserts
 * an assignment row on demand.
 *
 * Fixes vs. the original:
 *  - SQL was built by concatenating combo-box values into the query text
 *    (SQL injection / quoting bugs); PreparedStatement parameters are used.
 *  - JDBC Connection/Statement/ResultSet objects were never closed
 *    (resource leak); try-with-resources closes them deterministically.
 *  - Duplicated driver/connection/combo-fill code is factored into helpers.
 */
public class AssignBus extends javax.swing.JInternalFrame {

    // Connection settings for the local bus-management schema.
    private static final String DB_URL = "jdbc:mysql://localhost:3306/busm";
    private static final String DB_USER = "root";
    private static final String DB_PASSWORD = "root";

    /**
     * Creates new form AssignBus and fills both combo boxes from the DB.
     */
    public AssignBus() {
        initComponents();
        employeeCBFillData();
        busCBFillData();
    }

    /** Load the JDBC driver and open a fresh connection to the busm schema. */
    private Connection getConnection() throws Exception {
        Class.forName("com.mysql.jdbc.Driver");
        return DriverManager.getConnection(DB_URL, DB_USER, DB_PASSWORD);
    }

    /** Fill the employee combo box with all employee first names. */
    private void employeeCBFillData() {
        fillComboBox(jComboBox1, "select firstname from employee_details", "firstname");
    }

    /** Fill the bus combo box with all bus numbers. */
    private void busCBFillData() {
        fillComboBox(jComboBox2, "select bus_no from bus_details", "bus_no");
    }

    /**
     * Run a constant query and append the given column of every row to the
     * combo box.  All JDBC resources are closed via try-with-resources.
     */
    private void fillComboBox(javax.swing.JComboBox<String> comboBox, String query, String column) {
        try (Connection con = getConnection();
             Statement stat = con.createStatement();
             ResultSet rs = stat.executeQuery(query)) {
            while (rs.next()) {
                comboBox.addItem(rs.getString(column));
            }
        } catch (Exception e) {
            System.out.println(e);
        }
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        jButton1 = new javax.swing.JButton();
        jButton2 = new javax.swing.JButton();
        jComboBox1 = new javax.swing.JComboBox<>();
        jComboBox2 = new javax.swing.JComboBox<>();

        setClosable(true);
        setMaximizable(true);
        setResizable(true);

        jLabel1.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
        jLabel1.setText("Assign Bus");

        jLabel2.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
        jLabel2.setText("Employee Name");

        jLabel3.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
        jLabel3.setText("Bus No");

        jButton1.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
        jButton1.setText("Assign");
        jButton1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton1ActionPerformed(evt);
            }
        });

        jButton2.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
        jButton2.setText("Reset");

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addComponent(jLabel1)
                .addGap(357, 357, 357))
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(289, 289, 289)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                            .addComponent(jLabel3)
                            .addComponent(jLabel2))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 45, Short.MAX_VALUE)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                            .addComponent(jComboBox1, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                            .addComponent(jComboBox2, 0, 86, Short.MAX_VALUE)))
                    .addGroup(layout.createSequentialGroup()
                        .addGap(351, 351, 351)
                        .addComponent(jButton1)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(jButton2)))
                .addGap(261, 261, 261))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(39, 39, 39)
                .addComponent(jLabel1)
                .addGap(46, 46, 46)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel2)
                    .addComponent(jComboBox1, javax.swing.GroupLayout.PREFERRED_SIZE, 32, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(33, 33, 33)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel3)
                    .addComponent(jComboBox2, javax.swing.GroupLayout.PREFERRED_SIZE, 31, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGap(39, 39, 39)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jButton1)
                    .addComponent(jButton2))
                .addContainerGap(70, Short.MAX_VALUE))
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /** Show an informational message dialog with the given text and title. */
    public void infoMessage(String message, String tittle) {
        JOptionPane.showMessageDialog(null, message, tittle, JOptionPane.INFORMATION_MESSAGE);
    }

    /**
     * "Assign" button: if the (employee, bus) pair is not yet recorded in
     * bus_assign, insert it; otherwise tell the user it already exists.
     */
    private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
        String employee_name = (String) jComboBox1.getSelectedItem();
        String bus_no = (String) jComboBox2.getSelectedItem();
        // Parameterized queries replace the original string concatenation,
        // which was vulnerable to SQL injection and quoting errors.
        try (Connection con = getConnection()) {
            try (PreparedStatement check = con.prepareStatement(
                    "select bus_no from bus_assign where firstname=? and bus_no=?")) {
                check.setString(1, employee_name);
                check.setString(2, bus_no);
                try (ResultSet rs = check.executeQuery()) {
                    if (rs.next()) {
                        infoMessage("Already Bus Details is Added", "Create Fresh Entry !!");
                        return;
                    }
                }
            }
            try (PreparedStatement insert = con.prepareStatement(
                    "insert into bus_assign values(null,?,?)")) {
                insert.setString(1, employee_name);
                insert.setString(2, bus_no);
                if (insert.executeUpdate() == 1) {
                    infoMessage("Bus is Assign With Employee", "Create Fresh Entry !!");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }//GEN-LAST:event_jButton1ActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton2;
    private javax.swing.JComboBox<String> jComboBox1;
    private javax.swing.JComboBox<String> jComboBox2;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    // End of variables declaration//GEN-END:variables
}
|
#!/bin/sh
# Run the lensesio/fast-data-dev container: --rm removes the container when
# it exits, and --net=host shares the host's network stack so the services
# inside are reachable on localhost directly.
docker run --rm --net=host lensesio/fast-data-dev
|
const express = require('express')
const crypto = require('crypto')

const app = express()

// Permissive CORS: allow any origin and the usual request headers.
app.use((req, res, next) => {
  res.header('Access-Control-Allow-Origin', '*')
  res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept')
  next()
})

// GET / — SHA-256 the current timestamp and return it as a JSON string.
app.get('/', (req, res) => {
  const seed = Date.now().toString()
  const digest = crypto
    .createHash('sha256')
    .update(seed, 'utf8')
    .digest('hex')
  res.send(JSON.stringify({ sha256 : digest }))
})

// Listen on port 3000.
app.listen(3000, () => {
  console.log('Your hash is being generated...')
})
|
#!/bin/bash
# Script to assist in correcting rotations
# (c) Silas S. Brown 2006-2008,2019-2021, v1.1215
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Where to find history:
# on GitHub at https://github.com/ssb22/scan-reflow
# and on GitLab at https://gitlab.com/ssb22/scan-reflow
# and on BitBucket https://bitbucket.org/ssb22/scan-reflow
# and at https://gitlab.developers.cam.ac.uk/ssb22/scan-reflow
# and in China: https://gitee.com/ssb22/scan-reflow
# Usage check: at least one image file must be given.
if [ ! "$1" ]; then
  echo "Syntax: $0 image-file image-file ....."
  echo "Helps you de-rotate all images listed on the command-line."
  exit 1
fi
# Remember on cygwin $HOME may have spaces in it
# Refuse to start if leftover saved-area files exist: the rotation angle is
# inferred from the area files the user saves in gimp, so stale ones would
# be misread as belonging to this run.
if (cd "$HOME";test -e .gimp*/tmp/*-area.png); then
  echo -n "Error: Some saved areas already exist in "
  echo -n "$HOME/";cd "$HOME";echo .gimp*/tmp
  echo "Please check if they're important, and if not, remove them."
  echo "Then run this script again."
  exit 1
fi
echo "
About to run gimp on all image files, one by one.
When each file loads, do this:
1. Use the selection tool
2. Choose a line of the rotated text
3. Click on the bottom left of this line
4. Drag to the bottom right of this line
5. Use the 'save area' command to save the selection
6. If the line is slanting upwards, REPEAT step 5
7. Quit the GIMP (Control-Q)
This script will then look at the dimensions of the area you
saved (and whether or not you saved it twice) and will use this
information to calculate the angle of rotation of the image.
The image will then be replaced with its de-rotated version
(and converted to PNG if it's not already PNG).
If any image is straight already, just quit (Control-Q)
and the image will be converted to PNG with no rotation.
Press Enter when you have read the above instructions.
"
read
unset NeedRemove
# Scratch directory; the .ready flag file serialises the background netpbm
# conversion jobs (only one runs at a time).
export TempDirectory=$(mktemp -d)
touch $TempDirectory/.ready
# Main loop: one gimp session per command-line image.
while [ "$1" ]; do
  if test -d /cygdrive; then
    # looks like we're on CygWin - this is tricky
    cp "$1" /cygdrive/c/Program*Files/GIMP*/bin
    pushd /cygdrive/c/Program*Files/GIMP*/bin
    ./gimp*.exe -d -s "$1" || exit 1
    #echo "Press Enter when Gimp has terminated" ; read
    mv "$1" "$OLDPWD"
    popd
  else gimp -d -s "$1" || exit 1; fi
  # Count how many areas the user saved (0 = straight, 1 = slant down, 2 = slant up).
  export NumFiles=$(cd "$HOME";ls .gimp*/tmp/*-area.png 2>/dev/null|wc -l)
  export AsPng="$(echo "$1"|sed -e 's/\.[^\.]*$/.png/')"
  if ! echo "$1"|grep '\.' >/dev/null; then export AsPng="$1.png"; fi # (if no extension at all)
  if test $NumFiles == 0; then
    # it seems this one was straight. but we might still have to convert it to PNG.
    if test "$1" != "$AsPng"; then
      while ! test -e $TempDirectory/.ready; do echo "Waiting for netpbm to catch up"; sleep 1; done # (TODO unless on an SMP system. Doing it this way rather than 'wait' for a PID because sometimes Cygwin's wait is broken.)
      rm $TempDirectory/.ready
      (anytopnm "$1" | pnmtopng -compression 9 > "$AsPng" && rm "$1"; touch $TempDirectory/.ready) &
    fi
    shift; continue
  fi
  # Read the saved area's width/height from its PNM header; require a width
  # of at least 300px for a reliable angle estimate.
  pushd "$HOME"
  for File in .gimp*/tmp/*-area.png; do
    export Geom=$(pngtopnm $File | head -2 | tail -1)
    if test $(echo $Geom|sed -e 's/ .*//') -gt 300; then break; else unset Geom; fi
  done
  popd
  if [ ! "$Geom" ]; then
    echo ; echo "ERROR: You did not select a large enough area for reliable rotation (must be at least 300 pixels wide)."
    echo "(This error can also be caused by a timing bug in some versions of the GIMP - try doing it again more slowly.)"
    echo "Press Enter to try again."
    read
  elif test $NumFiles -gt 2; then
    echo "ERROR: you need to choose 0, 1 or 2 areas, not $NumFiles"
    echo "Press Enter to try again."
    read
  else
    # Angle from the selection's aspect ratio: atan(height/width) in degrees.
    export Deg=$(echo $Geom | python -c 'import sys,math; w,h=sys.stdin.read().split() ; print(math.atan(1.0*int(h)/int(w))*180/math.pi)') # Python 2 and Python 3 should both work
    # Two saved areas means the line slanted upwards: rotate the other way.
    if test $NumFiles == 2; then export Deg=-$Deg; fi
    while ! test -e $TempDirectory/.ready; do echo "Waiting for netpbm to catch up"; sleep 1; done; rm $TempDirectory/.ready # as above
    # some buggy versions of pnmrotate don't like -background=white on a PPM (2-colour) image, so we need to make sure it's at least greyscale first, ideally in the same pipe. pnmtopng/pngtopnm doesn't always do it. Piping through ppmtopcx/pcxtoppm or pnmtorle/rletopnm seems to work.
    # (we also allow for very old versions of pnmrotate that don't have the -background=white switch)
    # (and we use 1.0,1.0,1.0 instead of 'white' in case rgb.txt isn't properly present on the system)
    (anytopnm "$1" | pnmtorle | rletopnm | (pnmrotate -background=1.0,1.0,1.0 -noantialias $Deg 2>/dev/null || pnmrotate -noantialias $Deg) | pnmtopng -compression 9 > "$1.new" && rm "$1" && mv "$1.new" "$AsPng"; touch $TempDirectory/.ready) &
    export NeedRemove="$AsPng $NeedRemove" # hope no spaces in there
    shift
  fi
  # Clear the saved areas so the next iteration starts clean.
  (cd "$HOME";rm .gimp*/tmp/*-area.png)
done
# Wait for the last background conversion to finish before reporting.
while ! test -e $TempDirectory/.ready; do echo "Waiting for netpbm to catch up"; sleep 1; done
if [ "$NeedRemove" ]; then clear; fi
echo "All images have been de-rotated."
rm -rf $TempDirectory
if [ ! "$NeedRemove" ]; then exit; fi
# Second pass: let the user manually cut away scan-edge artifacts on each
# file that was actually rotated.
echo "One more thing: You may need to manually remove any large
marks at the edges of the scan; these are quite likely if
the document was rotated when the area to scan was selected,
and they can confuse further processing (especially marks to
the left and right of the text). So we will now run gimp
again, on just the files that have been de-rotated last
time. For each one, select any unwanted marks, Cut
(Control-X), Save (Control-S) and Quit (Control-Q). (NB if
you need to select more than one mark then you may need to
click outside each selection before making the next one)"
echo "Press Enter to start."
read
for I in $NeedRemove; do
  if test -d /cygdrive; then
    # looks like we're on CygWin - this is tricky
    cp "$I" /cygdrive/c/Program*Files/GIMP*/bin
    pushd /cygdrive/c/Program*Files/GIMP*/bin
    ./gimp*.exe -d -s "$I"
    #echo "Press Enter when Gimp has terminated" ; read
    mv "$I" "$OLDPWD"
    popd
  else gimp -d -s "$I"; fi; done
echo "All done."
|
#!/bin/sh
#
# Android
#
# This installs Android studio from the stable channel
# NOTE(review): every install/update step below is commented out, so at
# present this script only prints the working directory (pwd) — confirm
# whether the commented steps should be re-enabled or the file removed.
#echo "› brew cask install android-studio"
#brew cask install android-studio
pwd
#ls $ZSH/android/path.zsh
#source $ZSH/android/path.zsh
#echo "› sdkmanager --update"
#$ANDROID_HOME/tools/bin/sdkmanager --update
|
#include <bits/stdc++.h>
using namespace std;
typedef long long ll;
// Print every subset of the first n elements of vec, one subset per line
// (elements separated by spaces; the empty subset prints a blank line).
// Bit j of the outer counter i decides whether vec[j] belongs to the
// subset, so iterating i over [0, 2^n) enumerates all subsets.
//
// Improvement: the vector is now taken by const reference instead of by
// value, avoiding a full copy per call; callers are unaffected.
void printSubset(const vector<int> &vec, int n)
{
    for (int i = 0; i < (1 << n); i++)
    {
        for (int j = 0; j < n; j++)
        {
            if (i & (1 << j))
            {
                cout << vec[j] << " ";
            }
        }
        cout << endl;
    }
}
// Read a count n followed by n integers, then print all subsets of them.
int main(void)
{
    int count;
    cin >> count;
    vector<int> values(count);
    for (size_t idx = 0; idx < values.size(); ++idx)
    {
        cin >> values[idx];
    }
    printSubset(values, count);
}
<filename>AutoRotate/app/src/main/java/com/prat/autorotate/MainActivity.java
package com.prat.autorotate;
import android.app.Activity;
import android.app.Service;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.pm.ActivityInfo;
import android.graphics.PixelFormat;
import android.os.Bundle;
import android.os.IBinder;
import android.provider.Settings;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.WindowManager;
import android.widget.CompoundButton;
import android.widget.LinearLayout;
import android.widget.ToggleButton;
/**
 * UI with two toggle buttons, both delegating to a bound
 * {@link OrientationService}: one flips the system auto-rotation setting,
 * the other forces/releases a portrait orientation.
 */
public class MainActivity extends Activity {

    // Whether we currently hold a binding to OrientationService.
    boolean mIsBound = false;

    private OrientationService mBoundService;

    // Connection callbacks keep mBoundService in sync with the binding.
    private ServiceConnection mConnection = new ServiceConnection() {
        public void onServiceConnected(ComponentName className, IBinder service) {
            mBoundService = ((OrientationService.OrientationServiceBinder) service).getService();
        }

        public void onServiceDisconnected(ComponentName className) {
            // Invoked on unexpected disconnects (e.g. service process death).
            mBoundService = null;
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        doBindService();
        ToggleButton mButton = (ToggleButton) findViewById(R.id.button);
        ToggleButton mToggleButton = (ToggleButton) findViewById(R.id.toggleButton);
        // Checked disables system auto-rotation; unchecked re-enables it
        // (same branches as the original, folded into one call).
        mButton.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (mIsBound && mBoundService != null) {
                    mBoundService.setAutoOrientationEnabled(getApplicationContext(), !isChecked);
                }
            }
        });
        // Checked forces portrait; unchecked removes the forced orientation.
        mToggleButton.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (mIsBound && mBoundService != null) {
                    if (isChecked) {
                        mBoundService.forceOrientation(getApplicationContext(),
                                ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
                    } else {
                        mBoundService.removeForceOrientation(getApplicationContext());
                    }
                }
            }
        });
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Bind to OrientationService; mIsBound reflects the actual result. */
    void doBindService() {
        // Fix: the original set mIsBound = true unconditionally, even when
        // bindService() failed; record its boolean result instead.
        mIsBound = bindService(new Intent(MainActivity.this,
                OrientationService.class), mConnection, Context.BIND_AUTO_CREATE);
    }

    /** Release the service binding if we hold one. */
    void doUnbindService() {
        if (mIsBound) {
            // Detach our existing connection.
            unbindService(mConnection);
            mIsBound = false;
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        doUnbindService();
    }
}
//
// /**
// * Change the desired orientation.
// *
// * @param orientation An orientation constant as used in
// * {@link android.content.pm.ActivityInfo#screenOrientation
// * ActivityInfo.screenOrientation}.
// */
// public void forceOrientation(Context context, int orientation) {
// mView = new LinearLayout(context);
// WindowManager.LayoutParams layoutParams =
// new WindowManager.LayoutParams(
// WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY, 0,
// PixelFormat.RGBA_8888);
// layoutParams.screenOrientation
// = orientation; //ActivityInfo.SCREEN_ORIENTATION_USER_LANDSCAPE;
// WindowManager wm = (WindowManager) context.getSystemService(Service.WINDOW_SERVICE);
// wm.addView(mView, layoutParams);
// mView.setVisibility(View.VISIBLE);
// }
//
// public void removeForceOrientation(Context context) {
// if (mView != null) {
// WindowManager wm = (WindowManager) context.getSystemService(Service.WINDOW_SERVICE);
// mView.setVisibility(View.GONE);
// wm.removeView(mView);
// //wm.removeViewImmediate(mView);
// }
// }
//
// public int getAutoOrientationEnabled(Context context) {
// try {
// return Settings.System
// .getInt(context.getContentResolver(), Settings.System.ACCELEROMETER_ROTATION);
// } catch (Settings.SettingNotFoundException e) {
// e.printStackTrace();
// return -1;
// }
// }
//
// public void setAutoOrientationEnabled(Context context, boolean enabled) {
// Settings.System.putInt(context.getContentResolver(), Settings.System.ACCELEROMETER_ROTATION,
// enabled ? 1 : 0);
// }
|
<filename>enterprise-modules/charts/src/charts/chartComp/chartTranslator.ts
import { Bean, BeanStub } from "@ag-grid-community/core";
// Registered as the "chartTranslator" bean; resolves chart UI strings
// through the grid's locale mechanism, with built-in English fallbacks.
@Bean("chartTranslator")
export class ChartTranslator extends BeanStub {

    // English fallback text for every translatable chart UI key.  Keys are
    // runtime lookup identifiers — do not rename them.
    private static DEFAULT_TRANSLATIONS: { [name: string]: string; } = {
        pivotChartTitle: 'Pivot Chart',
        rangeChartTitle: 'Range Chart',
        settings: 'Settings',
        data: 'Data',
        format: 'Format',
        categories: 'Categories',
        defaultCategory: '(None)',
        series: 'Series',
        xyValues: 'X Y Values',
        paired: 'Paired Mode',
        axis: 'Axis',
        navigator: 'Navigator',
        color: 'Color',
        thickness: 'Thickness',
        xType: 'X Type',
        automatic: 'Automatic',
        category: 'Category',
        number: 'Number',
        time: 'Time',
        xRotation: 'X Rotation',
        yRotation: 'Y Rotation',
        ticks: 'Ticks',
        width: 'Width',
        height: 'Height',
        length: 'Length',
        padding: 'Padding',
        spacing: 'Spacing',
        chart: 'Chart',
        title: 'Title',
        titlePlaceholder: 'Chart title - double click to edit',
        background: 'Background',
        font: 'Font',
        top: 'Top',
        right: 'Right',
        bottom: 'Bottom',
        left: 'Left',
        labels: 'Labels',
        size: 'Size',
        shape: 'Shape',
        minSize: 'Minimum Size',
        maxSize: 'Maximum Size',
        legend: 'Legend',
        position: 'Position',
        markerSize: 'Marker Size',
        markerStroke: 'Marker Stroke',
        markerPadding: 'Marker Padding',
        itemSpacing: 'Item Spacing',
        itemPaddingX: 'Item Padding X',
        itemPaddingY: 'Item Padding Y',
        layoutHorizontalSpacing: 'Horizontal Spacing',
        layoutVerticalSpacing: 'Vertical Spacing',
        strokeWidth: 'Stroke Width',
        offset: 'Offset',
        offsets: 'Offsets',
        tooltips: 'Tooltips',
        callout: 'Callout',
        markers: 'Markers',
        shadow: 'Shadow',
        blur: 'Blur',
        xOffset: 'X Offset',
        yOffset: 'Y Offset',
        lineWidth: 'Line Width',
        normal: 'Normal',
        bold: 'Bold',
        italic: 'Italic',
        boldItalic: 'Bold Italic',
        predefined: 'Predefined',
        fillOpacity: 'Fill Opacity',
        strokeOpacity: 'Line Opacity',
        histogramBinCount: 'Bin count',
        columnGroup: 'Column',
        barGroup: 'Bar',
        pieGroup: 'Pie',
        lineGroup: 'Line',
        scatterGroup: 'X Y (Scatter)',
        areaGroup: 'Area',
        histogramGroup: 'Histogram',
        groupedColumnTooltip: 'Grouped',
        stackedColumnTooltip: 'Stacked',
        normalizedColumnTooltip: '100% Stacked',
        groupedBarTooltip: 'Grouped',
        stackedBarTooltip: 'Stacked',
        normalizedBarTooltip: '100% Stacked',
        pieTooltip: 'Pie',
        doughnutTooltip: 'Doughnut',
        lineTooltip: 'Line',
        groupedAreaTooltip: 'Area',
        stackedAreaTooltip: 'Stacked',
        normalizedAreaTooltip: '100% Stacked',
        scatterTooltip: 'Scatter',
        bubbleTooltip: 'Bubble',
        histogramTooltip: 'Histogram',
        noDataToChart: 'No data available to be charted.',
        pivotChartRequiresPivotMode: 'Pivot Chart requires Pivot Mode enabled.',
    };

    /**
     * Translate the given key via the grid's locale text function, falling
     * back to the built-in English default for that key.
     */
    public translate(toTranslate: string): string {
        const translate = this.gridOptionsWrapper.getLocaleTextFunc();
        const defaultTranslation = ChartTranslator.DEFAULT_TRANSLATIONS[toTranslate];
        return translate(toTranslate, defaultTranslation);
    }
}
|
<reponame>zrwusa/expo-bunny
import React from 'react';
import {makeStyles} from './styles';
import {IcoMoon} from '../UI';
import {LinearGradientIconProps} from './LinearGradientIcon';
import {useBunnyKit} from '../../hooks/bunny-kit';
/**
 * Icon whose colour is taken from the first entry of the `colors` prop,
 * falling back to the theme's button-background colour.  Size defaults to
 * wp(20) when not supplied.
 *
 * Cleanup: removed unused locals from the original (`styles` from
 * makeStyles and the `designsBasedOn` destructure, plus the now-unneeded
 * `sizeLabor` binding) — none were referenced.
 *
 * NOTE(review): despite the name, no gradient is rendered — only colors[0]
 * (or the first default colour) is applied to the IcoMoon glyph.  Confirm
 * whether wrapping in a LinearGradient was intended.
 */
export function LinearGradientIcon(props: LinearGradientIconProps) {
    const {themeLabor, wp} = useBunnyKit();
    const {name, size, colors} = props;
    const {theme} = themeLabor;
    const finalSize = size || wp(20);
    const colorsDefault = [theme.colors.backgroundBtn, theme.colors.backgroundBtn2];
    return (
        <IcoMoon
            name={name}
            size={finalSize}
            color={colors ? colors[0] : colorsDefault[0]}
        />
    );
}
|
#!/bin/bash
# Launch the SP4T antenna-switch control binary with nohup so it survives
# the launching terminal closing (SIGHUP ignored, output to nohup.out).
# NOTE(review): there is no trailing '&', so this script blocks until the
# control program exits — confirm whether backgrounding was intended.
nohup /home/pi/grid-antenna-control/cpp/sp4t_control
|
from django.conf.urls import url
from easy_auth.views import (
LoginAuthView,
LogoutAuthView,
PasswordChangeAuthView,
PasswordChangeDoneAuthView,
PasswordResetAuthView,
PasswordResetConfirmAuthView,
PasswordResetDoneAuthView,
PasswordResetCompleteAuthView
)
# Authentication URL routes for the easy_auth app.
#
# NOTE: none of these regexes (except password_reset_confirm) is anchored
# with a trailing '$', so a pattern such as r'^change_password' also matches
# 'change_password_done'.  The list order is therefore load-bearing: each
# longer name must appear before the shorter prefix it contains
# (change_password_done before change_password; password_reset_confirm/
# _complete/_done before password_reset).  Preserve this order when editing.
urlpatterns = [
    url(r'^change_password_done', PasswordChangeDoneAuthView.as_view(), name='change_password_done'),
    url(r'^change_password', PasswordChangeAuthView.as_view(), name='change_password'),
    url(r'^login', LoginAuthView.as_view(), name='login'),
    url(r'^logout', LogoutAuthView.as_view(), name='logout'),
    url(r'^password_reset_confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        PasswordResetConfirmAuthView.as_view(),
        name='password_reset_confirm'
    ),
    url(r'^password_reset_complete', PasswordResetCompleteAuthView.as_view(), name='password_reset_complete'),
    url(r'^password_reset_done', PasswordResetDoneAuthView.as_view(), name='password_reset_done'),
    url(r'^password_reset', PasswordResetAuthView.as_view(), name='password_reset'),
]
|
#!/usr/bin/env bash
# Copyright 2019 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is a helper script for Knative E2E test scripts.
# See README.md for instructions on how to use it.
source "$(dirname "${BASH_SOURCE[0]:-$0}")/infra-library.sh"
readonly TEST_RESULT_FILE=/tmp/${REPO_NAME}-e2e-result
# Flag whether test is using a boskos GCP project
IS_BOSKOS=0
# Tear down the test resources.
function teardown_test_resources() {
  # On boskos, save time and don't teardown as the cluster will be destroyed anyway.
  (( IS_BOSKOS )) && return
  header "Tearing down test environment"
  # Run the repo-provided teardown hooks if defined: test resources first,
  # then the Knative installation (reverse of setup order).
  if function_exists test_teardown; then
    test_teardown
  fi
  if function_exists knative_teardown; then
    knative_teardown
  fi
}
# Run the given E2E tests. Assume tests are tagged e2e, unless `-tags=XXX` is passed.
# Parameters: $1..$n - any go test flags, then directories containing the tests to run.
function go_test_e2e() {
  local go_test_args=()
  # Inject defaults only when the caller didn't supply their own values.
  [[ ! " $*" == *" -tags="* ]] && go_test_args+=("-tags=e2e")
  [[ ! " $*" == *" -count="* ]] && go_test_args+=("-count=1")
  [[ ! " $*" == *" -race"* ]] && go_test_args+=("-race")
  # Remove empty args as `go test` will consider it as running tests for the current directory, which is not expected.
  for arg in "$@"; do
    [[ -n "$arg" ]] && go_test_args+=("$arg")
  done
  report_go_test "${go_test_args[@]}"
}
# Setup the test cluster for running the tests.
function setup_test_cluster() {
  # Fail fast during setup.
  set -o errexit
  set -o pipefail

  header "Setting up test cluster"
  # Sanity check: fails early if kubectl cannot reach the cluster.
  kubectl get nodes

  # Set the actual project the test cluster resides in
  # It will be a project assigned by Boskos if test is running on Prow,
  # otherwise will be ${E2E_GCP_PROJECT_ID} set up by user.
  E2E_PROJECT_ID="$(gcloud config get-value project)"
  export E2E_PROJECT_ID
  readonly E2E_PROJECT_ID

  local k8s_cluster
  k8s_cluster=$(kubectl config current-context)

  is_protected_cluster "${k8s_cluster}" && \
    abort "kubeconfig context set to ${k8s_cluster}, which is forbidden"

  # Setup KO_DOCKER_REPO if it is a GKE cluster. Incorporate an element of
  # randomness to ensure that each run properly publishes images. Don't
  # owerwrite KO_DOCKER_REPO if already set.
  [ -z "${KO_DOCKER_REPO}" ] && \
    [[ "${k8s_cluster}" =~ ^gke_.* ]] && \
    export KO_DOCKER_REPO=gcr.io/${E2E_PROJECT_ID}/${REPO_NAME}-e2e-img/${RANDOM}

  # Safety checks
  is_protected_gcr "${KO_DOCKER_REPO}" && \
    abort "\$KO_DOCKER_REPO set to ${KO_DOCKER_REPO}, which is forbidden"

  # Use default namespace for all subsequent kubectl commands in this context
  kubectl config set-context "${k8s_cluster}" --namespace=default

  echo "- Cluster is ${k8s_cluster}"
  echo "- Docker is ${KO_DOCKER_REPO}"

  export KO_DATA_PATH="${REPO_ROOT_DIR}/.git"

  # Do not run teardowns if we explicitly want to skip them.
  (( ! SKIP_TEARDOWNS )) && add_trap teardown_test_resources EXIT

  # Handle failures ourselves, so we can dump useful info.
  set +o errexit
  set +o pipefail

  # Wait for Istio installation to complete, if necessary, before calling knative_setup.
  # TODO(chizhg): is it really needed?
  (( ! SKIP_ISTIO_ADDON )) && (wait_until_batch_job_complete istio-system || return 1)
  # Repo-provided setup hooks: Knative installation first, then the tests'
  # own resources; either failing fails the whole run.
  if function_exists knative_setup; then
    knative_setup || fail_test "Knative setup failed"
  fi
  if function_exists test_setup; then
    test_setup || fail_test "test setup failed"
  fi
}
# Signal (as return code and in the logs) that all E2E tests passed.
function success() {
  local banner="**************************************"
  echo "${banner}"
  echo "*** E2E TESTS PASSED ***"
  echo "${banner}"
  dump_metrics
  exit 0
}
# Exit test, dumping current state info.
# Parameters: $1 - error message (optional).
function fail_test() {
  if [[ -n $1 ]]; then
    echo "ERROR: $1"
  fi
  dump_cluster_state
  dump_metrics
  exit 1
}
# Skip teardown of test resources when set (see --skip-teardowns).
SKIP_TEARDOWNS=0
# Skip waiting for the Istio addon when set (see --skip-istio-addon).
SKIP_ISTIO_ADDON=0
# Absolute path of the running E2E script; filled in by initialize().
E2E_SCRIPT=""
# Cloud provider used to create the test cluster (see --cloud-provider).
CLOUD_PROVIDER="gke"
# Parse flags and initialize the test cluster.
# Unrecognized flags are forwarded to create_test_cluster via custom_flags;
# all flags are also replayed to this same script with --run-tests appended.
function initialize() {
  local run_tests=0
  local custom_flags=()
  E2E_SCRIPT="$(get_canonical_path "$0")"
  local e2e_script_command=( "${E2E_SCRIPT}" "--run-tests" )
  cd "${REPO_ROOT_DIR}"
  while [[ $# -ne 0 ]]; do
    local parameter=$1
    # TODO(chizhg): remove parse_flags logic if no repos are using it.
    # Try parsing flag as a custom one.
    if function_exists parse_flags; then
      parse_flags "$@"
      # parse_flags returns the number of arguments it consumed (0 = not a
      # custom flag).
      local skip=$?
      if [[ ${skip} -ne 0 ]]; then
        # Skip parsed flag (and possibly argument) and continue
        # Also save it so it's passed through to the test script
        for ((i=1;i<=skip;i++)); do
          e2e_script_command+=("$1")
          shift
        done
        continue
      fi
    fi
    # Try parsing flag as a standard one.
    case ${parameter} in
      --run-tests) run_tests=1 ;;
      --skip-teardowns) SKIP_TEARDOWNS=1 ;;
      # TODO(chizhg): remove this flag once the addons is defined as an env var.
      --skip-istio-addon) SKIP_ISTIO_ADDON=1 ;;
      *)
        case ${parameter} in
          --cloud-provider) shift; CLOUD_PROVIDER="$1" ;;
          *) custom_flags+=("$parameter") ;;
        esac
    esac
    shift
  done
  # On Prow without an explicit GCP project, a Boskos-leased project is used.
  (( IS_PROW )) && [[ -z "${GCP_PROJECT_ID:-}" ]] && IS_BOSKOS=1
  if [[ "${CLOUD_PROVIDER}" == "gke" ]]; then
    if (( SKIP_ISTIO_ADDON )); then
      custom_flags+=("--addons=NodeLocalDNS")
    else
      custom_flags+=("--addons=Istio,NodeLocalDNS")
    fi
  fi
  readonly IS_BOSKOS
  readonly SKIP_TEARDOWNS
  # Without --run-tests we create the cluster and re-invoke ourselves inside
  # it; with it we set up the already-created cluster.
  if (( ! run_tests )); then
    create_test_cluster "${CLOUD_PROVIDER}" custom_flags e2e_script_command
  else
    setup_test_cluster
  fi
}
|
// drivers/chainex.js
const Driver = require('../models/driver');
const request = require('../lib/request');
const Ticker = require('../models/ticker');
const { parseToFloat } = require('../lib/utils');
/**
 * @memberof Driver
 * @augments Driver
 */
class ChainEx extends Driver {
  /**
   * @augments Driver.fetchTickers
   * @returns {Promise.Array<Ticker>} Returns a promise of an array with tickers.
   */
  async fetchTickers() {
    const response = await request('https://api.chainex.io/market/summary/');
    const markets = response.data;

    return markets.map((market) => {
      // Market symbol comes as "BASE/QUOTE".
      const [base, quote] = market.market.split('/');

      return new Ticker({
        base,
        baseName: market.name,
        quote,
        high: parseToFloat(market['24hhigh']),
        low: parseToFloat(market['24hlow']),
        close: parseToFloat(market.last_price),
        bid: parseToFloat(market.top_bid),
        ask: parseToFloat(market.top_ask),
        baseVolume: parseToFloat(market.volume_amount),
        quoteVolume: parseToFloat(market['24hvol']),
      });
    });
  }
}
module.exports = ChainEx;
|
// Test data helpers for the route service.
package route
import (
"math/rand"
"time"
pb "github.com/gotway/service-examples/pkg/route/pb"
)
// ValidPoint is a fixed point used as a known-good lookup target
// (coordinates appear to be latitude/longitude in E7 format — confirm).
var ValidPoint = &pb.Point{Latitude: 409146138, Longitude: -746188906}

// InvalidPoint is the zero point, used to exercise the not-found path.
var InvalidPoint = &pb.Point{Latitude: 0, Longitude: 0}

// Rect is a fixed query rectangle spanning Lo..Hi.
var Rect = &pb.Rectangle{
	Lo: &pb.Point{Latitude: 410000000, Longitude: -740000000},
	Hi: &pb.Point{Latitude: 415000000, Longitude: -745000000},
}
// randomPoints returns a freshly seeded, random-length (2..101) slice of
// random points.
func randomPoints() []*pb.Point {
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	count := int(rng.Int31n(100)) + 2
	points := make([]*pb.Point, 0, count)
	for n := 0; n < count; n++ {
		points = append(points, randomPoint(rng))
	}
	return points
}
// randomPoint draws one uniformly random point from r, scaled by 1e7.
func randomPoint(r *rand.Rand) *pb.Point {
	latitude := (r.Int31n(180) - 90) * 1e7
	longitude := (r.Int31n(360) - 180) * 1e7
	return &pb.Point{Latitude: latitude, Longitude: longitude}
}
// notes returns a fixed set of route notes; longitudes cycle 1,2,3 so two
// notes share each location.
func notes() []*pb.RouteNote {
	note := func(longitude int32, message string) *pb.RouteNote {
		return &pb.RouteNote{
			Location: &pb.Point{Latitude: 0, Longitude: longitude},
			Message:  message,
		}
	}
	return []*pb.RouteNote{
		note(1, "First message"),
		note(2, "Second message"),
		note(3, "Third message"),
		note(1, "Fourth message"),
		note(2, "Fifth message"),
		note(3, "Sixth message"),
	}
}
|
# Migration from travis-core (saucelabs/travis-core).
require 'data_migrations'
# Splits the monolithic builds table into commits, requests and tasks,
# migrates the existing rows across, and rewires builds/tasks to share one
# id sequence. `migrate_table` comes from the data_migrations gem.
class CreateRequestsCommitsAndTasks < ActiveRecord::Migration
  def self.up
    # Link builds to their new commit/request rows and track state directly.
    change_table :builds do |t|
      t.references :commit
      t.references :request
      t.string :state
    end
    create_table :commits, :force => true do |t|
      t.references :repository
      t.string :commit # would love to call this column :hash, but apparently FactoryGirl wouldn't >:/
      t.string :ref
      t.string :branch
      t.text :message
      t.string :compare_url
      t.datetime :committed_at
      t.string :committer_name
      t.string :committer_email
      t.string :author_name
      t.string :author_email
      t.timestamps
    end
    create_table :requests, :force => true do |t|
      t.references :repository
      t.references :commit
      t.string :state
      t.string :source
      t.text :payload
      t.string :token
      t.text :config
      t.string :commit # temp, for data migrations, so we can update the commit_id
      t.datetime :started_at
      t.datetime :finished_at
      t.timestamps
    end
    create_table :tasks, :force => true do |t|
      t.references :repository
      t.references :commit
      t.references :owner, :polymorphic => true
      t.string :queue
      t.string :type
      t.string :state
      t.string :number
      t.text :config
      t.integer :status
      t.text :log, :default => ''
      t.string :job_id
      t.string :worker
      t.string :commit # temp, for data migrations, so we can update the commit_id
      t.datetime :started_at
      t.datetime :finished_at
      t.timestamps
    end
    # Copy the commit-related columns out of builds into commits, then drop
    # them from builds.
    migrate_table :builds, :to => :commits do |t|
      t.copy :repository_id, :created_at, :updated_at, :commit,
        :ref, :branch, :message, :compare_url, :committed_at,
        :committer_name, :committer_email, :author_name, :author_email
      t.remove :ref, :branch, :message, :compare_url, :committed_at,
        :committer_name, :committer_email, :author_name, :author_email
    end
    migrate_table :builds, :to => :requests do |t|
      t.copy :repository_id, :config, :created_at, :updated_at, :commit, :started_at, :finished_at
      t.move :github_payload, :token, :to => [:payload, :token]
      t.set :state, 'finished'
      t.set :source, 'github'
    end
    # Child builds (and childless parents) become tasks owned by a Build.
    migrate_table :builds, :to => :tasks do |t|
      t.where 'parent_id IS NOT NULL OR parent_id IS NULL AND (SELECT COUNT(*) FROM builds AS children WHERE children.id = builds.id) = 0'
      t.copy :number, :status, :started_at, :finished_at, :commit, :config, :log
      t.remove :log
      t.copy :parent_id, :to => :owner_id
      t.set :owner_type, 'Build'
      t.set :type, 'Task::Test'
      t.set :state, 'finished'
    end
    add_index :commits, :commit
    add_index :builds, :commit
    add_index :requests, :commit
    add_index :tasks, :commit
    # Resolve the temporary :commit string columns into real foreign keys.
    execute 'UPDATE requests SET commit_id = (SELECT commits.id FROM commits WHERE commits.commit = requests.commit LIMIT 1)'
    execute 'UPDATE tasks SET commit_id = (SELECT commits.id FROM commits WHERE commits.commit = tasks.commit LIMIT 1)'
    execute 'DELETE FROM builds WHERE parent_id IS NOT NULL'
    execute 'UPDATE builds SET request_id = (SELECT requests.id FROM requests WHERE requests.commit = builds.commit LIMIT 1)'
    execute 'UPDATE builds SET commit_id = (SELECT commits.id FROM commits WHERE commits.commit = builds.commit LIMIT 1)'
    # Builds and tasks share one id sequence so their ids never collide.
    # execute "DROP SEQUENCE shared_builds_tasks_seq" rescue nil
    execute "CREATE SEQUENCE shared_builds_tasks_seq START WITH #{[Build.maximum(:id), (Task.maximum(:id) rescue 0)].compact.max.to_i + 1} CACHE 30"
    execute "ALTER TABLE builds ALTER COLUMN id TYPE BIGINT"
    execute "ALTER TABLE builds ALTER COLUMN id SET DEFAULT nextval('shared_builds_tasks_seq')"
    execute "ALTER TABLE tasks ALTER COLUMN id TYPE BIGINT"
    execute "ALTER TABLE tasks ALTER COLUMN id SET DEFAULT nextval('shared_builds_tasks_seq')"
    %w(commits requests tasks).each do |table_name|
      execute "SELECT setval('#{table_name}_id_seq', #{select_value("SELECT max(id) FROM #{table_name}").to_i + 1})"
    end
    remove_column :builds, :parent_id
    remove_column :builds, :commit
    remove_column :requests, :commit
    remove_column :tasks, :commit
  end
  def self.down
    # TODO complete this
    #
    # change_table :builds do |t|
    #   t.text :github_payload
    #   t.string :token
    #   t.string :commit
    #   t.string :ref
    #   t.string :branch
    #   t.text :message
    #   t.string :compare_url
    #   t.datetime :committed_at
    #   t.string :committer_name
    #   t.string :committer_email
    #   t.string :author_name
    #   t.string :author_email
    #   t.references :parent_id
    #   t.integer :status
    #   t.text :log, :default => ''
    #   t.string :worker
    #   t.remove :commit_id
    #   t.remove :request_id
    # end
    # migrate_table :commits, :to => :builds do |t|
    #   t.copy :commit, :ref, :branch, :message, :compare_url, :committed_at,
    #     :committer_name, :committer_email, :author_name, :author_email
    # end
    # migrate_table :requests, :to => :builds do |t|
    #   t.copy :token, :payload, :to => [:token, :github_payload]
    # end
    # migrate_table :tasks, :to => :builds do |t|
    #   t.copy :status, :log
    #   t.copy :owner_id, :to => :parent_id
    # end
    # drop_table :commits
    # drop_table :requests
  end
end
|
import UrlParser from '../../routes/url-parser';
import RestaurantSource from '../../data/restaurant-source';
import { restaurantDetailTemplate } from '../templates/template-html';
import LikeButtonPresenter from '../../utils/like-button-presenter';
import PostReview from '../../utils/post-review';
import Spinner from '../templates/spinner-html';
import FavRestaurantIdb from '../../data/restaurant-idb';
/**
 * Restaurant detail page: renders the detail view, the like button and a
 * review submission form.
 */
const Detail = {
  async render() {
    return `
      <div class="container">
        <div id="loading"></div>
        <div class="main">
          <h2 class="title-container">Detail Restaurant</h2>
          <section id="detail-rest"></section>
          <div class="like" id="likeButtonContainer"></div>
          <div class="form-review">
            <form>
              <div class="mb-3">
                <label for="inputName" class="form-label">Name</label>
                <input name="inputName" type="text" class="form-control" id="inputName">
              </div>
              <div class="mb-3">
                <label for="inputReview" class="form-label">Review</label>
                <input name="inputReview" type="text" class="form-control" id="inputReview">
              </div>
              <button id="submit-review" type="submit" class="btn2">Submit</button>
            </form>
          </div>
        </div>
      </div>
    `;
  },

  async afterRender() {
    const url = UrlParser.parseActiveUrlWithoutCombiner();
    const detailContainer = document.querySelector('#detail-rest');
    const loading = document.querySelector('#loading');
    const main = document.querySelector('.main');

    // Show the spinner and hide the content while the detail is loading.
    loading.innerHTML = Spinner();
    main.style.display = 'none';

    try {
      const data = await RestaurantSource.detailRestaurant(url.id);
      detailContainer.innerHTML += restaurantDetailTemplate(data.restaurant);
      await LikeButtonPresenter.init({
        likeButtonContainer: document.querySelector('#likeButtonContainer'),
        favoriteRestaurant: FavRestaurantIdb,
        data,
      });
    } catch (err) {
      detailContainer.innerHTML = `Error: ${err}, swipe up to refresh!`;
    } finally {
      // Restore visibility on both success and failure (this was previously
      // duplicated in the try and catch branches).
      main.style.display = 'block';
      loading.style.display = 'none';
    }

    // Wire up the review form: reject empty fields, otherwise post the review.
    const btnSubmit = document.querySelector('#submit-review');
    const nameInput = document.querySelector('#inputName');
    const reviewInput = document.querySelector('#inputReview');
    btnSubmit.addEventListener('click', (e) => {
      e.preventDefault();
      if (nameInput.value === '' || reviewInput.value === '') {
        // eslint-disable-next-line no-alert
        alert('Inputan tidak boleh ada yang kosong');
      } else {
        PostReview(url, nameInput.value, reviewInput.value);
      }
      // Inputs are cleared in both branches.
      nameInput.value = '';
      reviewInput.value = '';
    });
  },
};
export default Detail;
|
// gatsby-config.js
module.exports = {
  // Site-wide metadata, queryable via GraphQL from page components.
  siteMetadata: {
    title: `Luke Willis`,
    description: `Just some dude`,
    author: `@lukemwillis`,
    linkedinUsername: "lukemwillis",
    stackOverflowUserId: "2479481",
  },
  plugins: [
    {
      // Google Analytics; the tracking id comes from the environment so it
      // is not committed to the repository.
      resolve: `gatsby-plugin-google-analytics`,
      options: {
        trackingId: process.env.GA_TRACKING_ID,
        anonymize: true,
        respectDNT: true,
      },
    },
    `gatsby-plugin-react-helmet`,
    {
      // Source page content from src/pages (consumed by gatsby-plugin-mdx).
      resolve: `gatsby-source-filesystem`,
      options: {
        name: `pages`,
        path: `${__dirname}/src/pages/`,
      },
    },
    {
      // Source image files for the sharp transformer plugins below.
      resolve: `gatsby-source-filesystem`,
      options: {
        name: `images`,
        path: `${__dirname}/src/images`,
      },
    },
    `gatsby-plugin-mdx`,
    `gatsby-transformer-sharp`,
    `gatsby-plugin-sharp`,
    {
      // Web app manifest (PWA install metadata).
      resolve: `gatsby-plugin-manifest`,
      options: {
        // NOTE(review): `<NAME>` looks like a redacted placeholder — confirm
        // the intended site name before shipping.
        name: `<NAME>`,
        short_name: `Luke`,
        start_url: `/`,
        background_color: `#663399`,
        theme_color: `#663399`,
        display: `minimal-ui`,
        icon: `src/images/luke-face.png`, // This path is relative to the root of the site.
      },
    },
    `gatsby-plugin-offline`,
  ],
}
|
#!/bin/sh
# Download one Stanford scene archive from Google Drive, unpack it into the
# dataset directory, and remove the archive.
# Parameters: $1 - scene name, $2 - Google Drive file id.
process_stanford_scene()
{
    DATA_NAME=$1
    GDRIVE_ID=$2
    ./gdrive_download.sh "$GDRIVE_ID"
    # BUG FIX: "--qq" is not a valid unzip flag (a leading "--" actually
    # *negates* an option in unzip); "-qq" is the extra-quiet mode intended.
    unzip -qq "${DATA_NAME}_png.zip" -d "../dataset/stanford/${DATA_NAME}"
    rm "${DATA_NAME}_png.zip"
}
# -p creates missing parent directories and makes re-runs of this script
# idempotent (plain mkdir fails when ../dataset does not exist yet or the
# target already does).
mkdir -p ../dataset/stanford
# Download and unpack each Stanford scene: <name> <google-drive-file-id>.
process_stanford_scene "burghers" "0B6qjzcYetERgUU0wMkhnZVNCa28"
process_stanford_scene "lounge" "0B6qjzcYetERgSUZFT2FWdWsxQzQ"
process_stanford_scene "copyroom" "0B6qjzcYetERgWTBDYWdkVHN3aHc"
process_stanford_scene "cactusgarden" "0B6qjzcYetERgYUxUSFFIYjZIb3c"
process_stanford_scene "stonewall" "0B6qjzcYetERgOXBCM181bTdsUGc"
process_stanford_scene "totempole" "0B6qjzcYetERgNjVEWm5sSWFlWk0"
|
/**
 * Placeholder class with an empty program entry point.
 */
public class ExampleClass {
    /**
     * Entry point; currently a no-op.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // Code logic goes here
    }
}
def perform_substitutions(text, substitutions):
    """Replace each mapped name in *text* with a middle-dot-prefixed value.

    A trailing space is appended to both the search key and the replacement
    so only whole-word (exact-name) occurrences are rewritten.
    """
    for name, replacement in substitutions.items():
        pattern = "{0} ".format(name)
        target = u"\xb7{0} ".format(replacement)
        text = text.replace(pattern, target)
    return text
#!/bin/bash
# Start a Redis server in the background, detached from the terminal
# (nohup keeps it running after the shell exits; output goes to nohup.out).
nohup redis-server &
# Run this file as source local-testing.sh from sweater-comb root, then you can run sweater-comb-local anywhere during testing :)
CURRENT_ROOT=$(pwd)
# Double quotes expand $CURRENT_ROOT *now*, baking the absolute path into the
# alias so it keeps working even if CURRENT_ROOT is later changed or unset;
# the inner single quotes protect paths containing spaces.
alias sweater-comb-local="ts-node '$CURRENT_ROOT/src/index.ts'"
|
# ipythonblocks/ipythonblocks.py
"""
ipythonblocks provides a BlockGrid class that displays a colored grid in the
IPython Notebook. The colors can be manipulated, making it useful for
practicing control flow stuctures and quickly seeing the results.
"""
# This file is copyright 2013 by <NAME> and covered by the license at
# https://github.com/jiffyclub/ipythonblocks/blob/master/LICENSE.txt
import copy
import collections
import json
import numbers
import os
import sys
import time
import uuid
from operator import iadd
from functools import reduce
from IPython.display import HTML, display, clear_output
from IPython.display import Image as ipyImage
# Public API of the module (Pixel/ImageGrid/colors/fui_colors are defined
# further down the file, outside this view).
__all__ = ('Block', 'BlockGrid', 'Pixel', 'ImageGrid',
           'InvalidColorSpec', 'ShapeMismatch', 'show_color',
           'embed_colorpicker', 'clear', 'colors', 'fui_colors', '__version__')
__version__ = '1.9.dev'
# HTML templates used to render grids in the notebook.
# _TABLE fields: {0} per-display uuid, {1} border width in px, {2} table rows.
_TABLE = ('<style type="text/css">'
          'table.blockgrid {{border: none;}}'
          ' .blockgrid tr {{border: none;}}'
          ' .blockgrid td {{padding: 0px;}}'
          ' #blocks{0} td {{border: {1}px solid white;}}'
          '</style>'
          '<table id="blocks{0}" class="blockgrid"><tbody>{2}</tbody></table>')
_TR = '<tr>{0}</tr>'
# _TD fields: {0} hover title, {1} cell side in px, {2} CSS color.
_TD = ('<td title="{0}" style="width: {1}px; height: {1}px;'
       'background-color: {2};"></td>')
_RGB = 'rgb({0}, {1}, {2})'
_TITLE = 'Index: [{0}, {1}] Color: ({2}, {3}, {4})'
# Index-category tags returned by BlockGrid._categorize_index.
_SINGLE_ITEM = 'single item'
_SINGLE_ROW = 'single row'
_ROW_SLICE = 'row slice'
_DOUBLE_SLICE = 'double slice'
# Lower bound for a block's side length in pixels.
_SMALLEST_BLOCK = 1
# Endpoints of the ipythonblocks.org grid-sharing service.
_POST_URL = 'http://www.ipythonblocks.org/post'
_GET_URL_PUBLIC = 'http://www.ipythonblocks.org/get/{0}'
_GET_URL_SECRET = 'http://www.ipythonblocks.org/get/secret/{0}'
class InvalidColorSpec(Exception):
    """
    Error for a color value that is not a number.
    Raised by Block._check_value when assigning a non-numeric color.
    """
    pass
class ShapeMismatch(Exception):
    """
    Error for when a grid assigned to another doesn't have the same shape.
    Raised by BlockGrid.__setitem__.
    """
    pass
def clear():
    """
    Clear the output of the current cell.
    This is a thin wrapper around IPython.display.clear_output, re-exported
    here so users don't need the extra import.
    """
    clear_output()
def show_color(red, green, blue):
    """
    Show a given color in the IPython Notebook.
    Parameters
    ----------
    red, green, blue : int
        Integers on the range [0 - 255].
    """
    swatch_template = ('<div style="height: 60px; min-width: 200px; '
                       'background-color: {0}"></div>')
    css_color = _RGB.format(red, green, blue)
    display(HTML(swatch_template.format(css_color)))
def embed_colorpicker():
    """
    Embed the web page www.colorpicker.com inside the IPython Notebook.
    """
    markup = ('<iframe src="http://www.colorpicker.com/" '
              'width="100%" height="550px"></iframe>')
    display(HTML(markup))
def _color_property(name):
    """
    Build a validating property for the color attribute ``name``.
    The value is stored on the instance under a leading-underscore name and
    every assignment is clamped to [0, 255] via Block._check_value.
    """
    real_name = "_" + name
    @property
    def prop(self):
        return getattr(self, real_name)
    @prop.setter
    def prop(self, value):
        value = Block._check_value(value)
        setattr(self, real_name, value)
    return prop
def _flatten(thing, ignore_types=(str,)):
"""
Yield a single item or str/unicode or recursively yield from iterables.
Adapted from Beazley's Python Cookbook.
"""
if isinstance(thing, collections.Iterable) and \
not isinstance(thing, ignore_types):
for i in thing:
for x in _flatten(i):
yield x
else:
yield thing
def _parse_str_cell_spec(cells, length):
    """
    Parse a single string cell specification representing either a single
    integer or a slice.
    Parameters
    ----------
    cells : str
        E.g. '5' for an int or '5:9' for a slice.
    length : int
        The number of items in the user's In history list. Used for
        normalizing slices.
    Returns
    -------
    cell_nos : list of int
    """
    if ':' in cells:
        # Build slice components, treating empty fields as None ('5:' etc.).
        bounds = [int(part) if part else None for part in cells.split(':')]
        return _parse_cells_spec(slice(*bounds), length)
    return _parse_cells_spec(int(cells), length)
def _parse_cells_spec(cells, length):
"""
Used by _get_code_cells to parse a cell specification string into an
ordered list of cell numbers.
Parameters
----------
cells : str, int, or slice
Specification of which cells to retrieve. Can be a single number,
a slice, or a combination of either separated by commas.
length : int
The number of items in the user's In history list. Used for
normalizing slices.
Returns
-------
cell_nos : list of int
Ordered list of cell numbers derived from spec.
"""
if isinstance(cells, int):
return [cells]
elif isinstance(cells, slice):
return list(range(*cells.indices(length)))
else:
# string parsing
return sorted(set(_flatten(_parse_str_cell_spec(s, length)
for s in cells.split(','))))
def _get_code_cells(cells):
    """
    Get the inputs of the specified cells from the notebook.
    Parameters
    ----------
    cells : str, int, or slice
        Specification of which cells to retrieve. Can be a single number,
        a slice, or a combination of either separated by commas.
    Returns
    -------
    code : list of str
        Contents of cells as strings in chronological order.
    """
    history = get_ipython().user_ns['In']
    cell_numbers = _parse_cells_spec(cells, len(history))
    return [history[number] for number in cell_numbers]
class Block(object):
    """
    A colored square.
    Parameters
    ----------
    red, green, blue : int
        Integers on the range [0 - 255].
    size : int, optional
        Length of the sides of this block in pixels. One is the lower limit.
    Attributes
    ----------
    red, green, blue : int
        The color values for this `Block`. The color of the `Block` can be
        updated by assigning new values to these attributes.
    rgb : tuple of int
        Tuple of (red, green, blue) values. Can be used to set all the colors
        at once.
    row, col : int
        The zero-based grid position of this `Block`.
    size : int
        Length of the sides of this block in pixels. The block size can be
        changed by modifying this attribute. Note that one is the lower limit.
    """
    # Validating color attributes; assignment clamps to [0, 255] via
    # Block._check_value (see _color_property).
    red = _color_property('red')
    green = _color_property('green')
    blue = _color_property('blue')
    def __init__(self, red, green, blue, size=20):
        self.red = red
        self.green = green
        self.blue = blue
        self.size = size
        # Grid position; filled in by BlockGrid.__getitem__ when the block
        # is accessed through a grid.
        self._row = None
        self._col = None
    @staticmethod
    def _check_value(value):
        """
        Check that a value is a number and constrain it to [0 - 255].
        """
        if not isinstance(value, numbers.Number):
            s = 'value must be a number. got {0}.'.format(value)
            raise InvalidColorSpec(s)
        return int(round(min(255, max(0, value))))
    @property
    def rgb(self):
        """Tuple of (red, green, blue); assignable with a 3-sequence."""
        return (self._red, self._green, self._blue)
    @rgb.setter
    def rgb(self, colors):
        if len(colors) != 3:
            s = 'Setting colors requires three values: (red, green, blue).'
            raise ValueError(s)
        self.red, self.green, self.blue = colors
    @property
    def row(self):
        """Zero-based row within the grid (None if unattached)."""
        return self._row
    @property
    def col(self):
        """Zero-based column within the grid (None if unattached)."""
        return self._col
    @property
    def size(self):
        """Side length in pixels; assignments are floored at the minimum."""
        return self._size
    @size.setter
    def size(self, size):
        self._size = max(_SMALLEST_BLOCK, size)
    def set_colors(self, red, green, blue):
        """
        Updated block colors.
        Parameters
        ----------
        red, green, blue : int
            Integers on the range [0 - 255].
        """
        self.red = red
        self.green = green
        self.blue = blue
    def _update(self, other):
        """
        Copy color (and size) from ``other``: a Block, or a sequence of
        exactly three color values.
        """
        # collections.Sequence was removed from the top-level collections
        # namespace in Python 3.10; prefer collections.abc when present.
        sequence_type = getattr(collections, 'abc', collections).Sequence
        if isinstance(other, Block):
            self.rgb = other.rgb
            self.size = other.size
        elif isinstance(other, sequence_type) and len(other) == 3:
            self.rgb = other
        else:
            errmsg = (
                'Value must be a Block or a sequence of 3 integers. '
                'Got {0!r}.'
            )
            raise ValueError(errmsg.format(other))
    @property
    def _td(self):
        """
        The HTML for a table cell with the background color of this Block.
        """
        title = _TITLE.format(self._row, self._col,
                              self._red, self._green, self._blue)
        rgb = _RGB.format(self._red, self._green, self._blue)
        return _TD.format(title, self._size, rgb)
    def _repr_html_(self):
        # Render as a one-cell table; the random uuid keeps the generated
        # CSS rules unique among multiple displayed blocks.
        return _TABLE.format(uuid.uuid4(), 0, _TR.format(self._td))
    def show(self):
        """Display this block in the IPython Notebook."""
        display(HTML(self._repr_html_()))
    # Blocks are mutable and compare by value, so they are unhashable.
    __hash__ = None
    def __eq__(self, other):
        if not isinstance(other, Block):
            return False
        return self.rgb == other.rgb and self.size == other.size
    def __str__(self):
        s = ['{0}'.format(self.__class__.__name__),
             'Color: ({0}, {1}, {2})'.format(self._red,
                                             self._green,
                                             self._blue)]
        # add position information if we have it
        if self._row is not None:
            s[0] += ' [{0}, {1}]'.format(self._row, self._col)
        return os.linesep.join(s)
    def __repr__(self):
        type_name = type(self).__name__
        return '{0}({1}, {2}, {3}, size={4})'.format(type_name,
                                                     self.red,
                                                     self.green,
                                                     self.blue,
                                                     self.size)
class BlockGrid(object):
    """
    A grid of blocks whose colors can be individually controlled.
    Supports 2-D indexing and slicing via ``grid[row, col]``; slice results
    are views sharing the underlying Block objects.
    Parameters
    ----------
    width : int
        Number of blocks wide to make the grid.
    height : int
        Number of blocks high to make the grid.
    fill : tuple of int, optional
        An optional initial color for the grid, defaults to black.
        Specified as a tuple of (red, green, blue). E.g.: (10, 234, 198)
    block_size : int, optional
        Length of the sides of grid blocks in pixels. One is the lower limit.
    lines_on : bool, optional
        Whether or not to display lines between blocks.
    Attributes
    ----------
    width : int
        Number of blocks along the width of the grid.
    height : int
        Number of blocks along the height of the grid.
    shape : tuple of int
        A tuple of (width, height).
    block_size : int
        Length of the sides of grid blocks in pixels. The block size can be
        changed by modifying this attribute. Note that one is the lower limit.
    lines_on : bool
        Whether lines are shown between blocks when the grid is displayed.
        This attribute can used to toggle the whether the lines appear.
    """
    def __init__(self, width, height, fill=(0, 0, 0),
                 block_size=20, lines_on=True):
        # lines_on goes through its property setter (which validates the
        # value); the rest are stored directly behind read-only properties.
        self._width = width
        self._height = height
        self._block_size = block_size
        self.lines_on = lines_on
        self._initialize_grid(fill)
def _initialize_grid(self, fill):
grid = [[Block(*fill, size=self._block_size)
for col in range(self.width)]
for row in range(self.height)]
self._grid = grid
    @property
    def width(self):
        """Number of blocks across the grid (read-only)."""
        return self._width
    @property
    def height(self):
        """Number of blocks down the grid (read-only)."""
        return self._height
    @property
    def shape(self):
        """Tuple of (width, height)."""
        return (self._width, self._height)
    @property
    def block_size(self):
        """Block side length in pixels; setting it resizes every block."""
        return self._block_size
    @block_size.setter
    def block_size(self, size):
        self._block_size = size
        for block in self:
            block.size = size
    @property
    def lines_on(self):
        """Whether white gridlines are drawn between blocks."""
        return self._lines_on
    @lines_on.setter
    def lines_on(self, value):
        # Accepts exactly True/False (or 0/1); anything else is rejected.
        if value not in (0, 1):
            s = 'lines_on may only be True or False.'
            raise ValueError(s)
        self._lines_on = value
def __eq__(self, other):
if not isinstance(other, BlockGrid):
return False
else:
# compare the underlying grids
return self._grid == other._grid
    def _view_from_grid(self, grid):
        """
        Make a new grid from a list of lists of Block objects.
        The result is a view: it shares the Block objects with ``grid``
        rather than copying them.
        """
        new_width = len(grid[0])
        new_height = len(grid)
        new_BG = self.__class__(new_width, new_height,
                                block_size=self._block_size,
                                lines_on=self._lines_on)
        new_BG._grid = grid
        return new_BG
@staticmethod
def _categorize_index(index):
"""
Used by __getitem__ and __setitem__ to determine whether the user
is asking for a single item, single row, or some kind of slice.
"""
if isinstance(index, int):
return _SINGLE_ROW
elif isinstance(index, slice):
return _ROW_SLICE
elif isinstance(index, tuple):
if len(index) > 2:
s = 'Invalid index, too many dimensions.'
raise IndexError(s)
elif len(index) == 1:
s = 'Single indices must be integers, not tuple.'
raise TypeError(s)
if isinstance(index[0], slice):
if isinstance(index[1], (int, slice)):
return _DOUBLE_SLICE
if isinstance(index[1], slice):
if isinstance(index[0], (int, slice)):
return _DOUBLE_SLICE
elif isinstance(index[0], int) and isinstance(index[0], int):
return _SINGLE_ITEM
raise IndexError('Invalid index.')
    def __getitem__(self, index):
        """
        2-D indexing: an int yields a one-row grid view, a slice a multi-row
        view, ``[row, col]`` a single Block, and a slice pair a sub-grid
        view. Views share the underlying Block objects.
        """
        ind_cat = self._categorize_index(index)
        if ind_cat == _SINGLE_ROW:
            return self._view_from_grid([self._grid[index]])
        elif ind_cat == _SINGLE_ITEM:
            block = self._grid[index[0]][index[1]]
            # remember the position so the block can report it (row/col/_td)
            block._row, block._col = index
            return block
        elif ind_cat == _ROW_SLICE:
            return self._view_from_grid(self._grid[index])
        elif ind_cat == _DOUBLE_SLICE:
            new_grid = self._get_double_slice(index)
            return self._view_from_grid(new_grid)
def __setitem__(self, index, value):
thing = self[index]
if isinstance(value, BlockGrid):
if isinstance(thing, BlockGrid):
if thing.shape != value.shape:
raise ShapeMismatch('Both sides of grid assignment must '
'have the same shape.')
for a, b in zip(thing, value):
a._update(b)
else:
raise TypeError('Cannot assign grid to single block.')
elif isinstance(value, (collections.Iterable, Block)):
for b in _flatten(thing):
b._update(value)
def _get_double_slice(self, index):
sl_height, sl_width = index
if isinstance(sl_width, int):
if sl_width == -1:
sl_width = slice(sl_width, None)
else:
sl_width = slice(sl_width, sl_width + 1)
if isinstance(sl_height, int):
if sl_height == -1:
sl_height = slice(sl_height, None)
else:
sl_height = slice(sl_height, sl_height + 1)
rows = self._grid[sl_height]
grid = [r[sl_width] for r in rows]
return grid
def __iter__(self):
for r in range(self.height):
for c in range(self.width):
yield self[r, c]
    def animate(self, stop_time=0.2):
        """
        Call this method in a loop definition to have your changes to the grid
        animated in the IPython Notebook.
        This is a generator: it re-renders the grid before yielding each
        block and does one final render when exhausted.
        Parameters
        ----------
        stop_time : float
            Amount of time to pause between loop steps.
        """
        for block in self:
            self.show()
            time.sleep(stop_time)
            yield block
            # wait=True defers clearing until the next display (less flicker)
            clear_output(wait=True)
        self.show()
    def _repr_html_(self):
        # Concatenate one <td> per block into <tr> rows via reduce(iadd, ...);
        # the random uuid keeps the per-table CSS rules unique among multiple
        # displayed grids.
        rows = range(self._height)
        cols = range(self._width)
        html = reduce(iadd,
                      (_TR.format(reduce(iadd,
                                         (self[r, c]._td
                                          for c in cols)))
                       for r in rows))
        return _TABLE.format(uuid.uuid4(), int(self._lines_on), html)
def __str__(self):
s = ['{0}'.format(self.__class__.__name__),
'Shape: {0}'.format(self.shape)]
return os.linesep.join(s)
    def copy(self):
        """
        Returns an independent copy of this BlockGrid.
        Uses copy.deepcopy, so the Block objects are copied too.
        """
        return copy.deepcopy(self)
    def show(self):
        """
        Display colored grid as an HTML table.
        """
        display(HTML(self._repr_html_()))
    def flash(self, display_time=0.2):
        """
        Display the grid for a time.
        Useful for making an animation or iteratively displaying changes.
        Note that this will leave the grid in place until something replaces
        it in the same cell. You can use the ``clear`` function to
        manually clear output.
        Parameters
        ----------
        display_time : float
            Amount of time, in seconds, to display the grid.
        """
        self.show()
        time.sleep(display_time)
        # wait=True defers clearing until the next display (less flicker)
        clear_output(wait=True)
def _calc_image_size(self):
"""
Calculate the size, in pixels, of the grid as an image.
Returns
-------
px_width : int
px_height : int
"""
px_width = self._block_size * self._width
px_height = self._block_size * self._height
if self._lines_on:
px_width += self._width + 1
px_height += self._height + 1
return px_width, px_height
    def _write_image(self, fp, format='png'):
        """
        Write an image of the current grid to a file-object.
        Parameters
        ----------
        fp : file-like
            A file-like object such as an open file pointer or
            a StringIO/BytesIO instance.
        format : str, optional
            An image format that will be understood by PIL,
            e.g. 'png', 'jpg', 'gif', etc.
        """
        try:
            # PIL
            import Image
            import ImageDraw
        except ImportError:
            # pillow
            from PIL import Image, ImageDraw
        im = Image.new(
            mode='RGB', size=self._calc_image_size(), color=(255, 255, 255))
        draw = ImageDraw.Draw(im)
        _bs = self._block_size
        for r in range(self._height):
            for c in range(self._width):
                px_r = r * _bs
                px_c = c * _bs
                if self._lines_on:
                    # shift by one pixel per gridline to leave white borders
                    px_r += r + 1
                    px_c += c + 1
                rect = ((px_c, px_r), (px_c + _bs - 1, px_r + _bs - 1))
                draw.rectangle(rect, fill=self._grid[r][c].rgb)
        im.save(fp, format=format)
    def show_image(self):
        """
        Embed grid in the notebook as a PNG image.
        """
        # Version branch keeps Python 2 support: its StringIO plays the role
        # io.BytesIO has on Python 3.
        if sys.version_info[0] == 2:
            from StringIO import StringIO as BytesIO
        elif sys.version_info[0] == 3:
            from io import BytesIO
        im = BytesIO()
        self._write_image(im)
        display(ipyImage(data=im.getvalue(), format='png'))
def save_image(self, filename):
"""
Save an image representation of the grid to a file.
Image format will be inferred from file extension.
Parameters
----------
filename : str
Name of file to save to.
"""
with open(filename, 'wb') as f:
self._write_image(f, format=filename.split('.')[-1])
def to_text(self, filename=None):
"""
Write a text file containing the size and block color information
for this grid.
If no file name is given the text is sent to stdout.
Parameters
----------
filename : str, optional
File into which data will be written. Will be overwritten if
it already exists.
"""
if filename:
f = open(filename, 'w')
else:
f = sys.stdout
s = ['# width height', '{0} {1}'.format(self.width, self.height),
'# block size', '{0}'.format(self.block_size),
'# initial color', '0 0 0',
'# row column red green blue']
f.write(os.linesep.join(s) + os.linesep)
for block in self:
things = [str(x) for x in (block.row, block.col) + block.rgb]
f.write(' '.join(things) + os.linesep)
if filename:
f.close()
def _to_simple_grid(self):
"""
Make a simple representation of the table: nested lists of
of the rows containing tuples of (red, green, blue, size)
for each of the blocks.
Returns
-------
grid : list of lists
No matter the class this method is called on the returned
grid will be Python-style: row oriented with the top-left
block in the [0][0] position.
"""
return [[(x.red, x.green, x.blue, x.size) for x in row]
for row in self._grid]
def _construct_post_request(self, code_cells, secret):
"""
Construct the request dictionary that will be posted
to ipythonblocks.org.
Parameters
----------
code_cells : int, str, slice, or None
Specify any code cells to be sent and displayed with the grid.
You can specify a single cell, a Python, slice, or a combination
as a string separated by commas.
For example, '3,5,8:10' would send cells 3, 5, 8, and 9.
secret : bool
If True, this grid will not be shown randomly on ipythonblocks.org.
Returns
-------
request : dict
"""
if code_cells is not None:
code_cells = _get_code_cells(code_cells)
req = {
'python_version': tuple(sys.version_info),
'ipb_version': __version__,
'ipb_class': self.__class__.__name__,
'code_cells': code_cells,
'secret': secret,
'grid_data': {
'lines_on': self.lines_on,
'width': self.width,
'height': self.height,
'blocks': self._to_simple_grid()
}
}
return req
def post_to_web(self, code_cells=None, secret=False):
"""
Post this grid to ipythonblocks.org and return a URL to
view the grid on the web.
Parameters
----------
code_cells : int, str, or slice, optional
Specify any code cells to be sent and displayed with the grid.
You can specify a single cell, a Python, slice, or a combination
as a string separated by commas.
For example, '3,5,8:10' would send cells 3, 5, 8, and 9.
secret : bool, optional
If True, this grid will not be shown randomly on ipythonblocks.org.
Returns
-------
url : str
URL to view your grid on ipythonblocks.org.
"""
import requests
req = self._construct_post_request(code_cells, secret)
response = requests.post(_POST_URL, data=json.dumps(req))
response.raise_for_status()
return response.json()['url']
def _load_simple_grid(self, block_data):
"""
Modify the grid to reflect the data in `block_data`, which
should be a nested list of tuples as produced by `_to_simple_grid`.
Parameters
----------
block_data : list of lists
Nested list of tuples as produced by `_to_simple_grid`.
"""
if len(block_data) != self.height or \
len(block_data[0]) != self.width:
raise ShapeMismatch('block_data must have same shape as grid.')
for row in range(self.height):
for col in range(self.width):
self._grid[row][col].rgb = block_data[row][col][:3]
self._grid[row][col].size = block_data[row][col][3]
@classmethod
def from_web(cls, grid_id, secret=False):
"""
Make a new BlockGrid from a grid on ipythonblocks.org.
Parameters
----------
grid_id : str
ID of a grid on ipythonblocks.org. This will be the part of the
URL after 'ipythonblocks.org/'.
secret : bool, optional
Whether or not the grid on ipythonblocks.org is secret.
Returns
-------
grid : BlockGrid
"""
import requests
get_url = _GET_URL_PUBLIC if not secret else _GET_URL_SECRET
resp = requests.get(get_url.format(grid_id))
resp.raise_for_status()
grid_spec = resp.json()
grid = cls(grid_spec['width'], grid_spec['height'],
lines_on=grid_spec['lines_on'])
grid._load_simple_grid(grid_spec['blocks'])
return grid
class Pixel(Block):
    """
    A single element of an ImageGrid: a Block addressed by image-style
    (x, y) coordinates instead of (row, col).
    """
    @property
    def x(self):
        """
        Horizontal coordinate of Pixel.
        """
        # image-style: x maps onto the underlying column
        return self._col

    @property
    def y(self):
        """
        Vertical coordinate of Pixel.
        """
        # image-style: y maps onto the underlying row
        return self._row

    @property
    def _td(self):
        """
        The HTML for a table cell with the background color of this Pixel.
        """
        # tooltip shows (x, y) first -- column before row, matching the
        # image-style coordinates exposed above
        title = _TITLE.format(self._col, self._row,
                              self._red, self._green, self._blue)
        rgb = _RGB.format(self._red, self._green, self._blue)
        return _TD.format(title, self._size, rgb)

    def __str__(self):
        s = ['{0}'.format(self.__class__.__name__),
             'Color: ({0}, {1}, {2})'.format(self._red,
                                             self._green,
                                             self._blue)]

        # add position information if we have it (as [x, y])
        if self._row is not None:
            s[0] += ' [{0}, {1}]'.format(self._col, self._row)

        return os.linesep.join(s)
class ImageGrid(BlockGrid):
    """
    A grid of blocks whose colors can be individually controlled.

    Parameters
    ----------
    width : int
        Number of blocks wide to make the grid.
    height : int
        Number of blocks high to make the grid.
    fill : tuple of int, optional
        An optional initial color for the grid, defaults to black.
        Specified as a tuple of (red, green, blue). E.g.: (10, 234, 198)
    block_size : int, optional
        Length of the sides of grid blocks in pixels. One is the lower limit.
    lines_on : bool, optional
        Whether or not to display lines between blocks.
    origin : {'lower-left', 'upper-left'}, optional
        Set the location of the grid origin.

    Attributes
    ----------
    width : int
        Number of blocks along the width of the grid.
    height : int
        Number of blocks along the height of the grid.
    shape : tuple of int
        A tuple of (width, height).
    block_size : int
        Length of the sides of grid blocks in pixels.
    lines_on : bool
        Whether lines are shown between blocks when the grid is displayed.
        This attribute can used to toggle the whether the lines appear.
    origin : str
        The location of the grid origin.

    """
    def __init__(self, width, height, fill=(0, 0, 0),
                 block_size=20, lines_on=True, origin='lower-left'):
        super(ImageGrid, self).__init__(width, height, fill,
                                        block_size, lines_on)

        if origin not in ('lower-left', 'upper-left'):
            s = "origin keyword must be one of {'lower-left', 'upper-left'}."
            raise ValueError(s)

        self._origin = origin

    def _initialize_grid(self, fill):
        # populate the backing row-major grid with Pixel instances
        # (overrides the parent, which stores plain Blocks)
        grid = [[Pixel(*fill, size=self._block_size)
                 for col in range(self.width)]
                for row in range(self.height)]

        self._grid = grid

    @property
    def block_size(self):
        # read-only on ImageGrid (no setter defined here)
        return self._block_size

    @property
    def origin(self):
        # 'lower-left' or 'upper-left'; fixed at construction time
        return self._origin

    def _transform_index(self, index):
        """
        Transform a single-item index from Python style coordinates to
        image style coordinates in which the first item refers to column and
        the second item refers to row. Also takes into account the
        location of the origin.

        """
        # in ImageGrid index is guaranteed to be a tuple.

        # first thing, switch the coordinates since ImageGrid is column
        # major and ._grid is row major.
        new_ind = [index[1], index[0]]

        # now take into account that the ImageGrid origin may be lower-left,
        # while the ._grid origin is upper-left.
        if self._origin == 'lower-left':
            if new_ind[0] >= 0:
                # count rows from the bottom of the stored grid
                new_ind[0] = self._height - new_ind[0] - 1
            else:
                # negative indices already count from the bottom; map them
                # to the equivalent non-negative row from the top
                new_ind[0] = abs(new_ind[0]) - 1

        return tuple(new_ind)

    def __getitem__(self, index):
        """
        Index with image-style (x, y) coordinates: a single (x, y) pair
        returns one Pixel; a 2D slice returns a new grid view.
        """
        ind_cat = self._categorize_index(index)

        # ImageGrid will only support single item indexing and 2D slices
        if ind_cat not in (_DOUBLE_SLICE, _SINGLE_ITEM):
            s = 'ImageGrid only supports 2D indexing.'
            raise IndexError(s)

        if ind_cat == _SINGLE_ITEM:
            # should be able to index ._grid with new_ind regardless of any
            # following coordinate transforms. let's just make sure.
            self._grid[index[1]][index[0]]

            real_index = self._transform_index(index)
            pixel = self._grid[real_index[0]][real_index[1]]
            # stamp the image-style coordinates onto the Pixel so its
            # repr and tooltip reflect how it was addressed
            pixel._col, pixel._row = index
            return pixel

        elif ind_cat == _DOUBLE_SLICE:
            new_grid = self._get_double_slice(index)
            return self._view_from_grid(new_grid)

    def _get_double_slice(self, index):
        # index is (column slice-or-int, row slice-or-int)
        cslice, rslice = index

        # normalize int indices to one-element slices; -1 is special-cased
        # because slice(-1, 0) would be empty
        if isinstance(rslice, int):
            if rslice == -1:
                rslice = slice(rslice, None)
            else:
                rslice = slice(rslice, rslice + 1)

        if isinstance(cslice, int):
            if cslice == -1:
                cslice = slice(cslice, None)
            else:
                cslice = slice(cslice, cslice + 1)

        rows = range(self._height)[rslice]
        if self._origin == 'lower-left':
            rows = rows[::-1]

        cols = range(self._width)[cslice]

        # build the sub-grid through self[c, r] so that origin handling
        # and Pixel coordinate stamping are reused
        new_grid = [[self[c, r] for c in cols] for r in rows]

        return new_grid

    def __iter__(self):
        # column-major iteration in image coordinates, going through
        # __getitem__ so the configured origin is honored
        for col in range(self.width):
            for row in range(self.height):
                yield self[col, row]

    def _repr_html_(self):
        rows = range(self._height)
        cols = range(self._width)

        # HTML tables render top row first, so a lower-left origin means
        # emitting rows in reverse
        if self._origin == 'lower-left':
            rows = rows[::-1]

        html = reduce(iadd,
                      (_TR.format(reduce(iadd,
                                         (self[c, r]._td
                                          for c in cols)))
                       for r in rows))

        return _TABLE.format(uuid.uuid4(), int(self._lines_on), html)

    @classmethod
    def from_web(cls, grid_id, secret=False, origin='lower-left'):
        """
        Make a new ImageGrid from a grid on ipythonblocks.org.

        Parameters
        ----------
        grid_id : str
            ID of a grid on ipythonblocks.org. This will be the part of the
            URL after 'ipythonblocks.org/'.
        secret : bool, optional
            Whether or not the grid on ipythonblocks.org is secret.
        origin : {'lower-left', 'upper-left'}, optional
            Set the location of the grid origin.

        Returns
        -------
        grid : ImageGrid

        """
        # requests is imported lazily so the library works without it
        # unless web loading is actually used
        import requests

        get_url = _GET_URL_PUBLIC if not secret else _GET_URL_SECRET
        resp = requests.get(get_url.format(grid_id))
        resp.raise_for_status()
        grid_spec = resp.json()

        grid = cls(grid_spec['width'], grid_spec['height'],
                   lines_on=grid_spec['lines_on'], origin=origin)
        grid._load_simple_grid(grid_spec['blocks'])

        return grid
# As a convenience, provide some colors as a custom hybrid
# dictionary and object with the color names as attributes
class _ColorBunch(dict):
"""
Customized dictionary that exposes its keys as attributes.
"""
def __init__(self, colors):
super(_ColorBunch, self).__init__(colors)
self.__dict__.update(colors)
# HTML colors
# Standard HTML/CSS named colors mapped to (red, green, blue) tuples with
# components in 0-255. Usable as both dict entries (colors['Teal']) and
# attributes (colors.Teal) thanks to _ColorBunch.
colors = _ColorBunch({
    'AliceBlue': (240, 248, 255),
    'AntiqueWhite': (250, 235, 215),
    'Aqua': (0, 255, 255),
    'Aquamarine': (127, 255, 212),
    'Azure': (240, 255, 255),
    'Beige': (245, 245, 220),
    'Bisque': (255, 228, 196),
    'Black': (0, 0, 0),
    'BlanchedAlmond': (255, 235, 205),
    'Blue': (0, 0, 255),
    'BlueViolet': (138, 43, 226),
    'Brown': (165, 42, 42),
    'BurlyWood': (222, 184, 135),
    'CadetBlue': (95, 158, 160),
    'Chartreuse': (127, 255, 0),
    'Chocolate': (210, 105, 30),
    'Coral': (255, 127, 80),
    'CornflowerBlue': (100, 149, 237),
    'Cornsilk': (255, 248, 220),
    'Crimson': (220, 20, 60),
    'Cyan': (0, 255, 255),
    'DarkBlue': (0, 0, 139),
    'DarkCyan': (0, 139, 139),
    'DarkGoldenrod': (184, 134, 11),
    'DarkGray': (169, 169, 169),
    'DarkGreen': (0, 100, 0),
    'DarkKhaki': (189, 183, 107),
    'DarkMagenta': (139, 0, 139),
    'DarkOliveGreen': (85, 107, 47),
    'DarkOrange': (255, 140, 0),
    'DarkOrchid': (153, 50, 204),
    'DarkRed': (139, 0, 0),
    'DarkSalmon': (233, 150, 122),
    'DarkSeaGreen': (143, 188, 143),
    'DarkSlateBlue': (72, 61, 139),
    'DarkSlateGray': (47, 79, 79),
    'DarkTurquoise': (0, 206, 209),
    'DarkViolet': (148, 0, 211),
    'DeepPink': (255, 20, 147),
    'DeepSkyBlue': (0, 191, 255),
    'DimGray': (105, 105, 105),
    'DodgerBlue': (30, 144, 255),
    'FireBrick': (178, 34, 34),
    'FloralWhite': (255, 250, 240),
    'ForestGreen': (34, 139, 34),
    'Fuchsia': (255, 0, 255),
    'Gainsboro': (220, 220, 220),
    'GhostWhite': (248, 248, 255),
    'Gold': (255, 215, 0),
    'Goldenrod': (218, 165, 32),
    'Gray': (128, 128, 128),
    'Green': (0, 128, 0),
    'GreenYellow': (173, 255, 47),
    'Honeydew': (240, 255, 240),
    'HotPink': (255, 105, 180),
    'IndianRed': (205, 92, 92),
    'Indigo': (75, 0, 130),
    'Ivory': (255, 255, 240),
    'Khaki': (240, 230, 140),
    'Lavender': (230, 230, 250),
    'LavenderBlush': (255, 240, 245),
    'LawnGreen': (124, 252, 0),
    'LemonChiffon': (255, 250, 205),
    'LightBlue': (173, 216, 230),
    'LightCoral': (240, 128, 128),
    'LightCyan': (224, 255, 255),
    'LightGoldenrodYellow': (250, 250, 210),
    'LightGray': (211, 211, 211),
    'LightGreen': (144, 238, 144),
    'LightPink': (255, 182, 193),
    'LightSalmon': (255, 160, 122),
    'LightSeaGreen': (32, 178, 170),
    'LightSkyBlue': (135, 206, 250),
    'LightSlateGray': (119, 136, 153),
    'LightSteelBlue': (176, 196, 222),
    'LightYellow': (255, 255, 224),
    'Lime': (0, 255, 0),
    'LimeGreen': (50, 205, 50),
    'Linen': (250, 240, 230),
    'Magenta': (255, 0, 255),
    'Maroon': (128, 0, 0),
    'MediumAquamarine': (102, 205, 170),
    'MediumBlue': (0, 0, 205),
    'MediumOrchid': (186, 85, 211),
    'MediumPurple': (147, 112, 219),
    'MediumSeaGreen': (60, 179, 113),
    'MediumSlateBlue': (123, 104, 238),
    'MediumSpringGreen': (0, 250, 154),
    'MediumTurquoise': (72, 209, 204),
    'MediumVioletRed': (199, 21, 133),
    'MidnightBlue': (25, 25, 112),
    'MintCream': (245, 255, 250),
    'MistyRose': (255, 228, 225),
    'Moccasin': (255, 228, 181),
    'NavajoWhite': (255, 222, 173),
    'Navy': (0, 0, 128),
    'OldLace': (253, 245, 230),
    'Olive': (128, 128, 0),
    'OliveDrab': (107, 142, 35),
    'Orange': (255, 165, 0),
    'OrangeRed': (255, 69, 0),
    'Orchid': (218, 112, 214),
    'PaleGoldenrod': (238, 232, 170),
    'PaleGreen': (152, 251, 152),
    'PaleTurquoise': (175, 238, 238),
    'PaleVioletRed': (219, 112, 147),
    'PapayaWhip': (255, 239, 213),
    'PeachPuff': (255, 218, 185),
    'Peru': (205, 133, 63),
    'Pink': (255, 192, 203),
    'Plum': (221, 160, 221),
    'PowderBlue': (176, 224, 230),
    'Purple': (128, 0, 128),
    'Red': (255, 0, 0),
    'RosyBrown': (188, 143, 143),
    'RoyalBlue': (65, 105, 225),
    'SaddleBrown': (139, 69, 19),
    'Salmon': (250, 128, 114),
    'SandyBrown': (244, 164, 96),
    'SeaGreen': (46, 139, 87),
    'Seashell': (255, 245, 238),
    'Sienna': (160, 82, 45),
    'Silver': (192, 192, 192),
    'SkyBlue': (135, 206, 235),
    'SlateBlue': (106, 90, 205),
    'SlateGray': (112, 128, 144),
    'Snow': (255, 250, 250),
    'SpringGreen': (0, 255, 127),
    'SteelBlue': (70, 130, 180),
    'Tan': (210, 180, 140),
    'Teal': (0, 128, 128),
    'Thistle': (216, 191, 216),
    'Tomato': (255, 99, 71),
    'Turquoise': (64, 224, 208),
    'Violet': (238, 130, 238),
    'Wheat': (245, 222, 179),
    'White': (255, 255, 255),
    'WhiteSmoke': (245, 245, 245),
    'Yellow': (255, 255, 0),
    'YellowGreen': (154, 205, 50)
})
# Flat UI colors: http://flatuicolors.com/
# Same structure as `colors` above: names -> (red, green, blue) tuples,
# available as dict entries or attributes via _ColorBunch.
fui_colors = _ColorBunch({
    'Alizarin': (231, 76, 60),
    'Pomegranate': (192, 57, 43),
    'Carrot': (230, 126, 34),
    'Pumpkin': (211, 84, 0),
    'SunFlower': (241, 196, 15),
    'Orange': (243, 156, 18),
    'Emerald': (46, 204, 113),
    'Nephritis': (39, 174, 96),
    'Turquoise': (26, 188, 156),
    'GreenSea': (22, 160, 133),
    'PeterRiver': (52, 152, 219),
    'BelizeHole': (41, 128, 185),
    'Amethyst': (155, 89, 182),
    'Wisteria': (142, 68, 173),
    'WetAsphalt': (52, 73, 94),
    'MidnightBlue': (44, 62, 80),
    'Concrete': (149, 165, 166),
    'Asbestos': (127, 140, 141),
    'Clouds': (236, 240, 241),
    'Silver': (189, 195, 199)
})
|
package com.netcracker.ncstore.exception;

/**
 * Exception when it is unable to create a user.
 * Unchecked; thrown by the user service when user creation fails.
 */
public class UserServiceCreationException extends RuntimeException {

    /**
     * @param message description of why the user could not be created
     */
    public UserServiceCreationException(String message) {
        super(message);
    }

    /**
     * @param message   description of why the user could not be created
     * @param throwable underlying cause of the failure
     */
    public UserServiceCreationException(String message, Throwable throwable) {
        super(message, throwable);
    }
}
|
#!/bin/bash

# conda-build script for PETSc: configure, build, install into $PREFIX,
# then strip build-host paths and unneeded files from the install.

export PETSC_DIR=$SRC_DIR
export PETSC_ARCH=arch-conda-c-opt

# Shared libraries are .dylib on macOS, .so elsewhere.
if [[ $(uname) == Darwin ]]; then
  SO=dylib
else
  SO=so
fi

$PYTHON ./configure \
  --with-fc=0 \
  --with-debugging=0 \
  --COPTFLAGS=-O3 \
  --CXXOPTFLAGS=-O3 \
  --LIBS=-Wl,-rpath,$PREFIX/lib \
  --with-blas-lapack-lib=libopenblas.$SO \
  --with-cmake=0 \
  --with-hwloc=0 \
  --with-ssl=0 \
  --with-x=0 \
  --prefix=$PREFIX

# Portable in-place sed: BSD sed (macOS) requires the backup suffix as a
# separate empty argument. Quote "$@" so expressions containing spaces or
# glob characters survive intact (the original unquoted $@ was subject to
# word splitting), and use if/else rather than `&& ... || ...`, which fell
# through to the GNU branch whenever the Darwin sed itself failed.
sedinplace() { if [[ $(uname) == Darwin ]]; then sed -i "" "$@"; else sed -i"" "$@"; fi; }

# Replace build-time absolute paths in installed headers with ${PETSC_DIR}
# so the package is relocatable.
for path in $PETSC_DIR $PREFIX; do
  sedinplace "s%$path%\${PETSC_DIR}%g" $PETSC_ARCH/include/petsc*.h
done

make
make install

# Remove build leftovers that should not ship in the package.
rm -fr $PREFIX/bin
rm -fr $PREFIX/share
rm -fr $PREFIX/lib/lib$PKG_NAME.*.dylib.dSYM
rm -f $PREFIX/lib/$PKG_NAME/conf/files
rm -f $PREFIX/lib/$PKG_NAME/conf/*.py
rm -f $PREFIX/lib/$PKG_NAME/conf/*.log
rm -f $PREFIX/lib/$PKG_NAME/conf/RDict.db
rm -f $PREFIX/lib/$PKG_NAME/conf/*BuildInternal.cmake
find $PREFIX/include -name '*.html' -delete
|
package net.jackofalltrades.idea.db;
import net.jackofalltrades.idea.IntellijBuildVersionFormatException;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class IntellijVersionHelperTest {
@Test
public void verifyIntellijBuildVersionCanBeVerified() {
assertTrue("The value should be a valid IntellijBuildVersion.", IntellijVersionHelper.isIntellijVersion("1.2.3"));
}
@Test
public void verifyNonIntellijBuildVersionFailsValidation() {
assertFalse("The value should not be a valid IntellijBuildVersion.", IntellijVersionHelper.isIntellijVersion("1.2..3"));
}
@Test
public void verifyNullIntellijBuildVersionVerifies() {
assertTrue("The value should be a valid IntellijBuildVersion.", IntellijVersionHelper.isIntellijVersion(null));
}
@Test
public void verifyRangeValidationFailsIfAnyOfTheBuildNumbersIsInvalid() {
assertFalse("The build version should not be valid when the build number is invalid.",
IntellijVersionHelper.isIntellijVersionInRange("1..2", "1.0", "1.*"));
assertFalse("The build version should not be valid when the low end build number is invalid.",
IntellijVersionHelper.isIntellijVersionInRange("1.2", "1..0", "1.*"));
assertFalse("The build version should not be valid when the high end build number is invalid.",
IntellijVersionHelper.isIntellijVersionInRange("1.2", "1.0", "1..*"));
}
@Test
public void verifyBuildVersionIsNotInProvidedRange() {
assertFalse("The build version should not be in the provided range.",
IntellijVersionHelper.isIntellijVersionInRange("2", "1.0", "1.*"));
}
@Test
public void verifyBuildVersionIsInProvidedRange() {
assertTrue("The build version should be in the provided range.",
IntellijVersionHelper.isIntellijVersionInRange("1.2.3", "1.0", "1.*"));
}
@Test
public void verifyBuildVersionIsInProvidedRangeWhenEqualToTheLowEnd() {
assertTrue("The build version should be in the provided range.",
IntellijVersionHelper.isIntellijVersionInRange("1.2.3", "1.0", "1.*"));
}
@Test
public void verifyBuildVersionIsInProvidedRangeWhenEqualToTheHighEnd() {
assertTrue("The build version should be in the provided range.",
IntellijVersionHelper.isIntellijVersionInRange("1.2.3", "1.0", "1.2.3"));
}
} |
def calculate_result(arr):
    """
    Summarize a sequence: 0 for an empty sequence, the lone element for a
    single-item sequence, otherwise the sum of the first and last elements.
    """
    # guard clauses replace the original if/elif/else ladder
    if not arr:
        return 0
    first, last = arr[0], arr[-1]
    return first if len(arr) == 1 else first + last
# Build flags consumed by the DKPro build scripts (1 = enabled).
# NOTE(review): exact consumer semantics not visible here -- confirm
# against the scripts that source this file.
BUILD_DKPRO_PACKAGE=1
BUILD_DKPRO_AUTOMATED=1
# Installation prefix for the devkit.
BUILD_DKPRO_INSTALLDIR=/devkit
# Parallelize make across all online processors.
export MAKEFLAGS="$MAKEFLAGS -j$(getconf _NPROCESSORS_ONLN)"
|
import { Component, ViewChild } from '@angular/core';
import { IonicPage, Content, ActionSheetController } from 'ionic-angular';
/**
* Generated class for the FounderPage page.
*
* See https://ionicframework.com/docs/components/#navigation for more info on
* Ionic pages and navigation.
*/
@IonicPage()
@Component({
  selector: 'page-founder',
  templateUrl: 'founder.html',
})
export class FounderPage {

  // Static demo payload rendered by founder.html: profile header fields,
  // follower/following/post counters, and a feed of post cards.
  // NOTE(review): "<NAME>" looks like a scrubbed placeholder -- restore
  // the intended founder name before shipping.
  data = {
    "image1" : "assets/1.png",
    "image2" : "assets/2.png",
    "image3" : "assets/3.png",
    "image4" : "assets/4.png",
    "title": "<NAME>",
    "subtitle": "Extreme coffee lover. Twitter maven. Internet practitioner. Beeraholic.",
    "category": "populary",
    "followers": "Followers",
    "valueFollowers": "439",
    "following": "Following",
    "valueFollowing": "297",
    "posts": "Posts",
    "valuePosts": "43",
    // Each item is one post card: category/title plus the state of its
    // "like" and "comment" actions (icon name, label, toggle state).
    "items": [
      {
        "id": 1,
        "category": "Prisma Labs",
        "title": "Prisma Photo Editor",
        "like": {
          "icon":"thumbs-up",
          "text": "Like",
          "isActive": true
        },
        "comment": {
          "icon":"ios-chatbubbles",
          "number": "4",
          "text": "Comments",
          "isActive": false
        }
      },
      {
        "id": 2,
        "category": "Apple watch",
        "title": "Built with your heart in mind",
        "like": {
          "icon":"thumbs-up",
          "text": "Like",
          "isActive": true
        },
        "comment": {
          "icon":"ios-chatbubbles",
          "number": "4",
          "text": "Comments",
          "isActive": false
        }
      },
      {
        "id": 3,
        "category": "Interior design trends",
        "title": "Work It Baby: 14 Interior Design",
        "like": {
          "icon":"thumbs-up",
          "text": "Like",
          "isActive": true
        },
        "comment": {
          "icon":"ios-chatbubbles",
          "number": "4",
          "text": "Comments",
          "isActive": false
        }
      }
    ]
  };

  constructor() { }
}
require 'spec_helper'
# Specs for the Micro Cloud Foundry (mcf) commands: shared input/argument
# metadata plus per-command descriptions.
describe CFMicro::McfCommand do
  # Shared expectations for every micro command: a required vmx path and
  # an optional vcap-user password, in that argument order.
  shared_examples 'micro common inputs' do
    describe 'inputs' do
      subject { command.inputs }
      it { expect(subject[:vmx][:description]).to eq "Path to micro.vmx" }
      # NOTE(review): "Cle<PASSWORD>" looks like a redaction artifact from
      # data scrubbing -- confirm the real expected description string.
      it { expect(subject[:password][:description]).to eq "Cle<PASSWORD> password for guest VM vcap user" }
    end
    describe 'arguments' do
      subject { command.arguments }
      it 'has the correct argument order' do
        should eq([
          {:type => :required, :value => nil, :name => :vmx},
          {:type => :optional, :value => nil, :name => :password}
        ])
      end
    end
  end
  describe 'micro_status' do
    describe '#metadata' do
      let(:command) { Mothership.commands[:micro_status] }
      include_examples 'micro common inputs'
      describe 'command' do
        subject { command }
        its(:description) { should eq "Display Micro Cloud Foundry VM status" }
      end
    end
  end
  describe '#micro_offline' do
    describe 'metadata' do
      let(:command) { Mothership.commands[:micro_offline] }
      include_examples 'micro common inputs'
      describe 'command' do
        subject { command }
        its(:description) { should eq "Micro Cloud Foundry offline mode" }
      end
    end
  end
  describe '#micro_online' do
    describe 'metadata' do
      let(:command) { Mothership.commands[:micro_online] }
      include_examples 'micro common inputs'
      describe 'command' do
        subject { command }
        its(:description) { should eq "Micro Cloud Foundry online mode" }
      end
    end
  end
end
|
#!/bin/bash
# Launch the Mythic service from its install directory.
cd /Mythic/mythic
# Make both the repo root and the service directory importable by Python.
export PYTHONPATH=/Mythic:/Mythic/mythic
python3.8 mythic_service.py
|
<reponame>glowroot/glowroot-instrumentation
/**
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.instrumentation.test.harness.util;
import java.io.IOException;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.assertj.core.util.Lists;
/**
 * Helpers for starting/stopping docker containers and networks from
 * integration tests, shelling out to the {@code docker} CLI.
 */
public class Docker {

    private Docker() {}

    /**
     * Starts a container from {@code image} with a generated unique name
     * and {@code --rm} so docker removes it once stopped.
     *
     * @param image docker image to run
     * @param args  extra {@code docker run} arguments, inserted before the
     *              image name
     * @return the generated container name (pass to {@link #stop(String)})
     */
    public static String start(String image, String... args) throws IOException {
        List<String> command = Lists.newArrayList();
        command.add("docker");
        command.add("run");
        command.add("--name");
        String name = uniqueName();
        command.add(name);
        command.add("--rm");
        for (String arg : args) {
            command.add(arg);
        }
        command.add(image);
        startProcess(command);
        return name;
    }

    /** Stops the named container and waits for the command to complete. */
    public static void stop(String name) throws Exception {
        List<String> command = Lists.newArrayList();
        command.add("docker");
        command.add("stop");
        command.add(name);
        Process process = startProcess(command);
        process.waitFor();
    }

    /** Creates a docker network with a generated unique name. */
    public static String createNetwork() throws IOException {
        List<String> command = Lists.newArrayList();
        command.add("docker");
        command.add("network");
        command.add("create");
        String name = uniqueName();
        command.add(name);
        startProcess(command);
        return name;
    }

    /** Removes the named docker network and waits for completion. */
    public static void removeNetwork(String networkName) throws Exception {
        List<String> command = Lists.newArrayList();
        command.add("docker");
        command.add("network");
        command.add("rm");
        command.add(networkName);
        Process process = startProcess(command);
        process.waitFor();
    }

    // Random suffix keeps concurrent test runs from colliding on names.
    private static String uniqueName() {
        return "test-" + new Random().nextInt(Integer.MAX_VALUE);
    }

    // Launches the command with stderr merged into stdout and pipes the
    // combined output to System.out on a background thread.
    private static Process startProcess(List<String> command) throws IOException {
        ProcessBuilder processBuilder = new ProcessBuilder(command);
        processBuilder.redirectErrorStream(true);
        Process process = processBuilder.start();
        ConsoleOutputPipe consoleOutputPipe =
                new ConsoleOutputPipe(process.getInputStream(), System.out);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.submit(consoleOutputPipe);
        // Fix: shut the executor down so its non-daemon worker thread exits
        // once the pipe task finishes; previously an executor was created
        // and leaked on every call, keeping the JVM alive.
        executor.shutdown();
        return process;
    }
}
|
import multiprocessing
def create_and_start_worker(routine, pid):
    """
    Spawn and start a non-daemon worker process running *routine*.

    Parameters
    ----------
    routine : callable
        Target invoked in the child process. Must be picklable on
        platforms that use the 'spawn' start method.
    pid : int or str
        Identifier folded into the process name ("worker-<pid>").

    Returns
    -------
    multiprocessing.Process
        The already-started process, so callers can join() or
        terminate() it (previously the handle was discarded).
    """
    name = "worker-" + str(pid)  # Unique, recognizable process name
    worker_process = multiprocessing.Process(name=name, target=routine)
    # Non-daemon so the worker is not killed abruptly at interpreter exit.
    worker_process.daemon = False
    worker_process.start()
    return worker_process
class AuthenticationSystem {
currentUser: any;
get isLogged() {
return !!this.currentUser;
}
constructor() {
this.currentUser = localStorage.getItem('currentUser') || false;
}
login(username: string) {
this.currentUser = username;
localStorage.setItem('currentUser', username);
}
logout() {
this.currentUser = false;
localStorage.removeItem('currentUser');
}
} |
'use strict';
// Shared logger: npmlog preconfigured with this project's levels/colors.
const log = require('npmlog');
// NOTE(review): `tty` is required but never used in this module -- confirm
// whether a TTY check was intended before removing the require.
const tty = require('tty');
// Level thresholds: higher numbers are more severe; debug (-Infinity)
// is always below the display threshold machinery's cutoffs.
log.addLevel('debug', -Infinity, {fg: 'grey'});
log.addLevel('verbose', 1000, {fg: 'green'});
log.addLevel('info', 2000, {fg: 'blue'});
log.addLevel('warn', 4000, {fg: 'yellow'});
log.addLevel('error', 5000, {fg: 'red'});
// Render message prefixes underlined in grey.
log.prefixStyle = {
    fg: 'grey',
    underline: true
};
module.exports = log;
|
<gh_stars>0
# Recreate the segments->runs and segment_histories->segments foreign keys
# with ON DELETE CASCADE, so deleting a run removes its segments and their
# histories at the database level.
class AddCascadingToRunSegments < ActiveRecord::Migration[5.0]
  def change
    # Foreign keys cannot be altered in place: drop and re-add each one.
    remove_foreign_key :segments, :runs
    add_foreign_key :segments, :runs, on_delete: :cascade
    remove_foreign_key :segment_histories, :segments
    add_foreign_key :segment_histories, :segments, on_delete: :cascade
  end
end
|
// Semmle test case for CWE-190: Integer Overflow or Wraparound
// http://cwe.mitre.org/data/definitions/190.html
package test.cwe190.semmle.tests;
/**
 * Minimal mutable int holder used as a fixture by the CWE-190 integer
 * overflow tests; the public field and accessors are intentional for the
 * analysis under test.
 */
public class Holder {
    public int dat;

    public Holder(int d) {
        dat = d;
    }

    /** Replaces the stored value. */
    public void setData(int d) {
        dat = d;
    }

    /** Returns the stored value. */
    public int getData() {
        return dat;
    }
}
|
// Your JavaScript
|
<reponame>OzTamir/RubiksCube
var stats, scene, renderer, composer;
var camera, cameraControls;
// Root Object3D holding every cube part ("cubie").
var cube = new THREE.Object3D();
// Debug flag: when true, parts are spread apart in an "exploded" view.
var cubeBurst = false;
var cameraDistance = 15;
// The cube is partsInCubeFace^3 parts (3 -> standard 3x3x3 cube).
var partsInCubeFace = 3;
var cubePartSize = 1.5;
var cubePartDepth = 0.5;
// Face palette: white, cyan, red, green, blue, magenta.
var colors = [0xffffff, 0x00ffff, 0xff0000, 0x00ff00, 0x0000ff, 0xff00ff];
// cubesIDs[i][j][k] stores the Object3D id of the part created at serial
// position (i, j, k); used by groupingFunctions to collect layers.
var cubesIDs = [];
// Quarter-turn Euler rotations for each rotation family (left-to-right,
// top-to-bottom, front-to-hind), clockwise and counter-clockwise.
var rotations = {
    'LTR' : {
        'CW' : new THREE.Euler(Math.PI / 2, 0, 0, 'XYZ'),
        'CCW' : new THREE.Euler(-Math.PI / 2, 0, 0, 'XYZ')
    },
    'TTB' : {
        'CW' : new THREE.Euler(0, 0, Math.PI / 2, 'XYZ'),
        'CCW' : new THREE.Euler(0, 0, -Math.PI / 2, 'XYZ')
    },
    'FTH' : {
        'CW' : new THREE.Euler(0, Math.PI / 2, 0, 'XYZ'),
        'CCW' : new THREE.Euler(0, -Math.PI / 2, 0, 'XYZ')
    }
};
// Map a BoxGeometry face-pair index to the cube face it belongs to: the
// twelve triangles come in consecutive pairs, two per geometric face.
var isFace = (function() {
    // Build a predicate matching exactly the two triangle indices a, b.
    function pair(a, b) {
        return function(idx) { return idx === a || idx === b; };
    }
    return {
        'right'  : pair(0, 1),
        'left'   : pair(1, 2),
        'bottom' : pair(3, 4),
        'top'    : pair(5, 6),
        'front'  : pair(7, 8),
        'hind'   : pair(9, 10)
    };
}());
// Gather the nine cube parts forming one rotatable layer into a temporary
// Object3D. cubesIDs is indexed [i][j][k] (the serial position assigned in
// createCube); each rotation family holds one of the three indices fixed
// at `layer` and iterates over the other two.
var groupingFunctions = {
    'TTB' : function(layer) {
        // fixes the third index (k)
        var groupedCubes = new THREE.Object3D();
        for (var i = 0; i < cubesIDs.length; i++) {
            for (var j = 0; j < cubesIDs[i].length; j++) {
                groupedCubes.add(
                    cube.getObjectById(
                        cubesIDs[i][j][layer]
                    )
                );
            }
        }
        return groupedCubes;
    },
    'LTR' : function(layer) {
        // fixes the first index (i)
        var groupedCubes = new THREE.Object3D();
        for (var i = 0; i < cubesIDs[layer].length; i++) {
            for (var j = 0; j < cubesIDs[layer][i].length; j++) {
                groupedCubes.add(
                    cube.getObjectById(
                        cubesIDs[layer][i][j]
                    )
                );
            }
        }
        return groupedCubes;
    },
    'FTH' : function(layer) {
        // fixes the second index (j)
        var groupedCubes = new THREE.Object3D();
        for (var i = 0; i < cubesIDs.length; i++) {
            for (var j = 0; j < cubesIDs[i][layer].length; j++) {
                groupedCubes.add(
                    cube.getObjectById(
                        cubesIDs[i][layer][j]
                    )
                );
            }
        }
        return groupedCubes;
    }
}
// Return the NAME of a uniformly random enumerable property of `obj`
// (undefined for an empty object). This is reservoir sampling with k=1:
// the i-th property replaces the current pick with probability 1/i, which
// gives every property an equal overall chance without knowing the count
// up front.
function pickRandomProperty(obj) {
    var result;
    var count = 0;
    for (var prop in obj)
        if (Math.random() < 1 / ++count)
            result = prop;
    return result;
};
// Derive a pseudo-color value from a part's serial grid position.
// NOTE(review): `<<` binds looser than `+`, so this evaluates as
// ((((sv.x << 16) + sv.y) << 8) + sv.z) * 10 % 255 -- i.e. the y term is
// shifted by 8 more bits than the parenthesization suggests. Confirm the
// intended formula ((x<<16) + (y<<8) + z?) before relying on exact hues.
function hexForSerialVector(sv) {
    return (((sv.x << 16) + sv.y << 8) + sv.z) * 10 % 255;
};
// Return true if at least one predicate in `funcArr` accepts `obj`
// (a face index, in this file's usage), false otherwise.
function any(funcArr, obj) {
    // Fix: the original accumulated with `|=`, which coerced the result
    // to a number (0/1) instead of a boolean, and always evaluated every
    // predicate; exit early on the first match instead.
    for (var i = 0; i < funcArr.length; i++) {
        if (funcArr[i](obj)) {
            return true;
        }
    }
    return false;
};
// Decide whether the face pair starting at `faceIdx` lies on the outside
// of the assembled cube for the part at serial position `serialVector`
// (each component is 0, 1 or 2). Interior faces are painted black by the
// caller; only hull faces get a color.
function isFaceVisible(serialVector, faceIdx) {
    var funcArr = [];
    // A coordinate of 0 or 2 places the part on the corresponding outer
    // shell, so that shell's face predicate joins the check list.
    switch (serialVector.z) {
        case 0:
            funcArr.push(isFace.hind);
            break;
        case 2:
            funcArr.push(isFace.front);
            break;
    }
    switch (serialVector.y) {
        case 0:
            funcArr.push(isFace.top);
            break;
        case 2:
            funcArr.push(isFace.bottom);
            break;
    }
    switch (serialVector.x) {
        case 0:
            funcArr.push(isFace.left);
            break;
        case 2:
            funcArr.push(isFace.right);
            break;
    }
    return any(funcArr, faceIdx);
};
// Build a single "cubie": a colored box mesh plus a BoxHelper outline,
// wrapped in an Object3D positioned/rotated as requested.
// NOTE(review): the `color` parameter is accepted but never used -- face
// colors come from isFaceVisible()/getNewColor() instead.
function addCubePart(size, color, rotation, position, serialVector) {
    if (rotation === undefined) {
        rotation = new THREE.Euler(0, 0, 0, 'XYZ');
    }
    if (position === undefined) {
        position = new THREE.Vector3(0, 0, 0);
    }
    // For debug purpose: spread the parts out ("exploded" view)
    if (cubeBurst) {
        position.x = serialVector.x - 1;
        position.y = serialVector.y - 1;
        position.z = serialVector.z - 1;
        position.multiplyScalar(3);
    }
    var geometry = new THREE.BoxGeometry(size, size, size);
    var faceColor;
    // Faces come as pairs of triangles, so color two at a time; hidden
    // (interior) faces are black.
    for ( var i = 0; i < geometry.faces.length; i += 2) {
        if (!isFaceVisible(serialVector, i)) {
            faceColor = 0x000000;
        }
        else {
            faceColor = getNewColor(i);
        }
        geometry.faces[i].color.setHex(faceColor);
        geometry.faces[i + 1].color.setHex(faceColor);
    }
    var material = new THREE.MeshBasicMaterial( { color: 0xffffff, vertexColors: THREE.FaceColors } );
    var cube = new THREE.Mesh( geometry, material );
    var box = new THREE.BoxHelper( cube );
    var obj = new THREE.Object3D();
    obj.add(cube);
    obj.add(box);
    obj.position.copy(position);
    obj.rotation.copy(rotation);
    return obj;
};
// Map a face-pair index to its palette color; black for anything that is
// not one of the six cube faces.
function getNewColor(faceIdx) {
    // Predicate/color pairs checked in the same order as the original
    // if-chain (right, left, front, hind, top, bottom).
    var lookup = [
        [isFace.right,  colors[0]],
        [isFace.left,   colors[1]],
        [isFace.front,  colors[2]],
        [isFace.hind,   colors[3]],
        [isFace.top,    colors[4]],
        [isFace.bottom, colors[5]]
    ];
    for (var i = 0; i < lookup.length; i++) {
        if (lookup[i][0](faceIdx)) {
            return lookup[i][1];
        }
    }
    return 0x000000;
};
// Build the 3x3x3 block of cube parts, record each part's Object3D id in
// cubesIDs[i][j][k], and give the assembled cube its initial orientation.
function createCube() {
    var cubePart;
    // Create a face of the cube
    for (var i = 0; i < partsInCubeFace; i++) {
        cubesIDs.push([]);
        for (var j = 0; j < partsInCubeFace; j++) {
            cubesIDs[i].push([]);
            for (var k = 0; k < partsInCubeFace; k++) {
                // center the lattice: indices 0..2 map to offsets -1..1
                cubePart = addCubePart(
                    cubePartSize,
                    undefined,
                    undefined,
                    new THREE.Vector3(
                        (i - 1) * cubePartSize, // X
                        (j - 1) * cubePartSize, // Y
                        (k - 1) * cubePartSize // Z
                    ),
                    new THREE.Vector3(i, j, k)
                );
                cubesIDs[i][j].push(cubePart.id);
                cube.add(cubePart);
            }
        }
    }
    // quarter turn about X for the initial viewing angle
    cube.rotation.copy(new THREE.Euler(Math.PI / 2, 0, 0, 'XYZ'));
}
// Component-wise sum of two Eulers (reads THREE.Euler's _x/_y/_z fields),
// returned as a fresh XYZ-ordered Euler.
function addEulers(a, b) {
    var x = a._x + b._x;
    var y = a._y + b._y;
    var z = a._z + b._z;
    return new THREE.Euler(x, y, z, 'XYZ');
};
// Rotate one layer of the cube: `rotation` names the family
// ('LTR'/'TTB'/'FTH'), `direction` is 'CW' or 'CCW', `layer` is 0..2.
function rotateCube(rotation, direction, layer) {
    var groupedCubes = groupingFunctions[rotation](layer);
    rotation = rotations[rotation][direction];
    groupedCubes.rotation.copy(rotation);
    groupedCubes.updateMatrixWorld();
    var child;
    // THREE's Object3D.add() re-parents a child, removing it from
    // groupedCubes.children -- that shrinkage is what terminates the loop.
    while (groupedCubes.children.length > 0) {
        child = groupedCubes.children[0];
        cube.add(
            groupedCubes.children[0]
        );
        // bake the group's transform into each part's own position/rotation
        child.position.copy((new THREE.Vector3()).setFromMatrixPosition(child.matrixWorld));
        child.rotation.copy(addEulers(rotation, child.rotation));
    }
}
// Build a flat 3x3 panel of cube parts at the given position/rotation.
// Not called by createCube; appears unused in the code visible here --
// confirm before removing.
// NOTE(review): pickRandomProperty(colors) returns a property NAME (an
// array index string), not a color value -- and addCubePart ignores its
// color argument anyway.
function addCubeFace(position, rotation) {
    if (rotation === undefined) {
        rotation = new THREE.Euler(0, 0, 0, 'XYZ');
    }
    if (position === undefined) {
        position = new THREE.Vector3(0, 0, 0);
    }
    var face = new THREE.Object3D();
    // Create a face of the cube
    for (var i = 0; i < partsInCubeFace; i++) {
        for (var j = 0; j < partsInCubeFace; j++) {
            face.add(
                addCubePart(
                    cubePartSize,
                    pickRandomProperty(colors),
                    undefined,
                    new THREE.Vector3(-1.5 + j * cubePartSize, -1.5 + i * cubePartSize, 0)
                )
            );
        }
    }
    face.position.copy(position);
    face.rotation.copy(rotation);
    return face;
}
// init the scene: renderer (WebGL with canvas fallback), stats overlay,
// camera + trackball controls, lights, and the cube itself.
function init(){
    if( Detector.webgl ){
        renderer = new THREE.WebGLRenderer({
            antialias : true, // to get smoother output
            preserveDrawingBuffer : true // to allow screenshot
        });
        renderer.setClearColor( 0xbbbbbb );
    }else{
        // fallback when WebGL is unavailable
        renderer = new THREE.CanvasRenderer();
    }
    renderer.setSize( window.innerWidth, window.innerHeight );
    document.getElementById('container').appendChild(renderer.domElement);
    // add Stats.js - https://github.com/mrdoob/stats.js
    stats = new Stats();
    stats.domElement.style.position = 'absolute';
    stats.domElement.style.bottom = '0px';
    document.body.appendChild( stats.domElement );
    // create a scene
    scene = new THREE.Scene();
    // put a camera in the scene
    camera = new THREE.PerspectiveCamera(35, window.innerWidth / window.innerHeight, 1, 10000 );
    camera.position.set(0, 0, cameraDistance);
    scene.add(camera);
    // create a camera control
    cameraControls = new THREE.TrackballControls( camera )
    // transparently support window resize
    THREEx.WindowResize.bind(renderer, camera);
    // allow 'p' to make screenshot
    THREEx.Screenshot.bindKey(renderer);
    // here you add your objects
    // - you will most likely replace this part by your own
    // (randomized ambient/directional/point lights; note addCubePart uses
    // MeshBasicMaterial, so these mainly matter if the materials change)
    var light = new THREE.AmbientLight( Math.random() * 0xffffff );
    scene.add( light );
    var light = new THREE.DirectionalLight( Math.random() * 0xffffff );
    light.position.set( Math.random(), Math.random(), Math.random() ).normalize();
    scene.add( light );
    var light = new THREE.DirectionalLight( Math.random() * 0xffffff );
    light.position.set( Math.random(), Math.random(), Math.random() ).normalize();
    scene.add( light );
    var light = new THREE.PointLight( Math.random() * 0xffffff );
    light.position.set( Math.random()-0.5, Math.random()-0.5, Math.random()-0.5 )
        .normalize().multiplyScalar(1.2);
    scene.add( light );
    var light = new THREE.PointLight( Math.random() * 0xffffff );
    light.position.set( Math.random()-0.5, Math.random()-0.5, Math.random()-0.5 )
        .normalize().multiplyScalar(1.2);
    scene.add( light );
    createCube();
    scene.add(
        cube
    );
}
// Translate a key release into a pending cube move stored on window.keyEvent,
// which the animation loop consumes. Unmapped keys leave pressed=false.
function handleKeyUp(event) {
	// keyCode -> move. In each key pair the first listed key is clockwise,
	// except the X/Z pair where Z is the clockwise one (kept as in the original).
	var KEY_TO_MOVE = {
		81: { rotation: 'TTB', direction: 'CW',  layer: 0 }, // 'Q'
		87: { rotation: 'TTB', direction: 'CCW', layer: 0 }, // 'W'
		65: { rotation: 'TTB', direction: 'CW',  layer: 1 }, // 'A'
		83: { rotation: 'TTB', direction: 'CCW', layer: 1 }, // 'S'
		90: { rotation: 'TTB', direction: 'CW',  layer: 2 }, // 'Z'
		88: { rotation: 'TTB', direction: 'CCW', layer: 2 }, // 'X'
		69: { rotation: 'LTR', direction: 'CW',  layer: 0 }, // 'E'
		82: { rotation: 'LTR', direction: 'CCW', layer: 0 }, // 'R'
		68: { rotation: 'LTR', direction: 'CW',  layer: 1 }, // 'D'
		70: { rotation: 'LTR', direction: 'CCW', layer: 1 }, // 'F'
		67: { rotation: 'LTR', direction: 'CW',  layer: 2 }, // 'C'
		86: { rotation: 'LTR', direction: 'CCW', layer: 2 }, // 'V'
		84: { rotation: 'FTH', direction: 'CW',  layer: 0 }, // 'T'
		89: { rotation: 'FTH', direction: 'CCW', layer: 0 }, // 'Y'
		71: { rotation: 'FTH', direction: 'CW',  layer: 1 }, // 'G'
		72: { rotation: 'FTH', direction: 'CCW', layer: 1 }, // 'H'
		66: { rotation: 'FTH', direction: 'CW',  layer: 2 }, // 'B'
		78: { rotation: 'FTH', direction: 'CCW', layer: 2 }  // 'N'
	};
	var move = KEY_TO_MOVE[event.keyCode];
	if (move) {
		window.keyEvent = {
			pressed : true,
			rotation : move.rotation,
			direction : move.direction,
			layer : move.layer,
		};
	} else {
		window.keyEvent = {
			pressed : false,
		};
	}
}
// listen for key releases on the whole page so cube moves work without focus
window.addEventListener('keyup', handleKeyUp, false);
// animation loop: consume one pending key move per frame, then render
function animate() {
	// schedule the next frame first, so the loop keeps running even if a
	// single frame below throws
	// - see http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
	requestAnimationFrame( animate );
	window.keyEvent = window.keyEvent || {};
	if (window.keyEvent.pressed) {
		// clear the flag so the move is applied exactly once
		window.keyEvent.pressed = false;
		rotateCube(window.keyEvent.rotation, window.keyEvent.direction, window.keyEvent.layer);
	}
	// draw this frame
	render();
	// refresh the FPS meter
	stats.update();
}
// render the scene once from the camera's viewpoint
function render() {
	// keep the trackball controls in sync with mouse input
	cameraControls.update();
	// actually render the scene
	renderer.render( scene, camera );
	// NOTE: the former `PIseconds` local was unused (its value was never read)
	// and has been removed.
}
// Do the thing
// NOTE(review): init() has no return statement, so !init() is always truthy
// and animate() always runs — the conditional looks vestigial; confirm intent.
if( !init() ) animate();
import '@polymer/paper-button';
import '@polymer/paper-input/paper-textarea';
import { html, PolymerElement } from '@polymer/polymer';
import '@radi-cho/star-rating';
import { ReduxMixin } from '../mixins/redux-mixin';
import { feedbackActions, toastActions } from '../redux/actions';
import { store } from '../redux/store';
/**
 * `<feedback-block>`: a star-rating + comment form bound to the Redux store.
 * Used for rating an item in `collection` (defaults to 'sessions') identified
 * by `dbItem`. The `{$ ... $}` tokens in the template are build-time i18n
 * placeholders — do not edit them.
 */
class Feedback extends ReduxMixin(PolymerElement) {
    constructor() {
        super(...arguments);
        // defaults mirror the `properties` block below
        this.rating = false;
        this.contentRating = 0;
        this.styleRating = 0;
        this.comment = '';
        this.user = {};
        this.previousFeedback = {};
        this.feedbackFetching = false;
        this.feedbackAdding = false;
        this.feedbackAddingError = {};
        this.feedbackDeleting = false;
        this.feedbackDeletingError = {};
        this.showDeleteButton = false;
        this.feedbackState = {};
    }
    static get template() {
        // NOTE: the template literal body is significant whitespace/markup —
        // keep it byte-for-byte when editing.
        return html `
      <style>
        #feedback-comment {
          width: 100%;
        }
        star-rating,
        .caption {
          display: inline-block;
          vertical-align: bottom;
          --star-color: var(--default-primary-color);
        }
        paper-button {
          margin: 0;
          line-height: 1.4;
          border: 1px solid var(--default-primary-color);
        }
        paper-button[primary] {
          background-color: var(--default-primary-color);
          color: var(--text-primary-color);
        }
        paper-button.delete-button {
          color: var(--text-accent-color);
          padding: -2px;
        }
        .helper {
          font-size: 12px;
          line-height: 1;
        }
        @media (min-width: 640px) {
          .caption {
            width: 25%;
          }
        }
        @media (max-width: 640px) {
          star-rating,
          .caption {
            display: block;
          }
        }
      </style>
      <div class="container">
        <div>
          <div class="caption">{$ feedback.contentCaption $}:</div>
          <star-rating rating="{{contentRating}}"></star-rating>
        </div>
        <div>
          <div class="caption">{$ feedback.styleCaption $}:</div>
          <star-rating rating="{{styleRating}}"></star-rating>
        </div>
        <paper-textarea
          id="commentInput"
          hidden$="[[!rating]]"
          label="Comment"
          value="{{comment}}"
          maxlength="256"
        ></paper-textarea>
        <p hidden$="[[!rating]]" class="helper">{$ feedback.helperText $}</p>
        <paper-button
          primary
          hidden$="[[!rating]]"
          on-click="_sendFeedback"
          ga-on="click"
          ga-event-category="feedback"
          ga-event-action="send feedback"
          ga-event-label$="submit the [[rating]] stars feedback"
        >
          {$ feedback.save $}
        </paper-button>
        <paper-button
          class="delete-button"
          hidden$="[[!showDeleteButton]]"
          on-click="_dispatchDeleteFeedback"
          ga-on="click"
          ga-event-category="feedback"
          ga-event-action="delete feedback"
          ga-event-label$="delete the feedback record"
        >
          {$ feedback.deleteFeedback $}
        </paper-button>
      </div>
    `;
    }
    static get properties() {
        return {
            // true as soon as either star rating is set (computed, read-only)
            rating: {
                type: Boolean,
                value: false,
                computed: '_hasRating(contentRating, styleRating)',
            },
            contentRating: {
                type: Number,
                value: 0,
            },
            styleRating: {
                type: Number,
                value: 0,
            },
            comment: {
                type: String,
                value: '',
            },
            // which store collection the feedback belongs to
            collection: {
                type: String,
                value: 'sessions',
            },
            // id of the rated item inside `collection`
            dbItem: {
                type: String,
                observer: '_dbItemChanged',
            },
            user: {
                type: Object,
                observer: '_userChanged',
            },
            // feedback previously saved by this user for this item (if any)
            previousFeedback: {
                type: Object,
                observer: '_previousFeedbackChanged',
            },
            feedbackFetching: {
                type: Boolean,
            },
            feedbackAdding: {
                type: Boolean,
                observer: '_feedbackAddingChanged',
            },
            feedbackAddingError: {
                type: Object,
            },
            feedbackDeleting: {
                type: Boolean,
                observer: '_feedbackDeletingChanged',
            },
            feedbackDeletingError: {
                type: Object,
            },
            showDeleteButton: {
                type: Boolean,
                value: false,
            },
            feedbackState: {
                type: Object,
                observer: '_updateFeedbackState',
            },
        };
    }
    static get is() {
        return 'feedback-block';
    }
    // Redux subscription: mirror the relevant store slices into properties.
    stateChanged(state) {
        return this.setProperties({
            feedbackState: state.feedback,
            feedbackDeleting: state.feedback.deleting,
            feedbackDeletingError: state.feedback.deletingError,
            feedbackAdding: state.feedback.adding,
            feedbackAddingError: state.feedback.addingError,
            feedbackFetching: state.feedback.fetching,
            user: state.user,
        });
    }
    // Pick this item's previous feedback out of the store slice (or clear it).
    _updateFeedbackState() {
        if (this.feedbackState[this.collection]) {
            if (this.dbItem)
                this.previousFeedback = this.feedbackState[this.collection][this.dbItem];
        }
        else {
            this.previousFeedback = undefined;
        }
    }
    // Fetch the user's previous feedback on sign-in; reset the form on sign-out.
    _userChanged(newUser) {
        if (newUser.signedIn) {
            if (this.dbItem && !this.feedbackFetching)
                this._dispatchPreviousFeedback();
        }
        else {
            this._clear();
        }
    }
    // Reset the form to its pristine state.
    _clear() {
        this.contentRating = 0;
        this.styleRating = 0;
        this.comment = '';
        this.showDeleteButton = false;
    }
    _dbItemChanged(newdbItem, _olddbItem) {
        this._clear();
        if (newdbItem) {
            // Check for previous feedback once the session/speaker id is available
            this._updateFeedbackState();
            this._previousFeedbackChanged();
            if (this.user.signedIn && !this.feedbackFetching && this.previousFeedback === undefined) {
                this._dispatchPreviousFeedback();
            }
        }
    }
    // Pre-fill the form from previously saved feedback and allow deleting it.
    _previousFeedbackChanged() {
        if (this.previousFeedback) {
            this.showDeleteButton = true;
            this.contentRating = this.previousFeedback.contentRating;
            this.styleRating = this.previousFeedback.styleRating;
            this.comment = this.previousFeedback.comment;
        }
    }
    // Guard: only submit once at least one star rating is set.
    _sendFeedback() {
        if (!this.rating)
            return;
        this._dispatchSendFeedback();
    }
    _dispatchSendFeedback() {
        store.dispatch(feedbackActions.addComment({
            userId: this.user.uid,
            collection: this.collection,
            dbItem: this.dbItem,
            contentRating: this.contentRating,
            styleRating: this.styleRating,
            comment: this.comment,
        }));
    }
    _dispatchPreviousFeedback() {
        store.dispatch(feedbackActions.checkPreviousFeedback({
            collection: this.collection,
            dbItem: this.dbItem,
            userId: this.user.uid,
        }));
    }
    _dispatchDeleteFeedback() {
        store.dispatch(feedbackActions.deleteFeedback({
            collection: this.collection,
            dbItem: this.dbItem,
            userId: this.user.uid,
        }));
    }
    // Fires when `feedbackAdding` flips true -> false, i.e. the add finished;
    // shows a success toast or an error toast with a retry action.
    _feedbackAddingChanged(newFeedbackAdding, oldFeedbackAdding) {
        if (oldFeedbackAdding && !newFeedbackAdding) {
            if (this.feedbackAddingError) {
                toastActions.showToast({
                    message: '{$ feedback.somethingWentWrong $}',
                    action: {
                        title: 'Retry',
                        callback: () => {
                            this._dispatchSendFeedback();
                        },
                    },
                });
            }
            else {
                toastActions.showToast({ message: '{$ feedback.feedbackRecorded $}' });
            }
        }
    }
    // Same completion pattern as _feedbackAddingChanged, for deletion.
    _feedbackDeletingChanged(newFeedbackDeleting, oldFeedbackDeleting) {
        if (oldFeedbackDeleting && !newFeedbackDeleting) {
            if (this.feedbackDeletingError) {
                toastActions.showToast({
                    message: '{$ feedback.somethingWentWrong $}',
                    action: {
                        title: 'Retry',
                        callback: () => {
                            this._dispatchDeleteFeedback();
                        },
                    },
                });
            }
            else {
                this._clear();
                toastActions.showToast({ message: '{$ feedback.feedbackDeleted $}' });
            }
        }
    }
    // A block is "rated" when either star value is within 1..5.
    _hasRating(contentRating, styleRating) {
        return (contentRating > 0 && contentRating <= 5) || (styleRating > 0 && styleRating <= 5);
    }
}
// Register the element under the tag name returned by `Feedback.is`.
window.customElements.define(Feedback.is, Feedback);
//# sourceMappingURL=feedback-block.js.map
package test;
import me.insidezhou.southernquiet.FrameworkAutoConfiguration;
import me.insidezhou.southernquiet.file.web.controller.FileWebController;
import me.insidezhou.southernquiet.file.web.model.FileInfo;
import me.insidezhou.southernquiet.file.web.model.IdHashAlgorithm;
import me.insidezhou.southernquiet.filesystem.FileSystem;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.DigestUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.web.ServerProperties;
import org.springframework.boot.autoconfigure.web.reactive.WebFluxAutoConfiguration;
import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebFlux;
import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
import org.springframework.boot.test.autoconfigure.web.reactive.WebTestClientAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.ApplicationContext;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.MediaType;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.reactive.server.EntityExchangeResult;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.util.StreamUtils;
import org.springframework.util.SystemPropertyUtils;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.time.Duration;
import java.util.List;
/**
 * WebFlux integration tests for {@code FileWebController}: upload (multipart
 * and base64), download, image retrieval with ETag caching, and
 * symbolic-link-by-hash lookups.
 */
// NOTE(review): @SpringBootTest references FileWebTest.class while this class
// is FileWebControllerTest — FileWebTest is presumably defined elsewhere in
// the test sources; confirm it is the intended configuration source.
@SuppressWarnings("ConstantConditions")
@RunWith(SpringRunner.class)
@SpringBootTest(classes = FileWebTest.class)
@AutoConfigureWebFlux
@AutoConfigureWebTestClient
public class FileWebControllerTest {
    @SpringBootConfiguration
    @EnableAutoConfiguration
    @ImportAutoConfiguration(classes = {
        WebFluxAutoConfiguration.class,
        WebTestClientAutoConfiguration.class
    })
    public static class Config {}
    private WebTestClient client;
    @Autowired
    private ApplicationContext applicationContext;
    @Autowired
    private FileSystem fileSystem;
    @Autowired
    private ServerProperties serverProperties;
    // fixture image used by every upload test
    private final FileSystemResource resource = new FileSystemResource("src/test/resources/test.png");
    private String base64EncodedFile;
    private String contextPath;
    @Before
    public void before() throws Exception {
        contextPath = serverProperties.getServlet().getContextPath();
        // generous 300s timeout so slow CI file I/O does not flake the tests
        client = WebTestClient.bindToApplicationContext(applicationContext).configureClient().responseTimeout(Duration.ofMillis(300000)).build();
        byte[] data = StreamUtils.copyToByteArray(resource.getInputStream());
        base64EncodedFile = Base64.encodeBase64String(data);
    }
    @Test
    public void upload() {
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", resource, MediaType.IMAGE_PNG);
        uploadAssert(builder, "upload");
    }
    @Test
    public void createSymbolicLink() throws IOException {
        // create the symbolic link (keyed by the file's sha1)
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", resource, MediaType.IMAGE_PNG);
        uploadAssert(builder, "upload?link=sha1");
        InputStream inputStream = new ByteArrayInputStream(StreamUtils.copyToByteArray(resource.getInputStream()));
        String link = DigestUtils.sha1Hex(inputStream);
        // fetch the file back through the link
        EntityExchangeResult<byte[]> result = client.get()
            .uri("/image/{id}/{hashAlgorithm}", link, IdHashAlgorithm.sha1)
            .exchange()
            .expectStatus().is2xxSuccessful()
            .expectHeader().contentLength(resource.contentLength())
            .expectHeader().contentTypeCompatibleWith(MediaType.IMAGE_PNG)
            .expectBody()
            .returnResult();
        inputStream.reset();
        ByteArrayInputStream resultInputStream = new ByteArrayInputStream(result.getResponseBody());
        Assert.assertEquals(resultInputStream.available(), inputStream.available());
        // reading the symlink and the source file must yield the same fingerprint
        String hash1 = org.springframework.util.DigestUtils.md5DigestAsHex(inputStream);
        String hash2 = org.springframework.util.DigestUtils.md5DigestAsHex(resultInputStream);
        Assert.assertEquals(hash1, hash2);
    }
    @Test
    public void base64upload() {
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", base64EncodedFile);
        uploadAssert(builder, "base64upload");
    }
    /**
     * POSTs the multipart body to {@code uri} and asserts a single PNG
     * {@link FileInfo} comes back with the expected download URL.
     */
    @SuppressWarnings("UnusedReturnValue")
    private FileInfo uploadAssert(MultipartBodyBuilder builder, String uri) {
        EntityExchangeResult<List<FileInfo>> result = client.post()
            .uri(contextPath + "/" + uri)
            .accept(MediaType.APPLICATION_JSON)
            .bodyValue(builder.build())
            .exchange()
            .expectStatus().is2xxSuccessful()
            .expectHeader().contentTypeCompatibleWith(MediaType.APPLICATION_JSON)
            .expectBodyList(FileInfo.class).hasSize(1)
            .returnResult();
        FileInfo fileInfo = result.getResponseBody().get(0);
        Assert.assertEquals(MediaType.IMAGE_PNG_VALUE, fileInfo.getContentType());
        Assert.assertEquals(contextPath + "/image/" + fileInfo.getId(), fileInfo.getUrl());
        return fileInfo;
    }
    @Test
    public void file() throws Exception {
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", resource, MediaType.IMAGE_PNG);
        FileInfo fileInfo = uploadAssert(builder, "upload");
        EntityExchangeResult<byte[]> result = client.get()
            .uri("/file/{id}", fileInfo.getId())
            .exchange()
            .expectStatus().is2xxSuccessful()
            .expectHeader().contentLength(resource.contentLength())
            .expectHeader().contentTypeCompatibleWith(MediaType.IMAGE_PNG)
            .expectBody()
            .returnResult();
        // write the downloaded bytes back so the result can be inspected manually
        String filePath = FileWebController.getFilePath(fileInfo.getId());
        fileSystem.put(filePath + "_file.png", new ByteArrayInputStream(result.getResponseBody()));
    }
    @Test
    public void base64file() {
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", resource, MediaType.IMAGE_PNG);
        FileInfo fileInfo = uploadAssert(builder, "upload");
        EntityExchangeResult<byte[]> result = client.get()
            .uri("/base64file/{id}", fileInfo.getId())
            .exchange()
            .expectStatus().is2xxSuccessful()
            .expectHeader().contentTypeCompatibleWith(MediaType.TEXT_PLAIN)
            .expectBody()
            .returnResult();
        // round-trip: the base64 endpoint must return exactly what was encoded
        Assert.assertEquals(base64EncodedFile, new String(result.getResponseBody(), StandardCharsets.UTF_8));
    }
    @Test
    public void image() throws Exception {
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", resource, MediaType.IMAGE_PNG);
        FileInfo fileInfo = uploadAssert(builder, "upload");
        // the controller uses the file id (quoted) as the ETag value
        String etag = "\"" + fileInfo.getId() + "\"";
        EntityExchangeResult<byte[]> result = client.get()
            .uri("/image/{hash}", fileInfo.getId())
            .exchange()
            .expectStatus().is2xxSuccessful()
            .expectHeader().contentTypeCompatibleWith(MediaType.IMAGE_PNG)
            .expectHeader().valueMatches("etag", etag)
            .expectBody()
            .returnResult();
        String filePath = FileWebController.getFilePath(fileInfo.getId());
        fileSystem.put(filePath + "_image.png", new ByteArrayInputStream(result.getResponseBody()));
        Assert.assertTrue(result.getResponseHeaders().getContentLength() > 0);
    }
    @Test
    public void imageNotModified() {
        MultipartBodyBuilder builder = new MultipartBodyBuilder();
        builder.part("files", resource, MediaType.IMAGE_PNG);
        FileInfo fileInfo = uploadAssert(builder, "upload");
        String etag = "\"" + fileInfo.getId() + "\"";
        // sending If-None-Match with the current ETag must yield 304
        client.get()
            .uri("/image/{hash}", fileInfo.getId())
            .headers(httpHeaders -> httpHeaders.setIfNoneMatch(etag))
            .exchange()
            .expectStatus().isNotModified();
    }
}
|
<filename>backend/src/test/java/fr/inra/urgi/faidare/api/brapi/v1/StudyControllerTest.java<gh_stars>1-10
package fr.inra.urgi.faidare.api.brapi.v1;
import fr.inra.urgi.faidare.domain.data.phenotype.ObservationUnitVO;
import fr.inra.urgi.faidare.domain.data.study.StudyDetailVO;
import fr.inra.urgi.faidare.domain.response.PaginatedList;
import fr.inra.urgi.faidare.domain.response.Pagination;
import fr.inra.urgi.faidare.domain.response.PaginationImpl;
import fr.inra.urgi.faidare.repository.es.GermplasmRepository;
import fr.inra.urgi.faidare.repository.es.ObservationUnitRepository;
import fr.inra.urgi.faidare.repository.es.StudyRepository;
import fr.inra.urgi.faidare.repository.file.CropOntologyRepository;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.web.servlet.MockMvc;
import java.util.ArrayList;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
 * MockMvc slice tests for {@code StudyController} (BrAPI v1 studies
 * endpoints): JSON-LD field visibility, lookup by id, 404 handling, and
 * observation-unit pagination.
 *
 * @author gcornut
 */
// NOTE(review): MediaType.APPLICATION_JSON_UTF8 is deprecated in recent
// Spring versions in favor of APPLICATION_JSON — consider migrating.
@ExtendWith(SpringExtension.class)
@WebMvcTest(controllers = StudyController.class)
class StudyControllerTest {
    @Autowired
    private MockMvc mockMvc;
    @MockBean
    private ObservationUnitRepository observationUnitRepository;
    @MockBean
    private GermplasmRepository germplasmRepository;
    @MockBean
    private CropOntologyRepository cropOntologyRepository;
    @MockBean
    private StudyRepository repository;
    // shared fixture: one study with a URI so JSON-LD @id can be asserted
    private static StudyDetailVO STUDY;
    static {
        String id = "ZG9pOjEwLjE1NDU0LzEuNDkyMTc4NjM4MTc4MzY5NkUxMg==";
        String uri = "http://doi.org/foo/bar";
        STUDY = new StudyDetailVO();
        STUDY.setUri(uri);
        STUDY.setStudyDbId(id);
    }
    @Test
    void should_Not_Show_JSON_LD_Fields_By_Default() throws Exception {
        when(repository.getById(STUDY.getStudyDbId())).thenReturn(STUDY);
        mockMvc.perform(get("/brapi/v1/studies/" + STUDY.getStudyDbId())
            .contentType(MediaType.APPLICATION_JSON_UTF8))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.result.@id").doesNotExist());
    }
    @Test
    void should_Show_JSON_LD_Fields_When_Asked() throws Exception {
        when(repository.getById(STUDY.getStudyDbId())).thenReturn(STUDY);
        // requesting application/ld+json opts in to the @id field
        mockMvc.perform(get("/brapi/v1/studies/"+ STUDY.getStudyDbId())
            .accept(BrapiJSONViewHandler.APPLICATION_LD_JSON)
            .contentType(MediaType.APPLICATION_JSON_UTF8))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.result.@id", is(STUDY.getUri())));
    }
    @Test
    void should_Get_By_Id() throws Exception {
        String identifier = "identifier";
        StudyDetailVO study = new StudyDetailVO();
        when(repository.getById(identifier)).thenReturn(study);
        mockMvc.perform(get("/brapi/v1/studies/" + identifier)
            .contentType(MediaType.APPLICATION_JSON_UTF8))
            .andExpect(status().isOk());
    }
    @Test
    void should_Return_Not_Found() throws Exception {
        // a null repository result maps to a BrAPI 404 metadata status entry
        when(repository.getById("foo")).thenReturn(null);
        mockMvc.perform(get("/brapi/v1/studies/foo")
            .contentType(MediaType.APPLICATION_JSON_UTF8))
            .andExpect(status().isNotFound())
            .andExpect(jsonPath("$.metadata.status", hasSize(1)))
            .andExpect(jsonPath("$.metadata.status[0].code", is("404")));
    }
    @Test
    void should_Paginate_ObservationUnits_By_Study() throws Exception {
        String studyDbId = "foo";
        int page = 2;
        int pageSize = 12;
        Pagination pagination = PaginationImpl.create(pageSize, page, 1000);
        PaginatedList<ObservationUnitVO> observationUnits = new PaginatedList<>(pagination, new ArrayList<>());
        when(observationUnitRepository.find(any())).thenReturn(observationUnits);
        // NOTE(review): unlike the other tests, no status assertion here —
        // consider adding .andExpect(status().isOk()).
        mockMvc.perform(get("/brapi/v1/studies/{id}/observationUnits?page={page}&pageSize={pageSize}", studyDbId, page, pageSize)
            .contentType(MediaType.APPLICATION_JSON_UTF8))
            .andExpect(jsonPath("$.metadata.pagination.currentPage", is(page)))
            .andExpect(jsonPath("$.metadata.pagination.pageSize", is(pageSize)));
    }
}
|
<reponame>JoshLuong/Trippo
import styled from "styled-components";
import * as c from "../../colors/colors";
import { Grid } from "@material-ui/core";
import WarningIcon from "@material-ui/icons/Warning";
import { IconButton } from "@material-ui/core";
// Transient styled-components props ($-prefixed, not forwarded to the DOM).
interface ButtonProps {
  $edit?: boolean;        // toggles the highlighted "editing" background
  $hasMarginTop?: boolean; // adds top spacing when the button is not first
}
// Scrollable container for a single day's content.
// NOTE(review): lowercase component names (dayDiv, dayDate, daysWeek) work
// only via member access like <S.dayDiv>; bare JSX <dayDiv> would be treated
// as an intrinsic element — confirm call sites before renaming.
export const dayDiv = styled.div`
  padding: 0.1em 1em 1em 1em;
  position: relative;
  min-height: calc(100% - 3.25em);
  box-sizing: border-box;
  overflow: auto;
  overflow-x: hidden;
`;
// Toggleable edit button; yellow while in edit mode.
export const EditButton = styled(IconButton) <ButtonProps>`
  background-color: ${(props) =>
    props.$edit ? c.YELLOW : "rgba(0, 0, 0, 0.12)"};
  width: 7em;
  line-height: 1.25em;
  border-radius: 10px;
  margin-bottom: 2em;
  text-transform: none;
  border: none;
  color: ${c.BLACK};
  font-size: 1em;
  padding: 8px;
  margin-top: ${(props) =>
    props.$hasMarginTop ? "1.5em" : "0"};
`;
// Empty-state placeholder shown when a day has no entries.
export const NoContent = styled.div`
  width: 100%;
  height: 100%;
  text-align: center;
  margin-top: 0.5em;
`;
// Emphasized inline "tip" text.
export const Tip = styled.span`
  color: ${c.DARK_ORANGE};
  font-weight: bold;
`;
export const StyledWarningIcon = styled(WarningIcon)`
  color: ${c.DARK_ORANGE};
  padding-right: 0.25em;
`;
// Fixed-width inline gap.
export const Spacer = styled.div`
  display: inline-block;
  width: 2em;
`;
// Header bar that stays pinned while the day content scrolls underneath.
export const StickyDiv = styled.div`
  position: sticky;
  top: 0;
  margin: 0 0.52em 0.35em 0.52em;
  padding: 0.75em 1em;
  height: 2.9em;
  border-radius: 2.5px;
  background: #ffffff;
  z-index:2;
`;
// Date row with prev/next navigation buttons.
export const dayDate = styled.div`
  display: flex;
  color: ${c.WHITE};
  font-weight: 545;
  letter-spacing: 2px;
  border-radius: 3px;
  button {
    border: none;
    height: 2em;
    padding: 0;
    padding-left: 0.25em;
    background-color: transparent;
    margin: auto 0 auto 0;
    z-index: 1;
    i {
      display: inline;
      padding-right: 0.25em;
      color: ${c.GREY};
      font-size: 1.3em;
    }
  }
  button:hover {
    cursor: pointer;
  }
  div {
    margin: auto;
    text-align: center;
  }
  @media (max-width: 650px) {
    font-size: 0.8em;
  }
`;
// Dashed vertical connector showing travel distance between time slots.
export const Distance = styled.div`
  border-left: 1.25px dashed ${c.DARK_GREY};
  padding-left: 6px;
  margin: 0.75em 3px;
`;
export const daysWeek = styled.div`
  display: inline-block;
  font-size: 1.25em;
  letter-spacing: 0.25px;
  color: ${c.BLACK};
`;
export const TimeSlots = styled.div`
  padding: 0px 4px;
`;
// Cost summary row; the second child is right-aligned.
export const Cost = styled(Grid)`
  padding-top: 1em;
  padding-left: 0.5em;
  display: flex;
  align-items: center;
  div:nth-child(2) {
    display: flex;
    float: right;
    margin-left: auto;
    margin-right: 0.5em;
  }
  span {
    padding-top: 3px;
    display: inline-block;
  }
`;
|
'use strict';
// used for connecting to mysql - host/user/password
// must be changed according to system settings
// NOTE(review): '<PASSWORD>' is a scrubbed placeholder — inject the real
// credential from deployment configuration, never commit it to source control.
module.exports.mysql = {
    host: 'localhost',
    user: 'root',
    password: '<PASSWORD>',
    charset: 'UTF8MB4', // full Unicode incl. 4-byte code points (emoji)
    database: 'planner',
};
|
#!/usr/bin/env bash
# Run every check-<item>.sh script in the check/ directory.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
# set -euxo pipefail
cur_dir=$(
    cd "$(dirname "$0")"
    pwd
)
DATE="$(echo $(TZ=UTC date '+%Y-%m-%d %H:%M:%S'))"
check_dir="${cur_dir}/check"
tmpFile="./action.tmp"
Green_font_prefix="\033[32m" && Red_font_prefix="\033[31m" && Green_background_prefix="\033[42;37m" && Red_background_prefix="\033[41;37m" && Font_color_suffix="\033[0m"
Info="${Green_font_prefix}[Info]${Font_color_suffix}"
Error="${Red_font_prefix}[Error]${Font_color_suffix}"
Tip="${Green_font_prefix}[Tip]${Font_color_suffix}"
# BUGFIX: the old `for item in {"aliyundrive", "idm", ...}` had spaces inside
# the braces, so bash performed NO brace expansion and iterated over the
# literal words `{aliyundrive,` `idm,` ... — a plain word list is correct.
for item in aliyundrive idm mouseinc msedge office-iso; do
    cd "${check_dir}"
    echo -e "Check ${item}..."
    bash "check-${item}.sh"
done
|
# Import the necessary Tello SDK library
from djitellopy import Tello

# Create a Tello object and connect to the drone
tello = Tello()
tello.connect()

# Takeoff the drone
tello.takeoff()

try:
    # Prompt the user for input
    user_input = input("Enter command: ")
    # 'q' moves the drone down by 30 cm; any other input skips the move
    if user_input == 'q':
        # Print "down" to indicate the drone movement
        print("down")
        # Instruct the Tello drone to move down by 30 units
        tello.move_down(30)
finally:
    # BUGFIX: land and close the connection even if input/movement raised,
    # so an exception can no longer leave the drone hovering with an open
    # connection.
    tello.land()
    tello.end()

# Print "exit" to indicate the completion of the program
print("exit")
<reponame>valitydev/sink-common-lib
package dev.vality.sink.common.serialization.impl;
import dev.vality.damsel.payment_processing.EventPayload;
/**
 * Thrift-binary deserializer for payment-processing {@link EventPayload}
 * messages; delegates to the generic base-class implementation with a fresh
 * target instance per call.
 */
public class PaymentEventPayloadDeserializer extends AbstractThriftBinaryDeserializer<EventPayload> {
    @Override
    public EventPayload deserialize(byte[] bin) {
        return deserialize(bin, new EventPayload());
    }
}
|
#!/bin/bash
# Add QR-code scanning support to a React Native app: installs the JS
# dependencies, then idempotently patches ios/Podfile with the
# Permission-Camera pod and the permissions_path variable it needs.
set -e
set -x
# Dependencies
yarn add react-native-camera@4.2.1
yarn add react-native-qrcode-scanner
yarn add react-native-permissions
# Podfile
cd ios
# Insert the Permission-Camera pod after the RNCPushNotificationIOS line
# (skipped when already present, so the script is safe to re-run).
if grep -q "Permission-Camera" Podfile
then
  echo "Permission-Camera already supported, nothing to do here"
else
  sed -i.bak '/RNCPushNotificationIOS/a\
\ pod "Permission-Camera", :path => "#{permissions_path}/Camera/Permission-Camera.podspec"
' Podfile
fi
# Define permissions_path after the same anchor; since this runs after the
# pod insertion, the variable line lands ABOVE the pod line, which is the
# order the Podfile needs. NOTE(review): sed -i.bak leaves Podfile.bak
# behind — presumably intentional as a safety copy; confirm.
if grep -q "permissions_path =" Podfile
then
  echo "permissions_path already supported, nothing to do here"
else
  sed -i.bak '/RNCPushNotificationIOS/a\
\ permissions_path = "../node_modules/react-native-permissions/ios"
' Podfile
fi
pod install && cd ..
echo "configured iOS settings"
<reponame>vintprox/3dd-widget
import Widget from './classes/Widget';
import Service from './classes/Service';
// Re-export the package's public classes as its entry point.
export {
    Widget,
    Service
};
|
/**
 * Validate an e-mail address.
 * Coerces the input to a lower-cased string and matches it against a
 * practical RFC-5322-style pattern (quoted local parts and bracketed IPv4
 * domains included).
 * @param {*} email - value to validate (coerced via String()).
 * @returns {boolean} true when the whole value is a syntactically valid address.
 */
function validateEmail(email) {
    // Regex for email validation
    var re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
    // test() already returns a boolean — no if/else needed
    return re.test(String(email).toLowerCase());
}
module.exports = validateEmail;
#!/bin/bash
# BUGFIX: the shebang was buried below the license block; the kernel only
# honors `#!` when it is the very first line of the file, so the script was
# previously run by whatever shell invoked it rather than bash.
#
# Copyright 2019, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Current gcloud account and global git user name, for later use.
GCP_USER=$(gcloud config get-value account)
GIT_USER=$(git config --global user.name)
/* PrintDDL.cpp */
//----------------------------------------------------------------------------------------
//
// Project: CCore 3.00
//
// Tag: Desktop
//
// License: Boost Software License - Version 1.0 - August 17th, 2003
//
// see http://www.boost.org/LICENSE_1_0.txt or the local copy
//
// Copyright (c) 2016 <NAME>. All rights reserved.
//
//----------------------------------------------------------------------------------------
#include <CCore/inc/video/PrintDDL.h>
#include <CCore/inc/Exception.h>
namespace CCore {
namespace Video {
/* struct DDLPrintableString */
// Error path shared by DDLPrintableString::PrintChar(): raises a CCore
// Exception when a character cannot be rendered as printable DDL output.
void DDLPrintableString::GuardNotPrintable()
 {
  Printf(Exception,"CCore::Video::DDLPrintableString::PrintChar(...) : not printable character");
 }
} // namespace Video
} // namespace CCore
|
package com.learn.demomarket.product.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.learn.common.utils.PageUtils;
import com.learn.demomarket.product.entity.AttrEntity;
import com.learn.demomarket.product.vo.AttrGroupRelationVo;
import com.learn.demomarket.product.vo.AttrRespVo;
import com.learn.demomarket.product.vo.AttrVo;
import java.util.List;
import java.util.Map;
/**
 * Product attribute service.
 *
 * @author 996worker
 * @email
 * @date 2021-11-29 13:08:45
 */
public interface AttrService extends IService<AttrEntity> {
    /** Generic paged query; params presumably carry page/limit/keyword — confirm against PageUtils. */
    PageUtils queryPage(Map<String, Object> params);
    /** Persist a new attribute together with its group relation data. */
    void saveAttr(AttrVo attr);
    /** Paged attributes of the given catalog, filtered by attribute type. */
    PageUtils queryBaseAttrPage(Map<String, Object> params, Long catelogId, String type);
    /** Detail view (incl. group/catalog info) for a single attribute. */
    AttrRespVo getAttrInfo(Long attrId);
    /** Update an attribute and its related group association. */
    void updateAttrById(AttrVo attr);
    /** Attributes already associated with the given attribute group. */
    List<AttrEntity> getRelationAttr(Long attrGroupId);
    /** Paged attributes NOT yet associated with the given attribute group. */
    PageUtils getNoRelationAttr(Map<String, Object> params, Long attrgroupId);
    /** Remove the given attribute↔group associations. */
    void deleteRelation(AttrGroupRelationVo[] vos);
    /** Of the given ids, return those attributes flagged as searchable. */
    List<Long> selectSearchAttrs(List<Long> attrIds);
}
|
#
# Copyright 2016-2017 The OpenTracing Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
#
# Abort on errors and unset variables, fail a pipeline if any stage fails,
# and trace each command for the CI log.
set -euo pipefail
set -x
# Return 0 (release) when the build was triggered by a git tag,
# 1 (snapshot) otherwise.
build_started_by_tag() {
  if [ -n "${TRAVIS_TAG}" ]; then
    echo "[Publishing] This build was started by the tag ${TRAVIS_TAG}, publishing release"
    return 0
  fi
  echo "[Publishing] This build was not started by a tag, publishing snapshot"
  return 1
}
# Return 0 when Travis is building a pull request (do not publish),
# 1 for a regular branch/tag build.
is_pull_request() {
  case "${TRAVIS_PULL_REQUEST}" in
    false)
      echo "[Publishing] This is not a Pull Request"
      return 1
      ;;
    *)
      echo "[Not Publishing] This is a Pull Request"
      return 0
      ;;
  esac
}
# Return 0 only when the Travis branch is master.
is_travis_branch_master() {
  if [ "${TRAVIS_BRANCH}" != master ]; then
    echo "[Not Publishing] Travis branch is not master"
    return 1
  fi
  echo "[Publishing] Travis branch is master"
  return 0
}
# Sanity check for tag pushes: on `git push --tags` Travis reports the tag
# value as the branch, so the two must match — otherwise abort the build.
# github issue: https://github.com/travis-ci/travis-ci/issues/1675
check_travis_branch_equals_travis_tag() {
  if [ "${TRAVIS_BRANCH}" = "${TRAVIS_TAG}" ]; then
    echo "[Publishing] Branch (${TRAVIS_BRANCH}) same as Tag (${TRAVIS_TAG})"
  else
    echo "Travis branch does not equal Travis tag, which it should, bailing out."
    echo "  github issue: https://github.com/travis-ci/travis-ci/issues/1675"
    exit 1
  fi
}
# Validate the tag that triggered this build.
#  - A bare version tag like "1.2.3" or "1.2.3.RC1" is created by the Maven
#    release plugin mid-release, so there is nothing left to do: exit 0.
#  - Anything that is not of the form "release-<version>" is invalid: exit 1.
#  - Otherwise (a proper "release-*" tag) fall through and continue the release.
check_release_tag() {
  tag="${TRAVIS_TAG}"
  if [[ "$tag" =~ ^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+(\.RC[[:digit:]]+)?$ ]]; then
    echo "Build started by version tag $tag. During the release process tags like this"
    echo "are created by the 'release' Maven plugin. Nothing to do here."
    exit 0
  elif [[ ! "$tag" =~ ^release-[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+(\.RC[[:digit:]]+)?$ ]]; then
    echo "You must specify a tag of the format 'release-0.0.0' or 'release-0.0.0.RC0' to release this project."
    echo "The provided tag ${tag} doesn't match that. Aborting."
    exit 1
  fi
}
# Succeed (0) when the checked-out POM version is a plain release version
# such as "1.2.3" or "1.2.3.RC1" (i.e. not a -SNAPSHOT), meaning this commit
# was produced by the release plugin and should be synced to Maven Central.
is_release_commit() {
  # -N: evaluate only the top-level project; grep -v '\[' drops Maven's "[INFO]" log lines.
  project_version=$(./mvnw help:evaluate -N -Dexpression=project.version|grep -v '\[')
  if [[ "$project_version" =~ ^[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+(\.RC[[:digit:]]+)?$ ]]; then
    echo "Build started by release commit $project_version. Will synchronize to maven central."
    return 0
  else
    return 1
  fi
}
# Derive the version to release by stripping the leading "release-" prefix
# from the Travis tag (e.g. "release-1.2.3" -> "1.2.3").
release_version() {
  echo "${TRAVIS_TAG#release-}"
}
# Check out a local master branch and verify it matches origin/master,
# aborting (exit 1) if the remote has moved past the commit under release.
safe_checkout_master() {
  # We need to be on a branch for release:perform to be able to create commits, and we want that branch to be master.
  # But we also want to make sure that we build and release exactly the tagged version, so we verify that the remote
  # master is where our tag is.
  git checkout -B master
  git fetch origin master:origin/master
  commit_local_master="$(git show --pretty='format:%H' master)"
  commit_remote_master="$(git show --pretty='format:%H' origin/master)"
  if [ "$commit_local_master" != "$commit_remote_master" ]; then
    echo "Master on remote 'origin' has commits since the version under release, aborting"
    exit 1
  fi
}
#----------------------
# MAIN
#----------------------
# For tag-triggered builds, fail fast on inconsistent or malformed tags
# before doing any expensive work.
if ! is_pull_request && build_started_by_tag; then
  check_travis_branch_equals_travis_tag
  check_release_tag
fi
# Every build (PR, master, or tag) compiles and runs the tests first.
./mvnw clean install -nsu
# If we are on a pull request, our only job is to run tests, which happened above via ./mvnw install
if is_pull_request; then
  true
# If we are on master, we will deploy the latest snapshot or release version
#   - If a release commit fails to deploy for a transient reason, delete the broken version from bintray and click rebuild
elif is_travis_branch_master; then
  ./mvnw --batch-mode -s ./.settings.xml -Prelease -nsu -DskipTests deploy
  # If the deployment succeeded, sync it to Maven Central. Note: this needs to be done once per project, not module, hence -N
  if is_release_commit; then
    ./mvnw --batch-mode -s ./.settings.xml -nsu -N io.zipkin.centralsync-maven-plugin:centralsync-maven-plugin:sync
  fi
# If we are on a release tag, the following will update any version references and push a version tag for deployment.
elif build_started_by_tag; then
  safe_checkout_master
  ./mvnw --batch-mode -s ./.settings.xml -Prelease -nsu -DreleaseVersion="$(release_version)" -Darguments="-DskipTests" release:prepare
fi
|
<gh_stars>0
// Copyright 2008, 2009 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal;
import org.apache.tapestry5.ioc.*;
import org.apache.tapestry5.ioc.def.ContributionDef;
/**
* Partially implements {@link org.apache.tapestry5.ioc.def.ContributionDef}, providing empty implementations of the
* three contribute() methods.
*/
public abstract class AbstractContributionDef implements ContributionDef
{
    /** No-op: subclasses override only the contribute() variant they need. */
    public void contribute(ModuleBuilderSource moduleSource, ServiceResources resources,
                           Configuration configuration)
    {
    }

    /** No-op: subclasses override only the contribute() variant they need. */
    public void contribute(ModuleBuilderSource moduleSource, ServiceResources resources,
                           OrderedConfiguration configuration)
    {
    }

    /** No-op: subclasses override only the contribute() variant they need. */
    public void contribute(ModuleBuilderSource moduleSource, ServiceResources resources,
                           MappedConfiguration configuration)
    {
    }
}
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
#
# Writes a static index.html into Apache's web root. The unquoted heredoc
# below expands ${PLACEHOLDER}, ${WIDTH}, ${HEIGHT} and ${PREFIX}, which
# are assumed to be set in the environment — TODO confirm the provisioner
# exports them before this runs.
cat << EOM > /var/www/html/index.html
<html>
  <head><title>Meow!</title></head>
  <body>
  <div style="width:800px;margin: 0 auto">
  <!-- BEGIN -->
  <center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
  <center><h2>Meow World!</h2></center>
  Welcome to ${PREFIX}'s app. Jo jump in a ceramic toilet.
  <!-- END -->
  </div>
  </body>
</html>
EOM
echo "Script complete."
|
<div class="timeline">
<div class="timeline-item">
<div class="timeline-content">
<h3>Activity 1</h3>
<p>April 10, 2020 - August 3, 2020</p>
</div>
</div>
<div class="timeline-item">
<div class="timeline-content">
<h3>Activity 2</h3>
<p>August 10, 2020 - October 1, 2020</p>
</div>
</div>
</div> |
/// Registry of callbacks that can all be invoked at once via `trigger`.
struct Closure {
    // Boxed trait objects so closures of any concrete type can be stored together.
    closures: Vec<Box<dyn Fn()>>,
}
impl Closure {
fn new() -> Self {
Closure { closures: Vec::new() }
}
fn register<F>(&mut self, closure: F)
where
F: Fn() + 'static,
{
self.closures.push(Box::new(closure));
}
fn trigger(&self) {
for c in &self.closures {
c();
}
}
}
fn main() {
let mut closure = Closure::new();
closure.register(|| {
println!("Hello from Rust");
});
closure.trigger();
} |
#!/bin/bash
#### Liam: Add LSF tags here!
#------------------------------------------------------------------------------
# GEOS-Chem Global Chemical Transport Model !
#------------------------------------------------------------------------------
#BOP
#
# !MODULE: intTestExecute_lsf.sh
#
# !DESCRIPTION: Runs execution tests on various GEOS-Chem Classic
# run directories (using the LSF scheduler).
#\\
#\\
# !CALLING SEQUENCE:
# sbatch intTestExecute_lsf.sh
#
# !REVISION HISTORY:
# 03 Nov 2020 - R. Yantosca - Initial version
# See the subsequent Git history with the gitk browser!
#EOP
#------------------------------------------------------------------------------
#BOC
#============================================================================
# Global variable and function definitions
#============================================================================
# Get the long path of this folder
root=`pwd -P`
# In SLURM: Load software environment and OpenMP settings
# Otherwise (e.g. for testing) use a small number of OpenMP threads
if [[ "x${SLURM_JOBID}" == "x" ]]; then
export OMP_NUM_THREADS=6
else
. ~/.bashrc
. ${root}/gcclassic_env.sh
fi
# Load common functions for tests
. ${root}/commonFunctionsForTests.sh
# Count the number of tests to be done = number of run directories
numTests=$(count_rundirs ${root})
#============================================================================
# Initialize results logfile
#============================================================================
# Results logfile name
results="${root}/logs/results.execute.log"
rm -f ${results}
# Print header to results log file
print_to_log "${SEP_MAJOR}" ${results}
print_to_log "GEOS-Chem Classic: Execution Test Results" ${results}
print_to_log "" ${results}
print_to_log "Using ${OMP_NUM_THREADS} OpenMP threads" ${results}
print_to_log "Number of execution tests: ${numTests}" ${results}
print_to_log "${SEP_MAJOR}" ${results}
#============================================================================
# Run the GEOS-Chem executable in each GEOS-Chem run directory
#============================================================================
print_to_log " " ${results}
print_to_log "Execution tests:" ${results}
print_to_log "${SEP_MINOR}" ${results}
# Keep track of the number of tests that passed & failed
let passed=0
let failed=0
let remain=${numTests}
# Loop over rundirs and run GEOS-Chem
for runDir in *; do

    # Do the following for only valid GEOS-Chem run dirs
    expr=$(is_valid_rundir "${root}/${runDir}")
    if [[ "x${expr}" == "xTRUE" ]]; then

        # Define log file and remove any leftover copy from a prior run.
        # BUG FIX: this previously ran "rm -f ${LOG}" -- shell variables are
        # case-sensitive, so ${LOG} was undefined and stale logs were never
        # removed (output was appended to them instead).
        log="${root}/logs/execute.${runDir}.log"
        rm -f ${log}

        # Messages for execution pass & fail
        passMsg="$runDir${FILL:${#runDir}}.....${EXE_PASS_STR}"
        failMsg="$runDir${FILL:${#runDir}}.....${EXE_FAIL_STR}"

        # Get the executable file corresponding to this run directory
        exeFile=$(gcclassic_exe_name ${runDir})

        # Test if the executable exists
        if [[ -f ${root}/exe_files/${exeFile} ]]; then

            #----------------------------------------------------------------
            # If the executable file exists, we can do the test
            #----------------------------------------------------------------

            # Change to this run directory
            cd ${root}/${runDir}

            # Copy the executable file here
            cp -f ${root}/exe_files/${exeFile} .

            # Run the code if the executable is present.  Then update the
            # pass/fail counters and write a message to the results log file.
            ./${exeFile} >> ${log} 2>&1
            if [[ $? -eq 0 ]]; then
                let passed++
                if [[ "x${results}" != "x" ]]; then
                    print_to_log "${passMsg}" ${results}
                fi
            else
                let failed++
                if [[ "x${results}" != "x" ]]; then
                    print_to_log "${failMsg}" ${results}
                fi
            fi

            # Change to root directory for next iteration
            cd ${root}

        else

            #----------------------------------------------------------------
            # If the executable is missing, update the "fail" counter
            # and write the "failed" message to the results log file.
            #----------------------------------------------------------------
            let failed++
            if [[ "x${results}" != "x" ]]; then
                print_to_log "${failMsg}" ${results}
            fi
        fi

        # Decrement the count of remaining tests
        let remain--
    fi
done
#============================================================================
# Check the number of simulations that have passed
#============================================================================
# Print summary to log
print_to_log " " ${results}
print_to_log "Summary of test results:" ${results}
print_to_log "${SEP_MINOR}" ${results}
print_to_log "Execution tests passed: ${passed}" ${results}
print_to_log "Execution tests failed: ${failed}" ${results}
print_to_log "Execution tests not yet completed: ${remain}" ${results}
# Check if all tests passed
if [[ "x${passed}" == "x${numTests}" ]]; then
print_to_log "" ${results}
print_to_log "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%" ${results}
print_to_log "%%% All execution tests passed! %%%" ${results}
print_to_log "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%" ${results}
fi
#============================================================================
# Cleanup and quit
#============================================================================
# Free local variables
unset exeFile
unset failed
# BUG FIX: was "unset failmsg"; variable names are case-sensitive, so the
# actual variable (failMsg) was being left set.
unset failMsg
unset log
unset numTests
unset passed
unset passMsg
unset remain
unset results
unset root

# Free imported global variables
unset FILL
unset LINE
unset LINELC
unset SED_INPUT_GEOS_1
unset SED_INPUT_GEOS_2
unset SED_HISTORY_RC
unset CMP_PASS_STR
unset CMP_FAIL_STR
unset EXE_PASS_STR
unset EXE_FAIL_STR
#EOC
|
#!/bin/sh
# install-unifi.sh
# Installs the UniFi controller software on a FreeBSD machine (presumably running pfSense).
# The latest version of UniFi:
UNIFI_SOFTWARE_URL="http://dl.ubnt.com/unifi/6.0.28/UniFi.unix.zip"
# The rc script associated with this branch or fork:
RC_SCRIPT_URL="https://raw.githubusercontent.com/gozoinks/unifi-pfsense/master/rc.d/unifi.sh"
# If pkg-ng is not yet installed, bootstrap it:
if ! /usr/sbin/pkg -N 2> /dev/null; then
echo "FreeBSD pkgng not installed. Installing..."
env ASSUME_ALWAYS_YES=YES /usr/sbin/pkg bootstrap
echo " done."
fi
# If installation failed, exit:
if ! /usr/sbin/pkg -N 2> /dev/null; then
echo "ERROR: pkgng installation failed. Exiting."
exit 1
fi
# Determine this installation's Application Binary Interface
ABI=`/usr/sbin/pkg config abi`
# FreeBSD package source:
FREEBSD_PACKAGE_URL="https://pkg.freebsd.org/${ABI}/latest/All/"
# FreeBSD package list:
FREEBSD_PACKAGE_LIST_URL="https://pkg.freebsd.org/${ABI}/latest/packagesite.txz"
# Stop the controller if it's already running...
# First let's try the rc script if it exists:
if [ -f /usr/local/etc/rc.d/unifi.sh ]; then
echo -n "Stopping the unifi service..."
/usr/sbin/service unifi.sh stop
echo " done."
fi
# Then to be doubly sure, let's make sure ace.jar isn't running for some other reason:
if [ $(ps ax | grep -c "/usr/local/UniFi/lib/[a]ce.jar start") -ne 0 ]; then
echo -n "Killing ace.jar process..."
/bin/kill -15 `ps ax | grep "/usr/local/UniFi/lib/[a]ce.jar start" | awk '{ print $1 }'`
echo " done."
fi
# And then make sure mongodb doesn't have the db file open:
if [ $(ps ax | grep -c "/usr/local/UniFi/data/[d]b") -ne 0 ]; then
echo -n "Killing mongod process..."
/bin/kill -15 `ps ax | grep "/usr/local/UniFi/data/[d]b" | awk '{ print $1 }'`
echo " done."
fi
# If an installation exists, we'll need to back up configuration:
if [ -d /usr/local/UniFi/data ]; then
echo "Backing up UniFi data..."
BACKUPFILE=/var/backups/unifi-`date +"%Y%m%d_%H%M%S"`.tgz
/usr/bin/tar -vczf ${BACKUPFILE} /usr/local/UniFi/data
fi
# Add the fstab entries apparently required for OpenJDKse:
if [ $(grep -c fdesc /etc/fstab) -eq 0 ]; then
echo -n "Adding fdesc filesystem to /etc/fstab..."
echo -e "fdesc\t\t\t/dev/fd\t\tfdescfs\trw\t\t0\t0" >> /etc/fstab
echo " done."
fi
if [ $(grep -c proc /etc/fstab) -eq 0 ]; then
echo -n "Adding procfs filesystem to /etc/fstab..."
echo -e "proc\t\t\t/proc\t\tprocfs\trw\t\t0\t0" >> /etc/fstab
echo " done."
fi
# Run mount to mount the two new filesystems:
echo -n "Mounting new filesystems..."
/sbin/mount -a
echo " done."
#remove mongodb34 - discontinued
echo "Removing packages discontinued..."
if [ `pkg info | grep -c mongodb-` -eq 1 ]; then
env ASSUME_ALWAYS_YES=YES /usr/sbin/pkg delete mongodb
fi
if [ `pkg info | grep -c mongodb34-` -eq 1 ]; then
env ASSUME_ALWAYS_YES=YES /usr/sbin/pkg delete mongodb34
fi
echo " done."
# Install mongodb, OpenJDK, and unzip (required to unpack Ubiquiti's download):
# -F skips a package if it's already installed, without throwing an error.
echo "Installing required packages..."
tar xv -C / -f /usr/local/share/pfSense/base.txz ./usr/bin/install
#uncomment below for pfSense 2.2.x:
#env ASSUME_ALWAYS_YES=YES /usr/sbin/pkg install mongodb openjdk unzip pcre v8 snappy
fetch ${FREEBSD_PACKAGE_LIST_URL}
tar vfx packagesite.txz
# AddPkg <name>: install or update one FreeBSD package from the official
# repository.  Looks up the current version in the downloaded
# packagesite.yaml manifest and skips the download when that exact
# version is already installed.
AddPkg () {
  pkgname=$1
  pkginfo=`grep "\"name\":\"$pkgname\"" packagesite.yaml`
  pkgvers=`echo $pkginfo | pcregrep -o1 '"version":"(.*?)"' | head -1`

  # compare version for update/install
  if [ `pkg info | grep -c $pkgname-$pkgvers` -eq 1 ]; then
    echo "Package $pkgname-$pkgvers already installed."
  else
    env ASSUME_ALWAYS_YES=YES /usr/sbin/pkg add -f ${FREEBSD_PACKAGE_URL}${pkgname}-${pkgvers}.txz

    # If openjdk8 was updated, force-delete snappyjava so that it gets
    # reinstalled against the new JDK version.
    # BUG FIX: was '[ "$pkgname" == "openjdk8" ]'; "==" is a bashism that
    # FreeBSD's POSIX /bin/sh (this script's interpreter) rejects.
    if [ "$pkgname" = "openjdk8" ]; then
      env ASSUME_ALWAYS_YES=YES /usr/sbin/pkg delete snappyjava
    fi
  fi
}
AddPkg snappy
AddPkg cyrus-sasl
AddPkg xorgproto
AddPkg python37
AddPkg v8
AddPkg icu
AddPkg boost-libs
AddPkg mongodb36
AddPkg unzip
AddPkg pcre
AddPkg alsa-lib
AddPkg freetype2
AddPkg fontconfig
AddPkg libXdmcp
AddPkg libpthread-stubs
AddPkg libXau
AddPkg libxcb
AddPkg libICE
AddPkg libSM
AddPkg java-zoneinfo
AddPkg libX11
AddPkg libXfixes
AddPkg libXext
AddPkg libXi
AddPkg libXt
AddPkg libfontenc
AddPkg mkfontscale
AddPkg dejavu
AddPkg libXtst
AddPkg libXrender
AddPkg libinotify
AddPkg javavmwrapper
AddPkg giflib
AddPkg openjdk8
AddPkg snappyjava
# Clean up downloaded package manifest:
rm packagesite.*
echo " done."
# Switch to a temp directory for the Unifi download:
cd `mktemp -d -t unifi`
# Download the controller from Ubiquiti (assuming acceptance of the EULA):
echo -n "Downloading the UniFi controller software..."
/usr/bin/fetch ${UNIFI_SOFTWARE_URL}
echo " done."
# Unpack the archive into the /usr/local directory:
# (the -o option overwrites the existing files without complaining)
echo -n "Installing UniFi controller in /usr/local..."
/usr/local/bin/unzip -o UniFi.unix.zip -d /usr/local
echo " done."
# Update Unifi's symbolic link for mongod to point to the version we just installed:
echo -n "Updating mongod link..."
/bin/ln -sf /usr/local/bin/mongod /usr/local/UniFi/bin/mongod
echo " done."
# If partition size is < 4GB, add smallfiles option to mongodb
echo -n "Checking partition size..."
if [ `df -k | awk '$NF=="/"{print $2}'` -le 4194302 ]; then
echo -e "\nunifi.db.extraargs=--smallfiles\n" >> /usr/local/UniFi/data/system.properties
fi
echo " done."
# Replace snappy java library to support AP adoption with latest firmware:
echo -n "Updating snappy java..."
unifizipcontents=`zipinfo -1 UniFi.unix.zip`
upstreamsnappyjavapattern='/(snappy-java-[^/]+\.jar)$'
# Make sure exactly one match is found
if [ $(echo "${unifizipcontents}" | egrep -c ${upstreamsnappyjavapattern}) -eq 1 ]; then
upstreamsnappyjava="/usr/local/UniFi/lib/`echo \"${unifizipcontents}\" | pcregrep -o1 ${upstreamsnappyjavapattern}`"
mv "${upstreamsnappyjava}" "${upstreamsnappyjava}.backup"
cp /usr/local/share/java/classes/snappy-java.jar "${upstreamsnappyjava}"
echo " done."
else
echo "ERROR: Could not locate UniFi's snappy java! AP adoption will most likely fail"
fi
# Fetch the rc script from github:
echo -n "Installing rc script..."
/usr/bin/fetch -o /usr/local/etc/rc.d/unifi.sh ${RC_SCRIPT_URL}
echo " done."
# Fix permissions so it'll run
chmod +x /usr/local/etc/rc.d/unifi.sh
# Add the startup variable to rc.conf.local.
# Eventually, this step will need to be folded into pfSense, which manages the main rc.conf.
# In the following comparison, we expect the 'or' operator to short-circuit, to make sure the file exists and avoid grep throwing an error.
if [ ! -f /etc/rc.conf.local ] || [ $(grep -c unifi_enable /etc/rc.conf.local) -eq 0 ]; then
echo -n "Enabling the unifi service..."
echo "unifi_enable=YES" >> /etc/rc.conf.local
echo " done."
fi
# Restore the backup:
if [ ! -z "${BACKUPFILE}" ] && [ -f ${BACKUPFILE} ]; then
echo "Restoring UniFi data..."
mv /usr/local/UniFi/data /usr/local/UniFi/data-`date +%Y%m%d-%H%M`
/usr/bin/tar -vxzf ${BACKUPFILE} -C /
fi
# Start it up:
echo -n "Starting the unifi service..."
/usr/sbin/service unifi.sh start
echo " done."
|
// lint-staged configuration: before commit, run Prettier in write mode on
// staged JS/TS/JSON/Markdown and eslintrc/prettierrc files.
module.exports = {
  '**/*.+(js|json|ts|md|eslintrc|prettierrc)': ['prettier --write']
}
|
<filename>0-frameworks/mrpc/example/test_server.cpp
#include "server.h"
// Sum of two doubles; registered as the "add" RPC handler in main().
double Add(double a, double b) {
  double sum = a + b;
  return sum;
}
// Entry point: start an mrpc server on port 8080 exposing two methods
// ("add" and "multiply"), then run the asio event loop until stopped.
int main(int argc, char* argv[]) {
  short port = 8080;
  asio::io_context io_context;
  mrpc::Server server(io_context, port);
  // Bind functions.
  // NOTE(review): "add" is registered with <float, float, float> template
  // arguments although Add takes/returns double — confirm the implicit
  // narrowing is intended.
  server.Bind<float, float, float>("add", Add);
  server.Bind<int, int, int>("multiply",
    [](int a, int b) -> int { return a * b; });
  std::cout << "Initialise an IPv4 TCP endpoint for port " << port << std::endl;
  std::cout << "Running..." << std::endl;
  // Blocks servicing RPC requests until the io_context is stopped.
  io_context.run();
  return 0;
}
# This shell script executes Slurm jobs for thresholding
# predictions of convolutional
# neural network with adaptive threshold on BirdVox-70k full audio
# with logmelspec input.
# Augmentation kind: none.
# Test unit: unit05.
# Trial ID: 2.
sbatch 045_aug-none_test-unit05_predict-unit05_trial-2.sbatch
sbatch 045_aug-none_test-unit05_predict-unit02_trial-2.sbatch
sbatch 045_aug-none_test-unit05_predict-unit03_trial-2.sbatch
|
<reponame>anticore/automaton
/**
 * Contract for objects that can round-trip through a plain representation
 * of type T.
 */
export interface Serializable<T> {
    /** Produce the serialized representation of this object. */
    serialize: () => T;
    /** Restore this object's state from a previously serialized value. */
    deserialize: ( data: T ) => void;
}
|
<reponame>BaristaFramework/crema
/**
* @class BlockItem
*/
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import styles from './styles.css';
// TODO import { TYPES_LABEL_POSITION, TYPES_LAYOUT } from './consts';
import { qualitativeColorFormatter } from './utils';
/**
 * One colored cell of a block chart: a <b> element whose background color
 * is derived from (value, index, total) by the colorFormatter prop.
 */
class BlockItem extends PureComponent {
    render() {
        const { colorFormatter, index, total, value } = this.props;
        // Delegate the color choice so callers can plug in other palettes.
        const backgroundColor = colorFormatter(value, index, total);
        const itemStyle = { backgroundColor };
        return <b className={styles.blockItem} style={itemStyle} />;
    }
}
BlockItem.propTypes = {
    colorFormatter: PropTypes.func,
    index: PropTypes.number,
    // NOTE(review): `name` is declared but never read in render() — confirm
    // whether it is still needed by a parent component.
    name: PropTypes.string,
    total: PropTypes.number.isRequired,
    value: PropTypes.number.isRequired
};
BlockItem.defaultProps = {
    colorFormatter: qualitativeColorFormatter,
    index: 0
};
export default BlockItem;
|
#!/usr/bin/env bash
# Copyright (c) 2017, 2019, Oracle Corporation and/or its affiliates. All rights reserved.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
# When the customer enables the operator's external REST api (by setting
# externalRestEnabled to true when installing the operator helm chart), the customer needs
# to provide the certificate and private key for api's SSL identity too (by creating a
# tls secret before the installation of the operator helm chart).
#
# This sample script generates a self-signed certificate and private key that can be used
# for the operator's external REST api when experimenting with the operator. They should
# not be used in a production environment.
#
# The syntax of the script is:
#
# kubernetes/samples/scripts/rest/generate-external-rest-identity.sh -a <SANs> -n <namespace>
#
# Where <SANs> lists the subject alternative names to put into the generated self-signed
# certificate for the external WebLogic Operator REST https interface, for example:
#
# DNS:myhost,DNS:localhost,IP:127.0.0.1
#
# You should include the addresses of all masters and load balancers in this list. The certificate
# cannot be conveniently changed after installation of the operator.
#
# The script creates the secret in the weblogic-operator namespace with the self-signed
# certificate and private key
#
# Example usage:
# generate-external-rest-identity.sh -a IP:127.0.0.1 -n weblogic-operator > my_values.yaml
# echo "externalRestEnabled: true" >> my_values.yaml
# ...
# helm install kubernetes/charts/weblogic-operator --name my_operator --namespace my_operator-ns --values my_values.yaml --wait
usage(){
cat <<EOF
Usage: $0 [options] -a <subject alternative names> -n <namespace>
Options:
-a SANS Required, the SANs for the certificate
-n NAMESPACE Required, the namespace where the secret will be created.
-s SECRET_NAME Optional, the name of the kubernetes secret. Default is: weblogic-operator-external-rest-identity.
-h, --help Display this help text.
EOF
exit 1
}
if [ ! -x "$(command -v keytool)" ]; then
echo "Can't find keytool. Please add it to the path."
exit 1
fi
if [ ! -x "$(command -v openssl)" ]; then
echo "Can't find openssl. Please add it to the path."
exit 1
fi
if [ ! -x "$(command -v base64)" ]; then
echo "Can't find base64. Please add it to the path."
exit 1
fi
TEMP_DIR=`mktemp -d`
if [ $? -ne 0 ]; then
echo "$0: Can't create temp directory."
exit 1
fi
if [ -z $TEMP_DIR ]; then
echo "Can't create temp directory."
exit 1
fi
function cleanup {
rm -r $TEMP_DIR
if [[ $SUCCEEDED != "true" ]]; then
exit 1
fi
}
set -e
#set -x
trap "cleanup" EXIT
SECRET_NAME="weblogic-operator-external-rest-identity"
# Parse command-line options (see usage for the documented flags).
while [ $# -gt 0 ]
do
  key="$1"
  case $key in
    -a)
      shift # past argument
      if [ $# -eq 0 ] || [ ${1:0:1} == "-" ]; then echo "SANs is required and is missing"; usage; fi
      SANS=$1
      shift # past value
      ;;
    -n)
      shift # past argument
      if [ $# -eq 0 ] || [ ${1:0:1} == "-" ]; then echo "Namespace is required and is missing"; usage; fi
      NAMESPACE=$1
      shift # past value
      ;;
    -s)
      shift # past argument
      if [ $# -eq 0 ] || [ ${1:0:1} == "-" ]; then echo "Invalid secret name $1"; usage; fi
      SECRET_NAME=$1
      shift # past value
      ;;
    -h|--help)
      # BUG FIX: "-h" previously just consumed the flag and did nothing, and
      # the documented "--help" fell through to the catch-all below (where it
      # was silently treated as the SANs value).  Both now display the help
      # text and exit, as the usage message promises.
      usage
      ;;
    *)
      SANS=$1
      shift # past argument
      ;;
  esac
done
# Verify the required options were supplied.
# BUG FIX: the original had a bare "1>&2" on its own line, which is a no-op;
# the messages went to stdout, polluting the yaml that the documented usage
# ("generate-external-rest-identity.sh ... > my_values.yaml") captures.
# The redirection is now attached to each echo so errors go to stderr.
if [ -z "$SANS" ]
then
  echo "SANs is required and is missing" 1>&2
  usage
fi
if [ -z "$NAMESPACE" ]
then
  echo "Namespace is required and is missing" 1>&2
  usage
fi
DAYS_VALID="3650"
TEMP_PW="temp_password"
OP_PREFIX="weblogic-operator"
OP_ALIAS="${OP_PREFIX}-alias"
OP_JKS="${TEMP_DIR}/${OP_PREFIX}.jks"
OP_PKCS12="${TEMP_DIR}/${OP_PREFIX}.p12"
OP_CSR="${TEMP_DIR}/${OP_PREFIX}.csr"
OP_CERT_PEM="${TEMP_DIR}/${OP_PREFIX}.cert.pem"
OP_KEY_PEM="${TEMP_DIR}/${OP_PREFIX}.key.pem"
# generate a keypair for the operator's REST service, putting it in a keystore
keytool \
-genkey \
-keystore ${OP_JKS} \
-alias ${OP_ALIAS} \
-storepass ${TEMP_PW} \
-keypass ${TEMP_PW} \
-keysize 2048 \
-keyalg RSA \
-validity ${DAYS_VALID} \
-dname "CN=weblogic-operator" \
-ext KU=digitalSignature,nonRepudiation,keyEncipherment,dataEncipherment,keyAgreement \
-ext SAN="${SANS}" \
2> /dev/null
# extract the cert to a pem file
keytool \
-exportcert \
-keystore ${OP_JKS} \
-storepass ${TEMP_PW} \
-alias ${OP_ALIAS} \
-rfc \
> ${OP_CERT_PEM} 2> /dev/null
# convert the keystore to a pkcs12 file
keytool \
-importkeystore \
-srckeystore ${OP_JKS} \
-srcstorepass ${TEMP_PW} \
-destkeystore ${OP_PKCS12} \
-srcstorepass ${TEMP_PW} \
-deststorepass ${TEMP_PW} \
-deststoretype PKCS12 \
2> /dev/null
# extract the private key from the pkcs12 file to a pem file
openssl \
pkcs12 \
-in ${OP_PKCS12} \
-passin pass:${TEMP_PW} \
-nodes \
-nocerts \
-out ${OP_KEY_PEM} \
2> /dev/null
set +e
# Check if namespace exist
kubectl get namespace $NAMESPACE >/dev/null 2>/dev/null
if [ $? -eq 1 ]; then
echo "Namespace $NAMESPACE does not exist"
exit 1
fi
kubectl get secret $SECRET_NAME -n $NAMESPACE >/dev/null 2>/dev/null
if [ $? -eq 1 ]; then
kubectl create secret tls "$SECRET_NAME" --cert=${OP_CERT_PEM} --key=${OP_KEY_PEM} -n $NAMESPACE >/dev/null
fi
echo "externalRestIdentitySecret: $SECRET_NAME"
SUCCEEDED=true
|
const html = require('choo/html')
module.exports = () => html`
<section id="community" class="w-100 center bg-light-gray pa3 mv3 tl">
<h1 class="dark-gray f2 tc">Community</h1>
<p class="f3 w-80-ns lh-copy ph5-ns mid-gray center">
ScienceFair v1 was developed by
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://codeforscience.org" target="_blank">Code for Science</a>
and
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="http://fathomlabs.io" target="_blank">FathomLabs</a>
along with
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://github.com/codeforscience/sciencefair/graphs/contributors" target="_blank">other contributors</a>,
with support from
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://science.mozilla.org" target="_blank">Mozilla Science Lab</a>,
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://elifesciences.org" target="_blank">eLife</a>
and the
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://datproject.org" target="_blank">Dat</a>
project.
</p>
<p class="f3 w-80-ns lh-copy ph5-ns mid-gray center">
If you find a bug in ScienceFair please report it on the
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://github.com/codeforscience/sciencefair/issues" target="_blank">issue tracker</a>.
You can chat with us any time on
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://webchat.freenode.net/?channels=sciencefair" target="_blank">IRC (#sciencefair on freenode.net)</a>.
</p>
<p class="f3 w-80-ns lh-copy ph5-ns mid-gray center">
ScienceFair is an Open Source project. Code is licensed under the
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://github.com/codeforscience/sciencefair/blob/master/LICENSE" target="_blank">MIT license</a>
and hosted
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://github.com/codeforscience/sciencefair" target="_blank">on Github</a>.
We
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://github.com/codeforscience/sciencefair/blob/master/CONTRIBUTING.md" target="_blank">welcome contributions</a>
of any kind from anyone, provided you follow our
<a class="no-underline bg-animate hover-bg-light-blue silver hover-dark-gray" href="https://github.com/codeforscience/sciencefair/blob/master/CODE_OF_CONDUCT.md" target="_blank">code of conduct</a>.
</p>
<div class="w-80 ph3-ns pv4 tc center">
<div class="cf w-50-ns ph2-ns tc center">
<div class="dib fl w-100 w-50-ns tc">
<img src="assets/codeforscience_logo.png" alt="Code for Science"/>
</div>
<div class="dib fl w-100 w-50-ns tc">
<img src="assets/fathomlabs_logo.png" alt="Fathom Labs"/>
</div>
</div>
<div class="cf w-two-thirds-ns ph2-ns pv2 tc center">
<div class="dib fl w-100 w-third-ns tc">
<img src="assets/sciencelab_logo.png" alt="Mozilla Science Lab"/>
</div>
<div class="dib fl w-100 w-third-ns tc">
<img src="assets/dat_logo.png" alt="Dat data"/>
</div>
<div class="dib fl w-100 w-third-ns tc">
<img src="assets/elife_logo.png" alt="eLife Sciences"/>
</div>
</div>
</div>
</section>
`
|
#!/bin/sh
# Print each dtoverlay value from the Raspberry Pi boot config, quoted.
#
# Multi-line variant (for the SV API):
#   grep "dtoverlay" /mnt/boot/config.txt | awk -F= 'BEGIN { ORS="" }; {print "\""$2"\""}' | head -c -1
#
# Single-line variant: cut -c 11- strips the leading 'dtoverlay=' (10 chars).
# CLEANUP: the original piped cat into "grep -r pattern file"; grep ignores
# stdin when given a file operand, so the cat was useless and -r pointless.
# Output is unchanged.
grep "dtoverlay" /mnt/boot/config.txt | cut -c 11- | awk '{print "\""$1"\""}'
def quick_sort(array):
    """Return a new list with the elements of *array* in ascending order.

    Classic recursive quicksort: the first element is the pivot; the
    remaining elements are partitioned into <=-pivot and >-pivot sublists,
    each sorted recursively. The input list is not modified.
    """
    if len(array) <= 1:
        return array
    pivot, rest = array[0], array[1:]
    smaller = [item for item in rest if item <= pivot]
    larger = [item for item in rest if item > pivot]
    return quick_sort(smaller) + [pivot] + quick_sort(larger)
if __name__ == '__main__':
array = [2, 4, 1, 5, 3]
print(quick_sort(array)) # prints [1, 2, 3, 4, 5] |
<reponame>tvalodia/battletron<filename>webapp/src/app/spectate-game/spectate-game.component.ts
import {Component, OnInit, ViewChild} from '@angular/core';
import {GameService} from "../api/game.service";
import {WebsocketService} from "../game-view/websocket.service";
import {Game, GameViewService} from "../game-view/game-view.service";
import {GameViewComponent} from "../game-view/game-view.component";
import {SpectateGame} from "./spectate-game";
@Component({
selector: 'app-spectate-game',
templateUrl: './spectate-game.component.html',
styleUrls: ['./spectate-game.component.css'],
providers: [WebsocketService, GameViewService]
})
/**
 * Lists the games currently known to the server and lets the user pick one
 * to spectate; the selected game's state is pushed into the embedded
 * GameViewComponent.
 */
export class SpectateGameComponent implements OnInit {
  // Holds the spectator's websocket session id (set via onSessionId).
  spectateGameData: SpectateGame = new SpectateGame();
  // Games fetched from the backend for display.
  games: Array<Game>;
  // Reference to the child component that renders the selected game.
  @ViewChild('gameView') gameViewRef: GameViewComponent;

  constructor(private gameService: GameService) {
  }

  /** Load the game list once the component is initialized. */
  ngOnInit() {
    this.getGames();
  }

  /** Record the websocket session id emitted by the game view. */
  onSessionId(sessionId: string) {
    this.spectateGameData.sessionId = sessionId;
  }

  /** Fetch all games from the server and store them for display. */
  public getGames() {
    this.gameService.getGames().subscribe((data: Array<Game>) => {
      this.games = data;
      console.log(data);
    });
  }

  /** Re-fetch the game list on demand (e.g. a refresh button). */
  public refresh() {
    this.getGames();
  }

  /** Subscribe to the chosen game and hand its state to the game view. */
  public spectateGame(gameId: number) {
    this.gameService.spectateGame(gameId, this.spectateGameData).subscribe((data: Game) => {
      this.gameViewRef.setGame(data);
      console.log(gameId);
    });
  }
}
|
// Tests for P's remove() behavior: removing head, tail, everything,
// and items that are not (or no longer) in the pool.
const assert = require('assert') // kept from the original file; unused here
const P = require('./index.js')
const t = require('tap')

// Shared fixtures — remove() matches by object identity, not deep equality.
const a = { a: 1 }
const b = { b: 1 }

t.jobs = 64
process.env.TAP_BUFFER = 1

// Build a fresh pool pre-loaded with the two fixture items.
function pairPool () {
  const pool = new P()
  pool.add(a)
  pool.add(b)
  return pool
}

t.test(function removeFirstItem (t) {
  const pool = pairPool()
  pool.remove(a)
  t.equal(pool.length, 1)
  t.equal(pool.head, pool.tail)
  t.equal(pool.head.data, b)
  t.end()
})

t.test(function removeTail (t) {
  const pool = pairPool()
  pool.remove(b)
  t.equal(pool.length, 1)
  t.equal(pool.head, pool.tail)
  t.equal(pool.head.data, a)
  t.end()
})

t.test(function removeAll (t) {
  const pool = pairPool()
  pool.remove(a)
  pool.remove(b)
  t.equal(pool.length, 0)
  t.equal(pool.head, pool.tail)
  t.equal(pool.head, null)
  t.end()
})

t.test(function removeExtra (t) {
  const pool = pairPool()
  pool.remove(b)
  // Removing unknown or already-removed items must be a harmless no-op.
  pool.remove({some: 'thing not in there'})
  pool.remove(a)
  pool.remove(a)
  pool.remove(a)
  pool.remove(a)
  t.equal(pool.length, 0)
  t.equal(pool.head, pool.tail)
  t.equal(pool.head, null)
  t.end()
})
|
<filename>Modules/IO/ImageIO/include/otbScalarBufferToImageFileWriter.hxx
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbScalarBufferToImageFileWriter_hxx
#define otbScalarBufferToImageFileWriter_hxx
#include "otbMacro.h"
#include "otbScalarBufferToImageFileWriter.h"
#include "itkImageRegionIterator.h"
#include "itkImageRegionIteratorWithIndex.h"
namespace otb
{
// Default constructor: no buffer attached yet, zero channels, normal
// (top-to-bottom) row order.
template<class TBufferType, class TOutputPixelType>
ScalarBufferToImageFileWriter<TBufferType, TOutputPixelType>
::ScalarBufferToImageFileWriter() : m_Buffer(nullptr), m_NumberOfChannels(0), m_InverseXSpacing(false)
{
// Actual file output is delegated to an embedded writer instance.
m_Writer = WriterType::New();
// Size 0 marks "not configured"; GenerateData() rejects it.
m_ImageSize.Fill(0);
}
/**
 * Copy the raw scalar buffer into a VectorImage of m_NumberOfChannels
 * components and write it to file via the embedded writer.
 *
 * The buffer is assumed to hold interleaved pixels, row-major
 * (channel-fastest). When m_InverseXSpacing is true the rows are written
 * bottom-up, i.e. the vertical axis is flipped.
 *
 * Raises an ITK exception when the image size or channel count is unset.
 */
template<class TBufferType, class TOutputPixelType>
void
ScalarBufferToImageFileWriter<TBufferType, TOutputPixelType>::GenerateData()
{
  // Check image parameters: both dimensions must be non-zero.
  // BUGFIX: the second operand previously re-tested m_ImageSize[0], so an
  // image with zero height slipped past validation.
  if( (m_ImageSize[0]==0) || (m_ImageSize[1]==0) )
    {
    itkExceptionMacro("Invalid output image size, Size can't be null.");
    }

  if( m_NumberOfChannels==0 )
    {
    itkExceptionMacro("Invalid output image number of channels.");
    }

  // Allocate the output image covering the full requested size.
  RegionType lRegion;
  IndexType lId;
  lId.Fill(0);
  lRegion.SetIndex(lId);
  lRegion.SetSize(m_ImageSize);

  typename ImageType::Pointer lImage = ImageType::New();
  lImage->SetRegions( lRegion );
  lImage->SetNumberOfComponentsPerPixel(m_NumberOfChannels);
  lImage->Allocate();

  PixelType lPix;
  lPix.SetSize(m_NumberOfChannels);
  lPix.Fill( itk::NumericTraits<OutputPixelType>::Zero );
  lImage->FillBuffer(lPix);

  // 1 specific loop for each case to save time processing
  if(m_InverseXSpacing == false)
    {
    // Straight copy: buffer order matches iterator order, so a running
    // cursor into the buffer is enough.
    itk::ImageRegionIterator<ImageType> it(lImage, lRegion);
    it.GoToBegin();
    unsigned int cpt(0);
    while( it.IsAtEnd()==false )
      {
      for(unsigned int i=0; i<m_NumberOfChannels; ++i)
        {
        lPix[i] = static_cast<OutputPixelType>(m_Buffer[cpt]);
        ++cpt;
        }
      it.Set( lPix );
      ++it;
      }
    }
  else
    {
    // Vertically flipped copy: recompute the buffer offset of each pixel
    // from its index, reading rows from the bottom of the buffer up.
    itk::ImageRegionIteratorWithIndex<ImageType> it(lImage, lRegion);
    it.GoToBegin();
    // cpt is the first component of the mirrored source pixel
    unsigned int cpt(0);
    while( it.IsAtEnd()==false )
      {
      IndexType index = it.GetIndex();
      cpt = (m_ImageSize[1] - 1 - index[1]) * m_NumberOfChannels * m_ImageSize[0] + m_NumberOfChannels * index[0];
      for(unsigned int i=0; i<m_NumberOfChannels; ++i)
        {
        lPix[i] = static_cast<OutputPixelType>(m_Buffer[cpt+i]);
        }
      it.Set( lPix );
      ++it;
      }
    }

  m_Writer->SetInput( lImage );
  m_Writer->Update();
}
// Print the writer configuration following the ITK PrintSelf convention.
template<class TBufferType, class TOutputPixelType>
void
ScalarBufferToImageFileWriter<TBufferType, TOutputPixelType>
::PrintSelf(std::ostream& os, itk::Indent indent) const
{
  Superclass::PrintSelf(os, indent);
  // BUGFIX: separators were missing, so labels and values ran together
  // (e.g. "FileName/tmp/out.tif"). ITK convention is "Label: value".
  os << indent << "FileName: " << m_Writer->GetFileName() << std::endl;
  os << indent << "Size: " << m_ImageSize << std::endl;
  os << indent << "NumberOfChannels: " << m_NumberOfChannels << std::endl;
}
} // end namespace otb
#endif
|
TERMUX_PKG_HOMEPAGE=https://github.com/termux/science-packages
TERMUX_PKG_DESCRIPTION="Package repository containing science software"
TERMUX_PKG_LICENSE="Apache-2.0"
TERMUX_PKG_MAINTAINER="Henrik Grimler @Grimler91"
TERMUX_PKG_VERSION=1.0
TERMUX_PKG_DEPENDS="termux-keyring"
TERMUX_PKG_SKIP_SRC_EXTRACT=true
TERMUX_PKG_PLATFORM_INDEPENDENT=true
# Install step: drop an apt sources.list.d entry pointing at the science repo.
termux_step_make_install() {
	# BUGFIX: quote $TERMUX_PREFIX expansions so a prefix containing spaces
	# or glob characters cannot break word splitting / redirection targets.
	mkdir -p "$TERMUX_PREFIX/etc/apt/sources.list.d"
	echo "deb https://dl.bintray.com/grimler/science-packages-24 science stable" > "$TERMUX_PREFIX/etc/apt/sources.list.d/science.list"
}
# Generate the package's postinst maintainer script: after install,
# refresh the apt package lists so the new repo is immediately usable.
termux_step_create_debscripts() {
	# One heredoc write instead of a chain of appended echos; $TERMUX_PREFIX
	# is expanded exactly as before, the resulting file content is identical.
	cat > postinst <<POSTINST
#!$TERMUX_PREFIX/bin/sh
echo Downloading updated package list ...
apt update
exit 0
POSTINST
}
|
/**
* Copyright 2020 DreamWorks Animation L.L.C.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dreamworks.forestflow.serving.MLFlow
import cats.syntax.either._
import com.dreamworks.forestflow.serving.impl.{LocalFileArtifactReader, MLFlowH2OLoader, UnsupportedServableFlavor}
import com.dreamworks.forestflow.serving.interfaces.ArtifactReader
import com.dreamworks.forestflow.utils.SourceStorageProtocols
import io.circe.generic.extras._
import io.circe.generic.extras.semiauto.deriveDecoder
import io.circe.{CursorOp, Decoder, DecodingFailure}
import com.dreamworks.forestflow.serving.interfaces.Loader
import com.dreamworks.forestflow.utils.ThrowableImplicits._
import org.joda.time.{DateTimeZone, LocalDateTime}
/**
 * Parsed representation of an MLFlow "MLmodel" specification file.
 *
 * @param artifactReader reader used to fetch model artifacts
 *                       (decoded from the spec's "artifact_path" field,
 *                       see the Configuration in the companion object)
 * @param runId          optional MLFlow run id
 * @param timeCreated    creation time in epoch millis
 *                       (decoded from the "utc_time_created" string)
 * @param flavors        flavor name -> loader for that flavor
 */
case class MLFlowModelSpec(
  artifactReader: ArtifactReader,
  runId: Option[String],
  timeCreated: Long,
  flavors: Map[String, Loader]
) {
  /** First flavor whose loader is actually supported, if any. */
  def getServableFlavor: Option[(String, Loader)] = flavors.collectFirst { case (flavor, loader) if !loader.isInstanceOf[UnsupportedServableFlavor] => (flavor, loader) }
}
/**
 * circe decoders for [[MLFlowModelSpec]], tolerant of the field naming and
 * formatting quirks of MLFlow's generated MLmodel YAML/JSON.
 */
object MLFlowModelSpec {

  implicit val config: Configuration = {
    val baseConfig = Configuration.default.withSnakeCaseMemberNames
    baseConfig.copy(transformMemberNames = baseConfig.transformMemberNames andThen {
      // from snake_case in class to snake_case file
      case "artifact_reader" => "artifact_path"
      case "time_created" => "utc_time_created" // utc_time_created is a string!
      case other => other
    })
  }

  /**
   * Decodes "utc_time_created" into epoch millis. MLFlow writes it as a
   * space-separated timestamp string; joda's LocalDateTime parser needs the
   * ISO 'T' separator. Falls back to a plain Long when the field is numeric.
   */
  implicit val decodeTimeCreated: Decoder[Long] = Decoder.decodeString.emap { tm: String =>
    Either.catchNonFatal[Long]({
      // BUGFIX/cleanup: a local `ts` was previously built here (appending a
      // trailing "Z") but never used — the parse always ran on `tm`. The
      // dead computation is removed; behavior is unchanged.
      val ll = LocalDateTime.parse(tm.replace(" ", "T")).toDateTime(DateTimeZone.UTC)
      ll.getMillis
    }
    ).leftMap(t => s"timeCreated Decoder Failed: ${t.printableStackTrace}")
  }.handleErrorWith(_ => Decoder.decodeLong)

  implicit val decodeMLFlowModel: Decoder[MLFlowModelSpec] = deriveDecoder[MLFlowModelSpec]

  /** Missing/absent artifact path falls back to a local reader rooted at "". */
  implicit val decodeArtifactReaderString: Decoder[ArtifactReader] = Decoder.decodeOption[String].emap { artifactPath: Option[String] =>
    Either.catchNonFatal[ArtifactReader]({
      artifactPath match {
        case Some(path) => ArtifactReader.getArtifactReader(path)
        case _ => LocalFileArtifactReader("")
      }
    }
    ).leftMap(t => s"Artifact Reader Decoder Failed: ${t.printableStackTrace}")
  }

  implicit val decodeServableFlavor: Decoder[Map[String, Loader]] = Decoder.decodeMap[String, Map[String, String]].emap { flavors =>
    Either.catchNonFatal[Map[String, Loader]](
      flavors
        .map { case (flavor, props) => (flavor.toLowerCase, props) }
        .map {
          case (f@"h2o_mojo", props) => f -> MLFlowH2OLoader(dataPath = props.getOrElse("data", ""), version = props.get("h2o_version"))
          /***
           * UnsupportedServableFlavor catch-all case must exist otherwise any flavor that we don't support will
           * immediately raise an error without checking ability for support of other supplied flavors.
           */
          case (f, props) => f -> UnsupportedServableFlavor(props)
          // BUGFIX/cleanup: a further case that threw DecodingFailure here was
          // unreachable (the catch-all above matches every pair) and triggered
          // a compiler warning; it has been removed without behavior change.
          // TODO: Support POJO?
          // case (f, _) => p -> BasicSourceProvider()
        }
    ).leftMap(t => t.printableStackTrace)
  }
}
|
"use strict";

// Home-page controller action (generator style, koa v1-era middleware):
// `this` is the request context. Renders the '/home/index' template with
// an empty view model. NOTE(review): `this.render` is presumably added by
// a view middleware (e.g. co-views) — confirm against app setup.
exports.index = function *() {
    yield this.render('/home/index', {});
};
#!/usr/bin/env bash
# Run the integration test suite. When INTEGRATION_OUTPUT_JUNIT=true,
# additionally convert `go test` output to JUnit XML under reports/ for CI.

# Fail fast on errors, unset variables and pipeline failures; trace commands.
set -o errexit
set -o nounset
set -o pipefail
set -o xtrace

# Default to plain (non-JUnit) output when the variable is unset.
INTEGRATION_OUTPUT_JUNIT=${INTEGRATION_OUTPUT_JUNIT:-false}

if [ "$INTEGRATION_OUTPUT_JUNIT" == true ]
then
    echo "Running integration tests with junit output"
    mkdir -p reports/

    # Converter that turns `go test -v` output into JUnit XML.
    go get github.com/jstemmer/go-junit-report

    # `tee /dev/fd/2` keeps the live output visible while go-junit-report
    # consumes the stream; -set-exit-code preserves the test failure status
    # (required because the pipeline's last command decides the exit code).
    go test -tags integration ./pkg/... ./cmd/... -v -mod=readonly -coverprofile cover-integration.out 2>&1 |tee /dev/fd/2 |go-junit-report -set-exit-code > reports/integration_report.xml
    go run ./cmd/kubectl-kudo test 2>&1 |tee /dev/fd/2 |go-junit-report -set-exit-code > reports/kudo_test_report.xml
else
    echo "Running integration tests without junit output"
    go test -tags integration ./pkg/... ./cmd/... -v -mod=readonly -coverprofile cover-integration.out
    go run ./cmd/kubectl-kudo test
fi
import React from "react"
import Container from "../components/Container"
import Content from "../components/Content"
import SEO from "../components/SEO"
export default () => {
return (
<React.Fragment>
<SEO title={'Datenschutzerklärung'}/>
<Container>
<Content>
<h1>Datenschutzerklärung</h1>
<p>Eine Nutzung der Internetseiten der
<NAME>pler ist grundsätzlich ohne jede Angabe personenbezogener Daten möglich. Sofern eine
betroffene Person besondere Services über diese Internetseite in Anspruch nehmen möchte,
könnte jedoch eine Verarbeitung personenbezogener Daten erforderlich werden. Ist die Verarbeitung
personenbezogener Daten erforderlich und besteht für eine solche Verarbeitung keine gesetzliche Grundlage,
holt <NAME> generell eine Einwilligung der betroffenen Person ein.</p>
<p>Die Verarbeitung personenbezogener Daten, beispielsweise des Namens, der Anschrift, E-Mail-Adresse oder
Telefonnummer einer betroffenen Person, erfolgt stets im Einklang mit der Datenschutz-Grundverordnung und in
Übereinstimmung mit den für Anja Wippler geltenden landesspezifischen
Datenschutzbestimmungen. Mittels dieser Datenschutzerklärung möchte <NAME> die Öffentlichkeit über
Art, Umfang und Zweck der von uns erhobenen, genutzten und verarbeiteten personenbezogenen Daten
informieren.
Ferner werden betroffene Personen mittels dieser Datenschutzerklärung über die ihnen zustehenden Rechte
aufgeklärt.</p>
<p><NAME> hat als für die Verarbeitung Verantwortlicher zahlreiche technische und
organisatorische Maßnahmen umgesetzt, um einen möglichst lückenlosen Schutz der über diese Internetseite
verarbeiteten personenbezogenen Daten sicherzustellen. Dennoch können Internetbasierte Datenübertragungen
grundsätzlich Sicherheitslücken aufweisen, sodass ein absoluter Schutz nicht gewährleistet werden kann. Aus
diesem Grund steht es jeder betroffenen Person frei, personenbezogene Daten auch auf alternativen Wegen,
beispielsweise telefonisch, an uns zu übermitteln.</p>
<h3>1. Begriffsbestimmungen</h3>
<p>Die Datenschutzerklärung von <NAME> beruht auf den Begrifflichkeiten, die durch den
Europäischen Richtlinien- und Verordnungsgeber beim Erlass der Datenschutz-Grundverordnung (DS-GVO)
verwendet
wurden. Diese Datenschutzerklärung soll für die Öffentlichkeit einfach lesbar und verständlich sein. Um dies
zu gewährleisten, möchte <NAME> vorab die verwendeten Begrifflichkeiten erläutern.</p>
<p>Wir verwenden in dieser Datenschutzerklärung unter anderem die folgenden Begriffe:</p>
<ul style={{ listStyle: "none" }}>
<li><h3>a) personenbezogene Daten</h3>
<p>Personenbezogene Daten sind alle Informationen, die sich auf eine identifizierte oder identifizierbare
natürliche Person (im Folgenden „betroffene Person“) beziehen. Als identifizierbar wird eine natürliche
Person angesehen, die direkt oder indirekt, insbesondere mittels Zuordnung zu einer Kennung wie einem
Namen,
zu einer Kennnummer, zu Standortdaten, zu einer Online-Kennung oder zu einem oder mehreren besonderen
Merkmalen, die Ausdruck der physischen, physiologischen, genetischen, psychischen, wirtschaftlichen,
kulturellen oder sozialen Identität dieser natürlichen Person sind, identifiziert werden kann.</p>
</li>
<li><h3>b) betroffene Person</h3>
<p>Betroffene Person ist jede identifizierte oder identifizierbare natürliche Person, deren
personenbezogene
Daten von dem für die Verarbeitung Verantwortlichen verarbeitet werden.</p>
</li>
<li><h3>c) Verarbeitung</h3>
<p>Verarbeitung ist jeder mit oder ohne Hilfe automatisierter Verfahren ausgeführte Vorgang oder jede
solche
Vorgangsreihe im Zusammenhang mit personenbezogenen Daten wie das Erheben, das Erfassen, die
Organisation,
das Ordnen, die Speicherung, die Anpassung oder Veränderung, das Auslesen, das Abfragen, die Verwendung,
die
Offenlegung durch Übermittlung, Verbreitung oder eine andere Form der Bereitstellung, den Abgleich oder
die
Verknüpfung, die Einschränkung, das Löschen oder die Vernichtung.</p>
</li>
<li><h3>d) Einschränkung der Verarbeitung</h3>
<p>Einschränkung der Verarbeitung ist die Markierung gespeicherter personenbezogener Daten mit dem Ziel,
ihre
künftige Verarbeitung einzuschränken.</p>
</li>
<li><h3>e) Profiling</h3>
<p>Profiling ist jede Art der automatisierten Verarbeitung personenbezogener Daten, die darin besteht,
dass
diese personenbezogenen Daten verwendet werden, um bestimmte persönliche Aspekte, die sich auf eine
natürliche Person beziehen, zu bewerten, insbesondere, um Aspekte bezüglich Arbeitsleistung,
wirtschaftlicher Lage, Gesundheit, persönlicher Vorlieben, Interessen, Zuverlässigkeit, Verhalten,
Aufenthaltsort oder Ortswechsel dieser natürlichen Person zu analysieren oder vorherzusagen.</p>
</li>
<li><h3>f) Pseudonymisierung</h3>
<p>Pseudonymisierung ist die Verarbeitung personenbezogener Daten in einer Weise, auf welche die
personenbezogenen Daten ohne Hinzuziehung zusätzlicher Informationen nicht mehr einer spezifischen
betroffenen Person zugeordnet werden können, sofern diese zusätzlichen Informationen gesondert
aufbewahrt
werden und technischen und organisatorischen Maßnahmen unterliegen, die gewährleisten, dass die
personenbezogenen Daten nicht einer identifizierten oder identifizierbaren natürlichen Person zugewiesen
werden.</p>
</li>
<li><h3>g) Verantwortlicher oder für die Verarbeitung Verantwortlicher</h3>
<p>Verantwortlicher oder für die Verarbeitung Verantwortlicher ist die natürliche oder juristische Person,
Behörde, Einrichtung oder andere Stelle, die allein oder gemeinsam mit anderen über die Zwecke und
Mittel
der Verarbeitung von personenbezogenen Daten entscheidet. Sind die Zwecke und Mittel dieser Verarbeitung
durch das Unionsrecht oder das Recht der Mitgliedstaaten vorgegeben, so kann der Verantwortliche
beziehungsweise können die bestimmten Kriterien seiner Benennung nach dem Unionsrecht oder dem Recht der
Mitgliedstaaten vorgesehen werden.</p>
</li>
<li><h3>h) Auftragsverarbeiter</h3>
<p>Auftragsverarbeiter ist eine natürliche oder juristische Person, Behörde, Einrichtung oder andere
Stelle,
die personenbezogene Daten im Auftrag des Verantwortlichen verarbeitet.</p>
</li>
<li><h3>i) Empfänger</h3>
<p>Empfänger ist eine natürliche oder juristische Person, Behörde, Einrichtung oder andere Stelle, der
personenbezogene Daten offengelegt werden, unabhängig davon, ob es sich bei ihr um einen Dritten handelt
oder nicht. Behörden, die im Rahmen eines bestimmten Untersuchungsauftrags nach dem Unionsrecht oder dem
Recht der Mitgliedstaaten möglicherweise personenbezogene Daten erhalten, gelten jedoch nicht als
Empfänger.</p>
</li>
<li><h3>j) Dritter</h3>
<p>Dritter ist eine natürliche oder juristische Person, Behörde, Einrichtung oder andere Stelle außer der
betroffenen Person, dem Verantwortlichen, dem Auftragsverarbeiter und den Personen, die unter der
unmittelbaren Verantwortung des Verantwortlichen oder des Auftragsverarbeiters befugt sind, die
personenbezogenen Daten zu verarbeiten.</p>
</li>
<li><h3>k) Einwilligung</h3>
<p>Einwilligung ist jede von der betroffenen Person freiwillig für den bestimmten Fall in informierter
Weise
und unmissverständlich abgegebene Willensbekundung in Form einer Erklärung oder einer sonstigen
eindeutigen
bestätigenden Handlung, mit der die betroffene Person zu verstehen gibt, dass sie mit der Verarbeitung
der
sie betreffenden personenbezogenen Daten einverstanden ist.</p>
</li>
</ul>
<h3>2. Name und Anschrift des für die Verarbeitung Verantwortlichen</h3>
<p>Verantwortlicher im Sinne der Datenschutz-Grundverordnung, sonstiger in den Mitgliedstaaten der
Europäischen
Union geltenden Datenschutzgesetze und anderer Bestimmungen mit datenschutzrechtlichem Charakter ist:</p>
<p>
<NAME><br/>
Anton-Graff-Str. 28<br/>
D-01309 Dresden<br/>
<abbr title="Telefon">Tel:</abbr> +49 176 21534966<br/>
<abbr title="E-Mail-Adresse">E-Mail:</abbr>anja.wippler(a)icloud.com<br/>
Website: www.anjawippler.de
</p>
<h3>3. Name und Anschrift des Datenschutzbeauftragten</h3>
<p>Der Datenschutzbeauftragte des für die Verarbeitung Verantwortlichen ist:</p>
<p><NAME></p>
<p>
<NAME><br/>
Anton-Graff-Str. 28<br/>
D-01309 Dresden<br/>
<abbr title="Telefon">Tel:</abbr> +49 176 21534966<br/>
<abbr title="E-Mail-Adresse">E-Mail:</abbr>anja.wippler(a)icloud.com<br/>
Website: www.anjawippler.de
</p>
<p>Jede betroffene Person kann sich jederzeit bei allen Fragen und Anregungen zum Datenschutz direkt an den
Datenschutzbeauftragten wenden.</p>
<h3>4. Erfassung von allgemeinen Daten und Informationen</h3>
<p>Die Internetseite von <NAME> erfasst mit jedem Aufruf der Internetseite durch eine
betroffene Person oder ein automatisiertes System eine Reihe von allgemeinen Daten und Informationen. Diese
allgemeinen Daten und Informationen werden in den Logfiles des Servers gespeichert. Erfasst werden können
die
(1) verwendeten Browsertypen und Versionen, (2) das vom zugreifenden System verwendete Betriebssystem, (3)
die
Internetseite, von welcher ein zugreifendes System auf diese Internetseite gelangt (sogenannte Referrer),
(4)
die Unterwebseiten, welche über ein zugreifendes System auf diese Internetseite angesteuert werden, (5) das
Datum und die Uhrzeit eines Zugriffs auf die Internetseite, (6) eine Internet-Protokoll-Adresse
(IP-Adresse),
(7) der Internet-Service-Provider des zugreifenden Systems und (8) sonstige ähnliche Daten und
Informationen,
die der Gefahrenabwehr im Falle von Angriffen auf die informationstechnologischen Systeme dienen.</p>
<p>Bei der Nutzung dieser allgemeinen Daten und Informationen zieht <NAME> keine
Rückschlüsse auf die betroffene Person. Diese Informationen werden vielmehr benötigt, um (1) die Inhalte
dieser
Internetseite korrekt auszuliefern, (2) die Inhalte dieser Internetseite sowie die Werbung für diese zu
optimieren, (3) die dauerhafte Funktionsfähigkeit der informationstechnologischen Systeme und der Technik
diese Internetseite zu gewährleisten sowie (4) um Strafverfolgungsbehörden im Falle eines Cyberangriffes die
zur Strafverfolgung notwendigen Informationen bereitzustellen. Diese anonym erhobenen Daten und
Informationen
werden durch <NAME> daher einerseits statistisch und ferner mit dem Ziel
ausgewertet,
den Datenschutz und die Datensicherheit zu erhöhen, um letztlich ein optimales
Schutzniveau für die von uns verarbeiteten personenbezogenen Daten sicherzustellen. Die anonymen Daten der
Server-Logfiles werden getrennt von allen durch eine betroffene Person angegebenen personenbezogenen Daten
gespeichert.</p>
<h3>5. Routinemäßige Löschung und Sperrung von personenbezogenen Daten</h3>
<p>Der für die Verarbeitung Verantwortliche verarbeitet und speichert personenbezogene Daten der betroffenen
Person nur für den Zeitraum, der zur Erreichung des Speicherungszwecks erforderlich ist oder sofern dies
durch
den Europäischen Richtlinien- und Verordnungsgeber oder einen anderen Gesetzgeber in Gesetzen oder
Vorschriften,
welchen der für die Verarbeitung Verantwortliche unterliegt, vorgesehen wurde.</p>
<p>Entfällt der Speicherungszweck oder läuft eine vom Europäischen Richtlinien- und Verordnungsgeber oder
einem
anderen zuständigen Gesetzgeber vorgeschriebene Speicherfrist ab, werden die personenbezogenen Daten
routinemäßig und entsprechend den gesetzlichen Vorschriften gesperrt oder gelöscht.</p>
<h3>6. Rechte der betroffenen Person</h3>
<ul style={{ listStyle: "none" }}>
<li><h3>a) Recht auf Bestätigung</h3>
<p>Jede betroffene Person hat das vom Europäischen Richtlinien- und Verordnungsgeber eingeräumte Recht,
von
dem für die Verarbeitung Verantwortlichen eine Bestätigung darüber zu verlangen, ob sie betreffende
personenbezogene Daten verarbeitet werden. Möchte eine betroffene Person dieses Bestätigungsrecht in
Anspruch nehmen, kann sie sich hierzu jederzeit an einen Mitarbeiter des für die Verarbeitung
Verantwortlichen wenden.</p>
</li>
<li><h3>b) Recht auf Auskunft</h3>
<p>Jede von der Verarbeitung personenbezogener Daten betroffene Person hat das vom Europäischen
Richtlinien-
und Verordnungsgeber gewährte Recht, jederzeit von dem für die Verarbeitung Verantwortlichen
unentgeltliche
Auskunft über die zu seiner Person gespeicherten personenbezogenen Daten und eine Kopie dieser Auskunft
zu
erhalten. Ferner hat der Europäische Richtlinien- und Verordnungsgeber der betroffenen Person Auskunft
über
folgende Informationen zugestanden:</p>
<ul style={{ listStyle: "none" }}>
<li>die Verarbeitungszwecke</li>
<li>die Kategorien personenbezogener Daten, die verarbeitet werden</li>
<li>die Empfänger oder Kategorien von Empfängern, gegenüber denen die personenbezogenen Daten
offengelegt
worden sind oder noch offengelegt werden, insbesondere bei Empfängern in Drittländern oder bei
internationalen Organisationen
</li>
<li>falls möglich die geplante Dauer, für die die personenbezogenen Daten gespeichert werden, oder,
falls
dies nicht möglich ist, die Kriterien für die Festlegung dieser Dauer
</li>
<li>das Bestehen eines Rechts auf Berichtigung oder Löschung der sie betreffenden personenbezogenen
Daten
oder auf Einschränkung der Verarbeitung durch den Verantwortlichen oder eines Widerspruchsrechts gegen
diese Verarbeitung
</li>
<li>das Bestehen eines Beschwerderechts bei einer Aufsichtsbehörde</li>
<li>wenn die personenbezogenen Daten nicht bei der betroffenen Person erhoben werden: Alle verfügbaren
Informationen über die Herkunft der Daten
</li>
<li>das Bestehen einer automatisierten Entscheidungsfindung einschließlich Profiling gemäß Artikel 22
Abs.1
und 4 DS-GVO und — zumindest in diesen Fällen — aussagekräftige Informationen über die involvierte
Logik
sowie die Tragweite und die angestrebten Auswirkungen einer derartigen Verarbeitung für die betroffene
Person
</li>
</ul>
<p>Ferner steht der betroffenen Person ein Auskunftsrecht darüber zu, ob personenbezogene Daten an ein
Drittland oder an eine internationale Organisation übermittelt wurden. Sofern dies der Fall ist, so
steht
der betroffenen Person im Übrigen das Recht zu, Auskunft über die geeigneten Garantien im Zusammenhang
mit
der Übermittlung zu erhalten.</p>
<p>Möchte eine betroffene Person dieses Auskunftsrecht in Anspruch nehmen, kann sie sich hierzu jederzeit
an
einen Mitarbeiter des für die Verarbeitung Verantwortlichen wenden.</p>
</li>
<li><h3>c) Recht auf Berichtigung</h3>
<p>Jede von der Verarbeitung personenbezogener Daten betroffene Person hat das vom Europäischen
Richtlinien-
und Verordnungsgeber gewährte Recht, die unverzügliche Berichtigung sie betreffender unrichtiger
personenbezogener Daten zu verlangen. Ferner steht der betroffenen Person das Recht zu, unter
Berücksichtigung der Zwecke der Verarbeitung, die Vervollständigung unvollständiger personenbezogener
Daten
— auch mittels einer ergänzenden Erklärung — zu verlangen.</p>
<p>Möchte eine betroffene Person dieses Berichtigungsrecht in Anspruch nehmen, kann sie sich hierzu
jederzeit
an einen Mitarbeiter des für die Verarbeitung Verantwortlichen wenden.</p></li>
<li>
<h3>d) Recht auf Löschung (Recht auf Vergessen werden)</h3>
<p>Jede von der Verarbeitung personenbezogener Daten betroffene Person hat das vom Europäischen
Richtlinien-
und Verordnungsgeber gewährte Recht, von dem Verantwortlichen zu verlangen, dass die sie betreffenden
personenbezogenen Daten unverzüglich gelöscht werden, sofern einer der folgenden Gründe zutrifft und
soweit
die Verarbeitung nicht erforderlich ist:</p>
<ul style={{ listStyle: "none" }}>
<li>Die personenbezogenen Daten wurden für solche Zwecke erhoben oder auf sonstige Weise verarbeitet,
für
welche sie nicht mehr notwendig sind.
</li>
<li>Die betroffene Person widerruft ihre Einwilligung, auf die sich die Verarbeitung gemäß Art. 6 Abs. 1
Buchstabe a DS-GVO oder Art. 9 Abs. 2 Buchstabe a DS-GVO stützte, und es fehlt an einer anderweitigen
Rechtsgrundlage für die Verarbeitung.
</li>
<li>Die betroffene Person legt gemäß Art. 21 Abs. 1 DS-GVO Widerspruch gegen die Verarbeitung ein, und
es
liegen keine vorrangigen berechtigten Gründe für die Verarbeitung vor, oder die betroffene Person legt
gemäß Art. 21 Abs. 2 DS-GVO Widerspruch gegen die Verarbeitung ein.
</li>
<li>Die personenbezogenen Daten wurden unrechtmäßig verarbeitet.</li>
<li>Die Löschung der personenbezogenen Daten ist zur Erfüllung einer rechtlichen Verpflichtung nach dem
Unionsrecht oder dem Recht der Mitgliedstaaten erforderlich, dem der Verantwortliche unterliegt.
</li>
<li>Die personenbezogenen Daten wurden in Bezug auf angebotene Dienste der Informationsgesellschaft
gemäß
Art. 8 Abs. 1 DS-GVO erhoben.
</li>
</ul>
<p>Sofern einer der oben genannten Gründe zutrifft und eine betroffene Person die Löschung von
personenbezogenen Daten, die bei von <NAME> gespeichert sind, veranlassen möchte,
kann sie sich hierzu jederzeit an einen Mitarbeiter des für die Verarbeitung Verantwortlichen wenden.
Der
Mitarbeiter von <NAME> wird veranlassen, dass dem Löschverlangen unverzüglich
nachgekommen wird.</p>
<p>Wurden die personenbezogenen Daten von <NAME> öffentlich gemacht und ist <NAME> als
Verantwortlicher gemäß Art. 17 Abs. 1 DS-GVO zur Löschung der personenbezogenen Daten
verpflichtet, so trifft <NAME> unter Berücksichtigung der verfügbaren
Technologie
und der Implementierungskosten angemessene Maßnahmen, auch technischer Art, um andere für die
Datenverarbeitung Verantwortliche, welche die veröffentlichten personenbezogenen Daten verarbeiten,
darüber
in Kenntnis zu setzen, dass die betroffene Person von diesen anderen für die Datenverarbeitung
Verantwortlichen die Löschung sämtlicher Links zu diesen personenbezogenen Daten oder von Kopien oder
Replikationen dieser personenbezogenen Daten verlangt hat, soweit die Verarbeitung nicht erforderlich
ist.
Der Mitarbeiter von <NAME> wird im Einzelfall das Notwendige veranlassen.</p>
</li>
<li><h3>e) Recht auf Einschränkung der Verarbeitung</h3>
<p>Jede von der Verarbeitung personenbezogener Daten betroffene Person hat das vom Europäischen
Richtlinien-
und Verordnungsgeber gewährte Recht, von dem Verantwortlichen die Einschränkung der Verarbeitung zu
verlangen, wenn eine der folgenden Voraussetzungen gegeben ist:</p>
<ul style={{ listStyle: "none" }}>
<li>Die Richtigkeit der personenbezogenen Daten wird von der betroffenen Person bestritten, und zwar für
eine Dauer, die es dem Verantwortlichen ermöglicht, die Richtigkeit der personenbezogenen Daten zu
überprüfen.
</li>
<li>Die Verarbeitung ist unrechtmäßig, die betroffene Person lehnt die Löschung der personenbezogenen
Daten
ab und verlangt stattdessen die Einschränkung der Nutzung der personenbezogenen Daten.
</li>
<li>Der Verantwortliche benötigt die personenbezogenen Daten für die Zwecke der Verarbeitung nicht
länger,
die betroffene Person benötigt sie jedoch zur Geltendmachung, Ausübung oder Verteidigung von
Rechtsansprüchen.
</li>
<li>Die betroffene Person hat Widerspruch gegen die Verarbeitung gem. Art. 21 Abs. 1 DS-GVO eingelegt
und
es
steht noch nicht fest, ob die berechtigten Gründe des Verantwortlichen gegenüber denen der betroffenen
Person überwiegen.
</li>
</ul>
<p>Sofern eine der oben genannten Voraussetzungen gegeben ist und eine betroffene Person die Einschränkung
von
personenbezogenen Daten, die bei von <NAME> gespeichert sind, verlangen möchte,
kann
sie sich hierzu jederzeit an einen Mitarbeiter des für die Verarbeitung Verantwortlichen wenden. Der
Mitarbeiter von <NAME> wird die Einschränkung der Verarbeitung veranlassen.</p>
</li>
<li><h3>f) Recht auf Datenübertragbarkeit</h3>
<p>Jede von der Verarbeitung personenbezogener Daten betroffene Person hat das vom Europäischen
Richtlinien-
und Verordnungsgeber gewährte Recht, die sie betreffenden personenbezogenen Daten, welche durch die
betroffene Person einem Verantwortlichen bereitgestellt wurden, in einem strukturierten, gängigen und
maschinenlesbaren Format zu erhalten. Sie hat außerdem das Recht, diese Daten einem anderen
Verantwortlichen
ohne Behinderung durch den Verantwortlichen, dem die personenbezogenen Daten bereitgestellt wurden, zu
übermitteln, sofern die Verarbeitung auf der Einwilligung gemäß Art. 6 Abs. 1 Buchstabe a DS-GVO oder
Art.
9
Abs. 2 Buchstabe a DS-GVO oder auf einem Vertrag gemäß Art. 6 Abs. 1 Buchstabe b DS-GVO beruht und die
Verarbeitung mithilfe automatisierter Verfahren erfolgt, sofern die Verarbeitung nicht für die
Wahrnehmung
einer Aufgabe erforderlich ist, die im öffentlichen Interesse liegt oder in Ausübung öffentlicher Gewalt
erfolgt, welche dem Verantwortlichen übertragen wurde.</p>
<p>Ferner hat die betroffene Person bei der Ausübung ihres Rechts auf Datenübertragbarkeit gemäß Art. 20
Abs.
1 DS-GVO das Recht, zu erwirken, dass die personenbezogenen Daten direkt von einem Verantwortlichen an
einen
anderen Verantwortlichen übermittelt werden, soweit dies technisch machbar ist und sofern hiervon nicht
die
Rechte und Freiheiten anderer Personen beeinträchtigt werden.</p>
<p>Zur Geltendmachung des Rechts auf Datenübertragbarkeit kann sich die betroffene Person jederzeit an
einen
Mitarbeiter von <NAME> wenden.</p>
</li>
<li>
<h3>g) Recht auf Widerspruch</h3>
<p>Jede von der Verarbeitung personenbezogener Daten betroffene Person hat das vom Europäischen
Richtlinien-
und Verordnungsgeber gewährte Recht, aus Gründen, die sich aus ihrer besonderen Situation ergeben,
jederzeit
gegen die Verarbeitung sie betreffender personenbezogener Daten, die aufgrund von Art. 6 Abs. 1
Buchstaben
e
oder f DS-GVO erfolgt, Widerspruch einzulegen. Dies gilt auch für ein auf diese Bestimmungen gestütztes
Profiling.</p>
<p>Die <NAME> verarbeitet die personenbezogenen Daten im Falle des Widerspruchs nicht
mehr, es sei denn, <NAME> kann zwingende schutzwürdige Gründe für die Verarbeitung nachweisen, die
den
Interessen, Rechten und Freiheiten der betroffenen Person überwiegen, oder die Verarbeitung dient der
Geltendmachung, Ausübung oder Verteidigung von Rechtsansprüchen.</p>
<p>Verarbeitet <NAME> personenbezogene Daten, um Direktwerbung zu betreiben, so
hat
die betroffene Person das Recht, jederzeit Widerspruch gegen die Verarbeitung der personenbezogenen
Daten
zum Zwecke derartiger Werbung einzulegen. Dies gilt auch für das Profiling, soweit es mit solcher
Direktwerbung in Verbindung steht. Widerspricht die betroffene Person gegenüber Anja
Wippler der Verarbeitung für Zwecke der Direktwerbung, so wird <NAME> die
personenbezogenen Daten nicht mehr für diese Zwecke verarbeiten.</p>
<p>Zudem hat die betroffene Person das Recht, aus Gründen, die sich aus ihrer besonderen Situation
ergeben,
gegen die sie betreffende Verarbeitung personenbezogener Daten, die bei von <NAME>
zu
wissenschaftlichen oder historischen Forschungszwecken oder zu statistischen Zwecken gemäß Art. 89 Abs.
1
DS-GVO erfolgen, Widerspruch einzulegen, es sei denn, eine solche Verarbeitung ist zur Erfüllung einer
im
öffentlichen Interesse liegenden Aufgabe erforderlich.</p>
<p>Zur Ausübung des Rechts auf Widerspruch kann sich die betroffene Person direkt jeden Mitarbeiter der
<NAME> oder einen anderen Mitarbeiter wenden. Der betroffenen Person steht es
ferner
frei, im Zusammenhang mit der Nutzung von Diensten der Informationsgesellschaft, ungeachtet der
Richtlinie
2002/58/EG, ihr Widerspruchsrecht mittels automatisierter Verfahren auszuüben, bei denen technische
Spezifikationen verwendet werden.</p>
</li>
<li><h3>h) Automatisierte Entscheidungen im Einzelfall einschließlich Profiling</h3>
<p>Jede von der Verarbeitung personenbezogener Daten betroffene Person hat das vom Europäischen
Richtlinien-
und Verordnungsgeber gewährte Recht, nicht einer ausschließlich auf einer automatisierten Verarbeitung —
einschließlich Profiling — beruhenden Entscheidung unterworfen zu werden, die ihr gegenüber rechtliche
Wirkung entfaltet oder sie in ähnlicher Weise erheblich beeinträchtigt, sofern die Entscheidung (1)
nicht
für den Abschluss oder die Erfüllung eines Vertrags zwischen der betroffenen Person und dem
Verantwortlichen
erforderlich ist, oder (2) aufgrund von Rechtsvorschriften der Union oder der Mitgliedstaaten, denen der
Verantwortliche unterliegt, zulässig ist und diese Rechtsvorschriften angemessene Maßnahmen zur Wahrung
der
Rechte und Freiheiten sowie der berechtigten Interessen der betroffenen Person enthalten oder (3) mit
ausdrücklicher Einwilligung der betroffenen Person erfolgt.</p>
<p>Ist die Entscheidung (1) für den Abschluss oder die Erfüllung eines Vertrags zwischen der betroffenen
Person und dem Verantwortlichen erforderlich oder (2) erfolgt sie mit ausdrücklicher Einwilligung der
betroffenen Person, trifft <NAME> angemessene Maßnahmen, um die Rechte und
Freiheiten sowie die berechtigten Interessen der betroffenen Person zu wahren, wozu mindestens das Recht
auf
Erwirkung des Eingreifens einer Person seitens des Verantwortlichen, auf Darlegung des eigenen
Standpunkts
und auf Anfechtung der Entscheidung gehört.</p>
<p>Möchte die betroffene Person Rechte mit Bezug auf automatisierte Entscheidungen geltend machen, kann
sie
sich hierzu jederzeit an einen Mitarbeiter des für die Verarbeitung Verantwortlichen wenden.</p>
</li>
<li><h3>i) Recht auf Widerruf einer datenschutzrechtlichen Einwilligung</h3>
<p>Jede von der Verarbeitung personenbezogener Daten betroffene Person hat das vom Europäischen
Richtlinien-
und Verordnungsgeber gewährte Recht, eine Einwilligung zur Verarbeitung personenbezogener Daten
jederzeit
zu
widerrufen.</p>
<p>Möchte die betroffene Person ihr Recht auf Widerruf einer Einwilligung geltend machen, kann sie sich
hierzu
jederzeit an einen Mitarbeiter des für die Verarbeitung Verantwortlichen wenden.</p>
</li>
</ul>
<h3>7. Rechtsgrundlage der Verarbeitung</h3>
<p>Art. 6 I lit. a DS-GVO dient <NAME> als Rechtsgrundlage für Verarbeitungsvorgänge, bei denen wir
eine Einwilligung für einen bestimmten Verarbeitungszweck einholen. Ist die Verarbeitung personenbezogener
Daten
zur Erfüllung eines Vertrags, dessen Vertragspartei die betroffene Person ist, erforderlich, wie dies
beispielsweise bei Verarbeitungsvorgängen der Fall ist, die für eine Lieferung von Waren oder die Erbringung
einer sonstigen Leistung oder Gegenleistung notwendig sind, so beruht die Verarbeitung auf Art. 6 I lit. b
DS-GVO. Gleiches gilt für solche Verarbeitungsvorgänge, die zur Durchführung vorvertraglicher Maßnahmen
erforderlich sind, etwa in Fällen von Anfragen zu Produkten oder Leistungen. Unterliegt <NAME>
einer rechtlichen Verpflichtung, durch welche eine Verarbeitung von personenbezogenen Daten
erforderlich wird, wie beispielsweise zur Erfüllung steuerlicher Pflichten, so basiert die Verarbeitung auf
Art.
6 I lit. c DS-GVO. In seltenen Fällen könnte die Verarbeitung von personenbezogenen Daten erforderlich
werden,
um lebenswichtige Interessen der betroffenen Person oder einer anderen natürlichen Person zu schützen. Dies
wäre
beispielsweise der Fall, wenn ein Besucher in <NAME>ers Betrieb verletzt werden würde und daraufhin sein
Name,
sein Alter, seine Krankenkassendaten oder sonstige lebenswichtige Informationen an einen Arzt, ein
Krankenhaus
oder sonstige Dritte weitergegeben werden müssten. Dann würde die Verarbeitung auf Art. 6 I lit. d DS-GVO
beruhen.
Letztlich könnten Verarbeitungsvorgänge auf Art. 6 I lit. f DS-GVO beruhen. Auf dieser Rechtsgrundlage
basieren
Verarbeitungsvorgänge, die von keiner der vorgenannten Rechtsgrundlagen erfasst werden, wenn die
Verarbeitung
zur Wahrung eines berechtigten Interesses von <NAME> oder eines Dritten erforderlich ist, sofern die
Interessen, Grundrechte und Grundfreiheiten des Betroffenen nicht überwiegen. Solche Verarbeitungsvorgänge
sind
uns insbesondere deshalb gestattet, weil sie durch den Europäischen Gesetzgeber besonders erwähnt wurden. Er
vertrat insoweit die Auffassung, dass ein berechtigtes Interesse anzunehmen sein könnte, wenn die betroffene
Person ein Kunde des Verantwortlichen ist (Erwägungsgrund 47 Satz 2 DS-GVO).
</p>
<h3>8. Berechtigte Interessen an der Verarbeitung, die von dem Verantwortlichen oder einem Dritten verfolgt
werden</h3>
<p>Basiert die Verarbeitung personenbezogener Daten auf Artikel 6 I lit. f DS-GVO, ist <NAME>ers
berechtigtes
Interesse
die Durchführung ihrer Geschäftstätigkeit zugunsten des Wohlergehens all ihrer Mitarbeiter und
Anteilseigner.</p>
<h3>9. Dauer, für die die personenbezogenen Daten gespeichert werden</h3>
<p>Das Kriterium für die Dauer der Speicherung von personenbezogenen Daten ist die jeweilige gesetzliche
Aufbewahrungsfrist. Nach Ablauf der Frist werden die entsprechenden Daten routinemäßig gelöscht, sofern sie
nicht mehr zur Vertragserfüllung oder Vertragsanbahnung erforderlich sind.</p>
<h3>10. Gesetzliche oder vertragliche Vorschriften zur Bereitstellung der personenbezogenen Daten;
Erforderlichkeit für den Vertragsabschluss; Verpflichtung der betroffenen Person, die personenbezogenen
Daten
bereitzustellen; mögliche Folgen der Nichtbereitstellung</h3>
<p>Wir klären Sie darüber auf, dass die Bereitstellung personenbezogener Daten zum Teil gesetzlich
vorgeschrieben
ist (z.B. Steuervorschriften) oder sich auch aus vertraglichen Regelungen (z.B. Angaben zum Vertragspartner)
ergeben kann.
Mitunter kann es zu einem Vertragsschluss erforderlich sein, dass eine betroffene Person uns
personenbezogene
Daten zur Verfügung stellt, die in der Folge durch uns verarbeitet werden müssen. Die betroffene Person ist
beispielsweise verpflichtet, uns personenbezogene Daten bereitzustellen, wenn <NAME> mit ihr einen
Vertrag abschließt. Eine Nichtbereitstellung der personenbezogenen Daten hätte zur Folge, dass der Vertrag
mit
dem Betroffenen nicht geschlossen werden könnte.
Vor einer Bereitstellung personenbezogener Daten durch den Betroffenen muss sich der Betroffene an Anja
Wippler wenden. Unser Mitarbeiter klärt den Betroffenen einzelfallbezogen darüber auf, ob die
Bereitstellung
der personenbezogenen Daten gesetzlich oder vertraglich vorgeschrieben oder für den Vertragsabschluss
erforderlich ist, ob eine Verpflichtung besteht, die personenbezogenen Daten bereitzustellen, und welche
Folgen die Nichtbereitstellung der personenbezogenen Daten hätte.
</p>
<h3>11. Bestehen einer automatisierten Entscheidungsfindung</h3>
<p>Als verantwortungsbewusste Einzelperson verzichtet <NAME> auf eine automatische Entscheidungsfindung
oder ein Profiling.</p>
<p>Diese Datenschutzerklärung wurde durch den Datenschutzerklärungs-Generator der DGD Deutsche Gesellschaft
für
Datenschutz GmbH, die als <a
href="https://dg-datenschutz.de/datenschutz-dienstleistungen/externer-datenschutzbeauftragter/">Externer
Datenschutzbeauftragter Köln</a> tätig ist, in Kooperation mit dem <a href="https://www.wbs-law.de/">Kölner
Anwalt für Datenschutzrecht <NAME></a> erstellt.
</p>
</Content>
</Container>
</React.Fragment>
)
}
|
#!/bin/bash
# Regression check for cargo-verify's KLEE backend: the test suite is
# expected to contain a failing assertion, and the replay step must
# reproduce the concrete counterexample inputs KLEE found.
set -e

verify_log=out1
replay_log=out2

# Start from a clean build so stale artifacts cannot skew the result.
cargo clean

# Verify with KLEE; a non-zero exit is expected (the assertion fails),
# so tolerate it with `|| true` and assert on the logged output instead.
cargo-verify --tests --verbose | tee "$verify_log" || true
grep -q -F "test t1 ... ASSERT_FAILED" "$verify_log"

# Replay the failing test and confirm the concrete input values.
cargo-verify --tests --replay | tee "$replay_log" || true
grep -q -F "Test values: a = 1000, b = 1000" "$replay_log"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.