text stringlengths 1 1.05M |
|---|
<filename>src/zwave/GenericZWaveMapperRegistry.h
#pragma once
#include <iosfwd>
#include <map>
#include "model/ModuleID.h"
#include "model/ModuleType.h"
#include "util/Loggable.h"
#include "zwave/ZWaveMapperRegistry.h"
namespace BeeeOn {
/**
 * @brief Certain Z-Wave nodes can be mapped to the BeeeOn system generically.
 * This allows to support any node type without knowing it in advance. The
 * GenericZWaveMapperRegistry implements this approach.
 *
 * Devices resolved by the GenericZWaveMapperRegistry would have mangled device
 * ID with 0xff in the top bits of the ident part.
 *
 * Reported devices resolved by the GenericZWaveMapperRegistry would have a
 * product name with appended string " (generic)". Thus, such devices can be
 * distinguished easily from specific implementations that might have different
 * module types.
 */
class GenericZWaveMapperRegistry :
	public ZWaveMapperRegistry,
	Loggable {
public:
	/**
	 * @brief Mapper resolving a Z-Wave node generically from the command
	 * classes it reports, without a device-specific implementation.
	 */
	class GenericMapper : public ZWaveMapperRegistry::Mapper {
	public:
		typedef Poco::SharedPtr<GenericMapper> Ptr;

		/**
		 * @brief Mangle bits to be injected into device ID of Z-Wave
		 * devices that are using the GenericMapper.
		 */
		static uint8_t ID_MANGLE_BITS;

		GenericMapper(const ZWaveNode::Identity &id, const std::string &product);

		/**
		 * @returns device ID for the given Z-Wave identity mangled by
		 * the ID_MANGLE_BITS (0xff).
		 */
		DeviceID buildID() const override;

		/**
		 * @returns product name of the ZWaveNode with appended " (generic)"
		 * string to be easily distinguishable.
		 */
		std::string product() const override;

		/**
		 * @returns resolved module types
		 */
		std::list<ModuleType> types() const override;

		/**
		 * @brief Convert a Z-Wave value into its BeeeOn SensorValue form.
		 */
		SensorValue convert(const ZWaveNode::Value &value) const override;

		/**
		 * @brief Convert a BeeeOn module value back into a Z-Wave value.
		 */
		ZWaveNode::Value convert(const ModuleID &id, double value) const override;

		/**
		 * @brief Record the given association between the given Z-Wave
		 * command class and BeeeOn ModuleType. When calling this method
		 * multiple times, the order corresponds to an increasing ModuleID.
		 */
		void mapType(
			const ZWaveNode::CommandClass &cc,
			const ModuleType &type);

	protected:
		/**
		 * @brief Throw an exception when the given value cannot be converted
		 * into the BeeeOn representation.
		 */
		void cannotConvert(const ZWaveNode::Value &value) const;

	private:
		/// Command class -> module it feeds (populated via mapType()).
		std::map<ZWaveNode::CommandClass, ModuleID> m_mapping;
		/// Module types keyed by ModuleID, in mapType() call order.
		std::map<ModuleID, ModuleType> m_modules;
	};

	GenericZWaveMapperRegistry();

	/**
	 * @brief Load XML file with the types mapping between Z-Wave and BeeeOn.
	 */
	void loadTypesMapping(const std::string &file);

	/**
	 * @brief Load the types mapping from an already-opened input stream.
	 */
	void loadTypesMapping(std::istream &in);

	/**
	 * @brief Map the given ZWaveNode instance on-fly to the BeeeOn system
	 * by using the GenericMapper.
	 */
	Mapper::Ptr resolve(const ZWaveNode &node) override;

private:
	/**
	 * The m_typesMapping maps Z-Wave command classes to BeeeOn types.
	 */
	std::map<std::pair<uint8_t, uint8_t>, ModuleType> m_typesMapping;

	/**
	 * The m_typesOrder maintains backwards compatibility of the m_typesMapping and GenericMapper.
	 * If a new data type is added, it MUST be appended to the end of the m_typesOrder and its
	 * value must be incremented by 1 from the last one.
	 *
	 * The values as discovered from Z-Wave nodes must be always in the same order to
	 * have a stable mapping to BeeeOn modules of a device. When all values are reported
	 * for a Z-Wave device, they are sorted according to the m_typesOrder.
	 */
	std::map<std::pair<uint8_t, uint8_t>, unsigned int> m_typesOrder;
};
}
|
<reponame>Sphereon-Opensource/factom-identity-java
package com.sphereon.factom.identity.did.parse;
import org.blockchain_innovation.factom.client.api.model.Entry;
import org.blockchain_innovation.factom.client.api.ops.StringUtils;
import java.util.Optional;
/**
 * A rule that asserts that the 2nd external id of an entry, which denotes the scheme version, is supported
 */
/**
 * Validates that the second external id of an entry (the entry scheme
 * version) matches a required semantic version.
 */
public class EntrySchemeVersionRule extends AbstractEntryRule<String> {
    private final String semver;

    public EntrySchemeVersionRule(Entry entry, String semver) {
        super(entry);
        this.semver = semver;
    }

    /**
     * Runs the rule.
     *
     * @return the version string found in the second external id
     * @throws RuleException when no semver is configured, the entry is
     *                       invalid, the entry does not have exactly two
     *                       external ids, or the versions differ
     */
    @Override
    public String execute() throws RuleException {
        final String required = getSemver();
        if (StringUtils.isEmpty(required)) {
            throw new RuleException("A semantic version needs to be defined for the Entry scheme version rule");
        }
        assertEntry();
        new ExternalIdsSizeRule(getEntry(), Optional.of(2)).execute();
        final String actualVersion = getEntry().getExternalIds().get(1);
        if (!required.equals(actualVersion)) {
            throw new RuleException("Entry scheme version in external id with version '%s' was not equal to required version '%s'", actualVersion, required);
        }
        return actualVersion;
    }

    public String getSemver() {
        return semver;
    }
}
|
<filename>cmd/keys.go
package cmd
import (
"fmt"
"os"
"strings"
"text/tabwriter"
"github.com/spf13/cobra"
"github.com/dmolesUC3/cos/internal/keys"
"github.com/dmolesUC3/cos/internal/logging"
"github.com/dmolesUC3/cos/pkg"
)
const (
	// usageKeys is the one-line usage pattern for the keys command.
	usageKeys = "keys <BUCKET-URL>"
	// shortDescKeys is the summary shown in command listings.
	shortDescKeys = "keys: test the keys supported by an object storage endpoint"
	// longDescKeys is the full help text; the table of available built-in
	// key lists is appended at runtime by longDescription().
	longDescKeys = shortDescKeys + `
Creates, retrieves, verifies, and deletes a small object for each value
in the specified key list. By default, keys outputs only failed keys, to
standard output, writing each key as a quoted Go string literal
(see https://golang.org/pkg/strconv/).
Use the --raw option to write the keys without quoting or escaping; note
that this may produce confusing results if any of the keys contain
newlines.
Use the --ok option to write successful keys to a file, and the --bad
option (or shell redirection) to write failed keys to a file instead of to
standard output.
Use the --list option to select one of the built-in "standard" key lists.
Use the --file option to specify a file containing keys to test, one key per
file, separated by newlines (LF, \n).
Available lists:
`
	// exampleKeys lists example invocations shown in the help output.
	exampleKeys = `
cos keys --endpoint https://s3.us-west-2.amazonaws.com/s3://www.dmoles.net/
cos keys --list naughty-strings --endpoint https://s3.us-west-2.amazonaws.com/ s3://www.dmoles.net/
cos keys --raw --ok ok.txt --bad bad.txt --endpoint https://s3.us-west-2.amazonaws.com/ s3://www.dmoles.net/
cos keys --file my-keys.txt --endpoint https://s3.us-west-2.amazonaws.com/ s3://www.dmoles.net/
cos keys --sample 100 --file my-keys.txt --raw --ok ok.txt --bad bad.txt --endpoint https://s3.us-west-2.amazonaws.com/ s3://www.dmoles.net/
`
)
// longDescription assembles the full help text for the keys command by
// appending the table of built-in key lists to the static description.
// A failure to render the table is a programming error, hence the panic.
func longDescription() string {
	lists, err := availableKeyLists()
	if err != nil {
		panic(err)
	}
	return logging.Untabify(longDescKeys+"\n"+*lists, "")
}
// availableKeyLists renders the known key lists as a numbered, tab-aligned
// table suitable for embedding in the command help text.
func availableKeyLists() (*string, error) {
	var buf strings.Builder
	table := tabwriter.NewWriter(&buf, 0, 0, 2, ' ', tabwriter.DiscardEmptyColumns)
	for index, list := range keys.KnownKeyLists() {
		if _, err := fmt.Fprintf(table, "%d.\t%v\t%v (%d keys)\n", index+1, list.Name(), list.Desc(), list.Count()); err != nil {
			return nil, err
		}
	}
	if err := table.Flush(); err != nil {
		return nil, err
	}
	rendered := buf.String()
	return &rendered, nil
}
// init wires up the `keys` cobra command: usage/help text, flag definitions,
// and the Run hook that delegates to checkKeys.
func init() {
	f := keysFlags{}
	cmd := &cobra.Command{
		Use: usageKeys,
		Short: shortDescKeys,
		Long: longDescription(),
		Args: cobra.ExactArgs(1),
		Example: logging.Untabify(exampleKeys, " "),
		Run: func(cmd *cobra.Command, args []string) {
			err := checkKeys(args[0], f)
			if err != nil {
				// Report the failure; write errors to stderr are ignored.
				_, _ = fmt.Fprintln(os.Stderr, err)
			}
		},
	}
	cmdFlags := cmd.Flags()
	// Shared flags (endpoint, log level, ...) are registered by keysFlags itself.
	f.AddTo(cmdFlags)
	cmdFlags.BoolVar(&f.Raw, "raw", false, "write keys in raw (unquoted) format")
	cmdFlags.StringVarP(&f.OkFile, "ok", "o", "", "write successful (\"OK\") keys to specified file")
	cmdFlags.StringVarP(&f.BadFile, "bad", "b", "", "write failed (\"bad\") keys to specified file")
	cmdFlags.StringVarP(&f.ListName, "list", "l", keys.DefaultKeyListName, "key list to check")
	cmdFlags.StringVarP(&f.KeyFile, "file", "f", "", "file of keys to check")
	cmdFlags.IntVarP(&f.Sample, "sample", "s", 0, "sample size, or 0 for all keys")
	rootCmd.AddCommand(cmd)
}
// checkKeys exercises every key in the configured key list against the target
// bucket and returns an error summarizing any failed keys.
func checkKeys(bucketStr string, f keysFlags) error {
	logger := logging.DefaultLoggerWithLevel(f.LogLevel())
	logger.Tracef("flags: %v\n", f)
	logger.Tracef("bucket URL: %v\n", bucketStr)

	target, err := f.Target(bucketStr)
	if err != nil {
		return err
	}
	keyList, err := f.KeyList()
	if err != nil {
		return err
	}
	okOut, badOut, err := f.Outputs()
	if err != nil {
		return err
	}

	checker := pkg.NewKeys(target, keyList)
	failed, err := checker.CheckAll(okOut, badOut, f.Raw)
	if err != nil {
		return err
	}
	if n := len(failed); n > 0 {
		return fmt.Errorf("%v: %d of %d keys failed", keyList.Name(), n, keyList.Count())
	}
	return nil
}
|
import prisma from '../../../lib/prisma';
import { NextApiRequest, NextApiResponse } from "next";
import { getSession } from 'next-auth/client';
/**
 * API route: create a comment for a contact.
 *
 * POST body: { contactId, title, content }. Requires an authenticated
 * next-auth session; responds 403 otherwise, 405 for non-POST methods,
 * 500 on database failure.
 */
export default async (req: NextApiRequest, res: NextApiResponse) => {
  const session = await getSession({ req })
  const {
    body: { contactId, title, content },
    method,
  } = req

  if (!session) {
    // Fix: without this `return` the handler fell through and still
    // attempted to create the comment for unauthenticated requests.
    res.status(403).end('Unauthorized route')
    return
  }

  try {
    if (method === 'POST') {
      const comment = await prisma.comment.create({
        data: {
          contactId: contactId as string,
          title: title as string,
          content: content as string
        },
      });
      res.status(200).json(comment);
      // Fix: return so the 405 below is not sent after the 200 response
      // (previously triggered "headers already sent" errors).
      return
    }
    res.setHeader('Allow', ['POST'])
    res.status(405).end(`Method ${method} Not Allowed`);
  } catch (error) {
    res.status(500).send({ error: 'API failed' })
  }
}
|
<reponame>levsthings/somnolence<gh_stars>0
/** Command/event type identifiers shared across the app. */
const actionTypes = {
  UPDATE_USER_COMMAND: 'UPDATE_USER_COMMAND',
  REGISTER_USER_COMMAND: 'REGISTER_USER_COMMAND',
  ADD_TO_HISTORY: 'ADD_TO_HISTORY'
}

export default actionTypes
|
<gh_stars>0
package lock
import (
"context"
"github.com/brianvoe/gofakeit/v6"
"github.com/go-redis/redis/v8"
"sync"
"time"
)
// RedLock implements the Redlock distributed-locking scheme over a set of
// independent Redis instances: the lock is held when acquired on a majority.
type RedLock struct {
	clients []*redis.Client // one client per independent Redis instance
	successClients []*redis.Client // clients on which the lock was acquired
	script *redis.Script // Lua script used for ownership-safe unlock
	resource string // key of the locked resource
	randomValue string // random token proving lock ownership
	watchDog chan struct{} // closed to stop the TTL-refresh watchdog
}
// NewRedLock builds a RedLock guarding resource across the given Redis clients.
func NewRedLock(clients []*redis.Client, resource string) *RedLock {
	lock := &RedLock{
		clients:  clients,
		resource: resource,
		script:   redis.NewScript(unlockScript),
	}
	return lock
}
// TryLock attempts to acquire the lock on every Redis instance concurrently.
// The lock is held only when a majority (N/2+1) of SET NX calls succeed; on
// failure the partially-acquired locks are released asynchronously and
// ErrLockFailed is returned. On success a single watchdog goroutine is
// started to keep extending the TTL until Unlock.
func (l *RedLock) TryLock(ctx context.Context) error {
	randomValue := gofakeit.UUID()
	var wg sync.WaitGroup
	wg.Add(len(l.clients))
	// Buffered channel collecting the clients on which SET NX succeeded.
	successClients := make(chan *redis.Client, len(l.clients))
	for _, client := range l.clients {
		go func(client *redis.Client) {
			defer wg.Done()
			success, err := client.SetNX(ctx, l.resource, randomValue, ttl).Result()
			if err != nil {
				return
			}
			// Lock already held by someone else on this instance.
			if !success {
				return
			}
			successClients <- client
		}(client)
	}
	// Wait for every acquisition attempt to finish.
	wg.Wait()
	close(successClients)
	// Quorum check: fewer than N/2+1 successes means the lock was not acquired.
	if len(successClients) < len(l.clients)/2+1 {
		// Even on failure, release the locks we did manage to take.
		for client := range successClients {
			go func(client *redis.Client) {
				releaseCtx, cancel := context.WithTimeout(context.Background(), ttl)
				defer cancel() // fix: the original discarded the cancel func
				l.script.Run(releaseCtx, client, []string{l.resource}, randomValue)
			}(client)
		}
		return ErrLockFailed
	}
	l.randomValue = randomValue
	l.successClients = nil
	for successClient := range successClients {
		l.successClients = append(l.successClients, successClient)
	}
	// Fix: start exactly ONE watchdog, and only after the quorum succeeded and
	// successClients is populated. The original spawned a watchdog per
	// successful client (racing on l.watchDog, leaking goroutines) and did so
	// even when the overall acquisition failed.
	go l.startWatchDog()
	return nil
}
// startWatchDog periodically extends the lock's TTL on every successfully
// locked instance until the watchdog channel is closed by Unlock.
// NOTE(review): l.watchDog is created here, inside the goroutine, while
// Unlock closes it from another goroutine — confirm callers never race
// TryLock success against an immediate Unlock.
func (l *RedLock) startWatchDog() {
	l.watchDog = make(chan struct{})
	ticker := time.NewTicker(resetTTLInterval)
	defer ticker.Stop() // fix: the original leaked the ticker forever
	for {
		select {
		case <-ticker.C:
			// Extend the expiration on each instance concurrently.
			for _, client := range l.successClients {
				go func(client *redis.Client) {
					ctx, cancel := context.WithTimeout(context.Background(), ttl-resetTTLInterval)
					defer cancel() // fix: the original discarded the cancel func
					client.Expire(ctx, l.resource, ttl)
				}(client)
			}
		case <-l.watchDog:
			// Unlocked: stop refreshing.
			return
		}
	}
}
// Unlock asynchronously releases the lock on every instance where it was
// acquired (the Lua script only deletes the key when the random token still
// matches) and stops the TTL watchdog.
func (l *RedLock) Unlock(ctx context.Context) error {
	for _, client := range l.successClients {
		go func(client *redis.Client) {
			l.script.Run(ctx, client, []string{l.resource}, l.randomValue)
		}(client)
	}
	// Fix: guard against a nil channel — closing nil panics when Unlock is
	// called without a successful TryLock (watchdog never started).
	if l.watchDog != nil {
		close(l.watchDog)
	}
	return nil
}
|
package org.mengyun.tcctransaction.repository;
import org.mengyun.tcctransaction.Transaction;
import javax.transaction.xa.Xid;
import java.io.Closeable;
import java.util.Date;
/**
 * Transaction storage interface: storage back-ends implement it to provide
 * create/read/update/delete operations for transactions.
 * Created by changmingxie on 11/12/15.
 */
public interface TransactionRepository extends Closeable {

    String getDomain();

    String getRootDomain();

    int create(Transaction transaction);

    int update(Transaction transaction);

    int delete(Transaction transaction);

    /** Looks up a transaction by its XA transaction id. */
    Transaction findByXid(Xid xid);

    /** Looks up a transaction by the root transaction's XA id. */
    Transaction findByRootXid(Xid xid);

    /** Pages through transactions unmodified since the given date. */
    Page<Transaction> findAllUnmodifiedSince(Date date, String offset, int pageSize);

    /** Default no-op close; back-ends override when they hold resources. */
    @Override
    default void close() {
    }
}
|
<reponame>Project-Timer/FrontTimer
import {Component, Input, OnInit} from '@angular/core';
/**
 * Presentational component rendering the list of groups supplied by a parent.
 */
@Component({
  selector: 'ngx-group-list',
  templateUrl: './group-list.component.html',
  styleUrls: ['./group-list.component.scss'],
})
export class GroupListComponent implements OnInit {
  // Groups to render; the concrete shape is defined by the template.
  @Input() groups: any[];
  // Visibility toggle controlled by the parent.
  @Input() show: boolean;

  constructor() {
  }

  ngOnInit() {
    // NOTE(review): this overwrites any `show` value the parent bound at
    // init time — confirm that forcing the list hidden on init is intended.
    this.show = false;
  }
}
|
#!/usr/bin/env bash
# Resolve the physical (symlink-free) directory containing ${1}.
# Runs inside a subshell so the directory changes do not leak to the caller.
canonical() {
	local d="$(dirname ${1})"
	local f="$(basename ${1})"
	(
		cd ${d} >/dev/null 2>&1
		# Chase ${f} while it is a symlink. NOTE(review): ${f} itself is never
		# reassigned to the link target's basename — confirm multi-level
		# symlink chains are not expected here.
		while [ -h "${f}" ] ; do
			cd $(dirname $(readlink ${f})) >/dev/null 2>&1
		done
		pwd -P
	)
}
# Absolute project root (symlinks resolved) and the CLI entry point under
# test, exported for child processes and fixtures.
project_dirname=$(canonical ./)
export EZ_BUILD_BIN="${project_dirname}/bin/ez-build.js"
# Invoke the ez-build CLI under bats' `run` (which captures ${status} and
# ${output}), echoing the invocation for easier debugging of failed tests.
ez-build() {
	echo "argc: ${EZ_BUILD_BIN}"
	echo "argv: ${@}"
	run ${EZ_BUILD_BIN} ${@}
}
# Assert that the last `run` command exited 0; otherwise print the captured
# status/output and return 1.
assert_success() {
	if [[ "${status}" == 0 ]]; then
		return 0
	fi
	echo "-- command failed but was expected to succeed"
	echo "status : ${status}"
	echo "output : ${output}"
	echo "--"
	return 1
}
# Assert that the last `run` command exited non-zero; otherwise print the
# captured status/output and return 1.
assert_failure() {
	if [[ "${status}" == 0 ]]; then
		# Fix: diagnostic previously misspelled "succeded".
		echo "-- command succeeded but was expected to fail"
		echo "status : ${status}"
		echo "output : ${output}"
		echo "--"
		return 1
	fi
}
# Monotonic counter distinguishing multiple assert_expected calls in one test.
counter=0
# Golden-file assertion: the first run records ${1} as the expected output
# for this (file, test, call-number) triple; later runs compare against it.
assert_expected() {
	outdir="${BATS_TEST_DIRNAME}/expected"
	outfile="$(basename ${BATS_TEST_FILENAME} .bats)"
	# Sanitize the test description into a filesystem-safe slug.
	outtest="$(echo ${BATS_TEST_DESCRIPTION} | sed -e 's/[^A-Za-z0-9._-]/_/g')"
	(( counter+=1 ))
	expected="${outdir}/${outfile}--${outtest}--${counter}"
	if [[ ! -f ${expected} ]]; then
		mkdir -p "${outdir}"
		echo "${1}" > "${expected}"
	else
		assert_equal "$(cat ${expected})" "${1}"
	fi
}
# Assert two values are equal. With --eval, ${1}/${2} name variables that are
# dereferenced (${!name}) and word-split into arrays first.
assert_equal() {
	if [[ "${1}" == "--eval" ]]; then
		shift
		expected=(${!1})
		actual=(${!2})
	else
		expected="${1}"
		actual="${2}"
	fi
	# NOTE(review): this computes a symmetric word-level difference via
	# sort|uniq -u, so it ignores word ordering and paired duplicates rather
	# than doing a strict string comparison — confirm that is intended.
	diff=$(echo ${expected[@]} ${actual[@]} | tr ' ' '\n' | sort | uniq -u)
	if [[ -z "${diff}" ]]; then
		return 0
	else
		echo "-- not equal"
		echo "expected : ${expected[@]}"
		echo "actual : ${actual[@]}"
		echo "diff : ${diff[@]}"
		echo "--"
		return 1
	fi
}
# Assert that the captured ${output} of the last `run` equals the expected
# value (or, with --eval, the contents of the named variable).
assert_output() {
	case "${1}" in
	--eval)
		shift
		expected=(${!1})
		;;
	*)
		expected="${1}"
		;;
	esac
	assert_equal "${expected}" "${output}"
}
# Assert that every given path exists; report and fail on the first missing one.
assert_exists() {
	for file in ${@}; do
		if [[ -e ${file} ]]; then
			continue
		fi
		echo "-- file does not exist"
		echo "expected : ${file}"
		echo "actual : does not exist"
		echo "--"
		return 1
	done
}
# Assert that none of the given paths exist; report and fail on the first hit.
refute_exists() {
	for file in ${@}; do
		if [[ ! -e ${file} ]]; then
			continue
		fi
		echo "-- file exists"
		echo "expected : no file"
		echo "actual : ${file}"
		echo "--"
		return 1
	done
}
# Assert that ${2} contains ${1} as a substring. With --eval the arguments
# name variables that are dereferenced and word-split first.
assert_contains() {
	if [[ "${1}" == "--eval" ]]; then
		shift
		expected=(${!1})
		actual=(${!2})
	else
		expected="${1}"
		actual="${2}"
	fi
	# Glob-style substring match on the (possibly array) values.
	if [[ "${actual}" == *"${expected}"* ]]; then
		return 0
	else
		echo "-- expected string not found"
		echo "expected : ${expected[@]}"
		echo "actual : ${actual[@]}"
		echo "--"
		return 1
	fi
}
# Assert that the captured ${output} contains the expected substring
# (or, with --eval, the contents of the named variable).
assert_output_contains() {
	case "${1}" in
	--eval)
		shift
		expected=(${!1})
		;;
	*)
		expected="${1}"
		;;
	esac
	assert_contains "${expected}" "${output}"
}
# Enter the named fixture directory (pushd) and install/link its node modules
# on first use. NOTE: leaves the shell inside the fixture until
# unload_fixture is called.
load_fixture() {
	fixture="${project_dirname}/test/fixtures/${1}"
	if [[ -d "${fixture}" ]]; then
		pushd "${fixture}"
		if [[ ! -d node_modules ]]; then
			npm install
			npm link "${project_dirname}"
		fi
	else
		echo "unknown fixture: ${fixture}"
		return 1
	fi
}
# Remove untracked files from the fixture via git clean, preserving
# node_modules and runtime pid/log files.
clean_fixture() {
	fixture="${project_dirname}/test/fixtures/${1}"
	if [[ -d "${fixture}" ]]; then
		git clean -dfx -e node_modules -e "*.pid" -e "*.log" -- "${fixture}"
	else
		echo "unknown fixture: ${fixture}"
		return 1
	fi
}
# Clean the fixture and return (popd) to wherever load_fixture was called from.
unload_fixture() {
	fixture="${project_dirname}/test/fixtures/${1}"
	if [[ -d "${fixture}" ]]; then
		clean_fixture "${1}"
		popd
	else
		echo "unknown fixture: ${fixture}"
		return 1
	fi
}
# Retry the given command (via eval) up to 30 times, one second apart, until
# it succeeds; on exhaustion print the last output and return the last status.
# The set +e/-e toggling keeps failing attempts from aborting callers that
# run under `set -e`.
eventually() {
	for try in $(seq 1 30); do
		set +e
		output="$(eval ${@})"
		status=$?
		set -e
		if [[ ${status} == 0 ]]; then
			return 0
		fi
		sleep 1
	done
	echo "${output}"
	return ${status}
}
|
import React from "react";
import Header from "../../components/Header/Header";
import SideBar from "../../components/SideBar/SideBar";
import "./DashboardLayout.styles.scss";
// Page chrome for dashboard routes: a fixed sidebar plus a content column
// with the header on top and the routed page rendered as {children}.
export default function DashboardLayout({ children }) {
  return (
    <main className="dashboard-layout" data-modal="body">
      <SideBar />
      <section
        className="dashboard-content"
        title="dashboard-content"
        data-mobile="body"
      >
        <Header />
        {children}
      </section>
    </main>
  );
}
|
<filename>library/src/main/java/com/livetyping/library/interfaces/CannyTransition.java
package com.livetyping.library.interfaces;
import android.support.transition.Transition;
import android.view.View;
/**
 * Supplies the Transition used when swapping one child view for another.
 */
public interface CannyTransition {
    /**
     * @param inChild  presumably the view being brought in — confirm with implementations
     * @param outChild presumably the view being removed
     * @return the transition animating the swap
     */
    Transition getTransition(View inChild, View outChild);
}
#!/bin/bash
#============================================================
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part2.sh
# Description: OpenWrt DIY script part 2 (After Update feeds)
# License: MIT
# Author: P3TERX
# Blog: https://p3terx.com
#============================================================
# Modify default IP
# Rewrite the LAN address OpenWrt bakes into config_generate (192.168.1.1)
# to 10.10.10.252 before the image is built.
sed -i 's/192.168.1.1/10.10.10.252/g' package/base-files/files/bin/config_generate
|
#!/usr/bin/env bash
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# This script is invoked by Jenkins and triggers a test run based on
# env variable settings.
#
# Setting up rvm environment BEFORE we set -ex.
[[ -s /etc/profile.d/rvm.sh ]] && . /etc/profile.d/rvm.sh
# To prevent cygwin bash complaining about empty lines ending with \r
# we set the igncr option. The option doesn't exist on Linux, so we fallback
# to just 'set -ex' there.
# NOTE: No empty lines should appear in this file before igncr is set!
set -ex -o igncr || set -ex
# Platform-specific tweaks for the test runner.
if [ "$platform" == "linux" ]
then
	# Linux runs inside docker and records per-test CPU cost.
	PLATFORM_SPECIFIC_ARGS="--use_docker --measure_cpu_costs"
elif [ "$platform" == "freebsd" ]
then
	# FreeBSD's default make is not GNU make.
	export MAKE=gmake
fi
unset platform # variable named 'platform' breaks the windows build
# Run the suite; remember failure instead of exiting so the report steps
# below always execute.
python tools/run_tests/run_tests.py \
	$PLATFORM_SPECIFIC_ARGS \
	-t \
	-l $language \
	-c $config \
	-x report.xml \
	-j 2 \
	$@ || TESTS_FAILED="true"
# Jenkins expects an HTML report; provide a placeholder if none was produced.
if [ ! -e reports/index.html ]
then
	mkdir -p reports
	echo 'No reports generated.' > reports/index.html
fi
# Propagate the deferred failure to Jenkins.
if [ "$TESTS_FAILED" != "" ]
then
	exit 1
fi
|
#! /vendor/bin/sh
#
# Copyright (c) 2019-2021 Qualcomm Technologies, Inc.
# All Rights Reserved.
# Confidential and Proprietary - Qualcomm Technologies, Inc.
#
# Copyright (c) 2019 The Linux Foundation. All rights reserved.
#
export PATH=/vendor/bin
# Choose the charger-daemon configuration based on the SoC id.
soc_id=`getprop ro.vendor.qti.soc_id`
# These SoCs start hvdcp_opti in mode 2 and need no sysfs ownership changes.
if [ "$soc_id" -eq 415 ] || [ "$soc_id" -eq 439 ] || [ "$soc_id" -eq 450 ] || [ "$soc_id" -eq 475 ] || [ "$soc_id" -eq 515 ]; then
	setprop persist.vendor.hvdcp_opti.start 2
	exit 0
fi
if [ "$soc_id" -eq 441 ] || [ "$soc_id" -eq 471 ]; then
	#Scuba does not support usb-pd or charge pumps
	find /sys/class/power_supply/battery/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/bms/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/main/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/usb/ -type f -maxdepth 1 | xargs chown system.system
else
	# Full set: repeats the four entries above plus the usb-pd / charge-pump
	# nodes absent on Scuba.
	find /sys/class/power_supply/battery/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/bms/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/main/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/usb/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/charge_pump_master/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/pc_port/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/dc/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/power_supply/parallel/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/usbpd/usbpd0/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/qc-vdm/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/charge_pump/ -type f -maxdepth 1 | xargs chown system.system
	find /sys/class/qcom-battery/ -type f -maxdepth 1 | xargs chown system.system
	# Hand the power-related IIO devices (smb/qg/div2 charge pump) to system.
	for i in 0 1 2 3 4 5 6 7 8 9
	do
		devname=`cat /sys/bus/iio/devices/iio:device$i/name`
		if [[ "$devname" == *smb* ]] || [[ "$devname" == *qg* ]] || [[ "$devname" == *div2_cp* ]] || [[ "$devname" == *div2-cp* ]]; then
			find /sys/bus/iio/devices/iio:device$i/ -type f -maxdepth 1 | xargs chown system.system
		fi
	done
fi
# Start the charger daemon in its default mode.
setprop persist.vendor.hvdcp_opti.start 1
|
<reponame>ojandali93/NEXXIST_B1
import React from 'react'
// Presentational card listing key investment metrics. Values are hard-coded
// placeholders; the "(i)" paragraphs look like info-tooltip anchors.
// NOTE(review): className "property-rev-exp-contianer" is misspelled but is a
// runtime string paired with the stylesheet — fix both together.
export default function PropertyListMetrics() {
  return (
    <div className="property-rev-exp-contianer">
      <h2>Key Investment Metrics:</h2>
      <div className="metrics-section">
        <div className="metrics-item">
          <p>(i)</p>
          <p>CF:</p>
          <p>$10,234</p>
        </div>
        <div className="metrics-item">
          <p>(i)</p>
          <p>NOI:</p>
          <p>$10,234</p>
        </div>
        <div className="metrics-item">
          <p>(i)</p>
          <p>ROI:</p>
          <p>6.73%</p>
        </div>
      </div>
    </div>
  )
}
|
def manipulateString(string):
    """Drop every 'a', replace every 'e' with 'X', then upper-case.

    Note: only lowercase 'a'/'e' are affected, since the replacements run
    before the upper-casing.

    :param string: input text
    :return: transformed, upper-cased text
    """
    return string.replace('a', '').replace('e', 'X').upper()


if __name__ == '__main__':
    # Fix: the original called manipulateString(string) with an undefined
    # global name, raising NameError at import. Demonstrate with a literal.
    print(manipulateString('example string'))
# Load the current opam switch into this shell's environment.
eval `opam config env`
# Install mirage plus its external system dependencies, non-interactively.
opam depext -uiy mirage
cd ~
# Smoke-test the toolchain by building the skeleton unikernels, then clean up.
git clone https://github.com/mirage/mirage-skeleton.git
make -C mirage-skeleton && rm -rf mirage-skeleton
|
"""Demonstrates usage of *args and **kwargs"""
def my_func(*args, **kwargs):
"""Prints arguments passed to the my_func() function"""
for arg in args:
print("Argument passed:", arg)
for key, value in kwargs.items():
print("{0} = {1}".format(key,value))
a = 1
b = 2
my_dict = {'firstname': 'John', 'lastname': 'Doe'}
my_func(a, b, **my_dict) |
import { connect } from 'react-redux'
/**
 * Builds the communities list for the connected component, prepending the
 * synthetic "Public" entry when props.isPublic is set.
 *
 * Fix: the original called unshift on props.communities, mutating a prop in
 * place and prepending a duplicate "Public" entry on every invocation
 * (e.g. each re-render). A new array is built instead.
 */
export function mapStateToProps (state, props) {
  const communities = props.isPublic
    ? [
        { name: 'Public', id: 'public', avatarUrl: '/public-icon.svg', slug: 'public' },
        ...props.communities
      ]
    : props.communities
  return {
    communities
  }
}
// No dispatch props are needed; exported for symmetry/testability.
export function mapDispatchToProps (dispatch, props) {
  return {}
}
// Higher-order component: apply the mappers to whatever component is wrapped.
export default connect(mapStateToProps, mapDispatchToProps)
|
#include "sudoku_player.h"
#include "sudoku_pretty_print.h"
#include <algorithm>
#include <iostream>
#include <string>
#include <stdexcept>
namespace Sudoku {
Player::Move::LetterSet const Player::Move::letters_ = Player::Move::makeLetterSet();
// Reads a move as three whitespace-separated numbers: row, column, letter.
// On EOF, `terminate` is set by getInput() and the remaining fields are skipped.
void Player::Move::read()
{
	row = toSize(getInput());
	if (!terminate) {
		col = toSize(getInput());
	}
	if (!terminate) {
		letter = toLetter(getInput());
	}
}
// Reads one token from stdin and parses it as an integer.
// Returns 0 and sets `terminate` on EOF; throws runtime_error on bad input.
int Player::Move::getInput()
{
	try {
		std::string input;
		std::cin >> input;
		if (std::cin.eof()) {
			terminate = true;
			return 0;
		}
		return std::stoi(input);
	}
	catch (...) {
		// std::stoi throws invalid_argument/out_of_range; normalize both.
		throw std::runtime_error("Invalid number");
	}
}
// Converts raw user input to a board index, rejecting negative or
// out-of-range values with the same diagnostic.
std::size_t Player::Move::toSize(int input)
{
	if (input < 0) {
		throw std::runtime_error("Invalid index");
	}
	auto const index = static_cast<std::size_t>(input);
	if (index >= Board::size()) {
		throw std::runtime_error("Invalid index");
	}
	return index;
}
// Converts raw user input to a Letter, accepting only members of the
// pre-built letter set (alphabet plus the blank).
Letter Player::Move::toLetter(int input)
{
	auto const candidate = Letter(input);
	if (letters_.count(candidate) == 0) {
		throw std::runtime_error("Invalid letter");
	}
	return candidate;
}
// Builds the set of letters accepted as input: the full alphabet plus the
// blank (space) used to clear a cell.
Player::Move::LetterSet Player::Move::makeLetterSet()
{
	auto const letters = Alphabet::all();
	LetterSet letterSet(letters.begin(), letters.end());
	letterSet.insert(Alphabet::space());
	return letterSet;
}
// Binds the player to a board and records the pre-filled (fixed) cells so
// they cannot be overwritten during play.
Player::Player(Board & board, bool allowBadMoves)
	: board_(board)
	, checker_(board)
	, allowBadMoves_(allowBadMoves)
	, earlyTermination_(false)
{
	populateFixedCells();
}
// Main interactive loop: display the board and accept moves until the board
// is full or the user quits (ctrl-d). Returns true iff the game completed
// and the final board passes the checker.
bool Player::play()
{
	while (true) {
		displayBoard();
		if (gameOver()) {
			break;
		}
		makeMove();
		if (earlyTermination_) {
			break;
		}
	}
	return !earlyTermination_ && checker_.check(false);
}
// The game is over once no cell contains the blank letter.
bool Player::gameOver() const
{
	return std::all_of(board_.begin(), board_.end(), [](auto const & row) {
		return std::none_of(row.begin(), row.end(), [](auto const & letter) {
			return letter == Alphabet::space();
		});
	});
}
// Pretty-prints the current board; the PrintInfo options are built once.
void Player::displayBoard() const
{
	static PrintInfo const printInfo = PrintInfo(true, true);
	prettyPrint(board_, printInfo);
}
// Prompts for and applies a single move. Errors (bad input, fixed cell,
// invalid move) are stored in a function-local static and reported at the
// start of the NEXT prompt, after the board has been redrawn.
void Player::makeMove()
{
	static std::string errMsg;
	if (!errMsg.empty()) {
		std::cout << "ERROR: " << errMsg << std::endl;
		errMsg = "";
	}
	std::cout << "** Enter Move as: row# col# letter (use 0 for space, use ctrl-d to exit)\n"
		<< ">> " << std::flush;
	try {
		Move move;
		move.read();
		if (move.terminate) {
			earlyTermination_ = true;
			return;
		}
		if (!fixedCell(move.row, move.col)) {
			board_[move.row][move.col] = move.letter;
			// Validate immediately and roll back when bad moves are disallowed.
			if (!allowBadMoves_ && !checker_.check(true)) {
				board_[move.row][move.col] = Alphabet::space();
				errMsg = "Invalid last move";
			}
		}
		else {
			errMsg = "Cannot override fixed cell";
		}
	}
	catch (std::exception const & ex) {
		errMsg = ex.what();
	}
}
// Records the coordinates of every pre-filled cell as fixed (non-editable).
void Player::populateFixedCells()
{
	for (std::size_t row = 0; row < board_.size(); ++row) {
		for (std::size_t col = 0; col < board_.size(); ++col) {
			if (board_[row][col] != Alphabet::space()) {
				fixedCells_.insert(cellKey(row, col));
			}
		}
	}
}
}
|
from __future__ import unicode_literals
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from converter.models import Transformation
from rest_framework import generics
from rest_framework.response import Response
from .models import StagingFolderSource
from .serializers import StagingFolderFileSerializer, StagingFolderSerializer
class APIStagingSourceFileView(generics.GenericAPIView):
    """
    get: Details of the selected staging file.
    """
    serializer_class = StagingFolderFileSerializer

    def get(self, request, staging_folder_pk, encoded_filename):
        # Resolve the parent folder first so a bad pk yields a clean 404.
        staging_folder = get_object_or_404(
            StagingFolderSource, pk=staging_folder_pk
        )
        # Serialize the single file addressed by its encoded filename.
        return Response(
            StagingFolderFileSerializer(
                staging_folder.get_file(encoded_filename=encoded_filename),
                context={'request': request}
            ).data
        )
class APIStagingSourceListView(generics.ListAPIView):
    """
    get: Returns a list of all the staging folders and the files they contain.
    """
    serializer_class = StagingFolderSerializer
    # All staging folder sources; filtering is left to the serializer/consumer.
    queryset = StagingFolderSource.objects.all()
class APIStagingSourceView(generics.RetrieveAPIView):
    """
    get: Details of the selected staging folders and the files it contains.
    """
    serializer_class = StagingFolderSerializer
    # Single-object retrieval by pk from the full queryset.
    queryset = StagingFolderSource.objects.all()
class APIStagingSourceFileImageView(generics.RetrieveAPIView):
    """
    get: Returns an image representation of the selected document.
    """
    # No serializer: retrieve() streams raw image bytes instead of JSON.
    def get_serializer(self, *args, **kwargs):
        return None

    def get_serializer_class(self):
        return None

    def retrieve(self, request, *args, **kwargs):
        staging_folder = get_object_or_404(
            StagingFolderSource, pk=self.kwargs['staging_folder_pk']
        )
        staging_file = staging_folder.get_file(
            encoded_filename=self.kwargs['encoded_filename']
        )
        # Optional ?size= query parameter passed straight through to get_image.
        size = request.GET.get('size')
        # NOTE(review): content_type 'image' is not a complete MIME type
        # (e.g. 'image/png') — confirm downstream consumers accept it.
        return HttpResponse(
            staging_file.get_image(
                size=size,
                transformations=Transformation.objects.get_for_model(
                    staging_folder, as_classes=True
                )
            ), content_type='image'
        )
|
package org.jeecg.modules.resource.service;
import org.jeecg.modules.resource.entity.TbResource;
import com.baomidou.mybatisplus.extension.service.IService;
/**
 * @Description: Service for the TB resource table (TB资源表); CRUD comes from
 *               the MyBatis-Plus IService base.
 * @Author: jeecg-boot
 * @Date: 2022-01-24
 * @Version: V1.0
 */
public interface ITbResourceService extends IService<TbResource> {
}
|
/**
* Copyright 2020, SumUp Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { renderHook, act } from '@testing-library/react-hooks';
import { useVisibilityChange } from './useVisibilityChange';
describe('useVisibilityChange', () => {
  // Restore document.hidden so later suites see the default (visible) state.
  afterAll(() => {
    Object.defineProperty(document, 'hidden', {
      configurable: true,
      value: false,
    });
  });
  it('should execute the provided callback every time visibility changes with the current visibility as prop', () => {
    const visibilityHandler = jest.fn();
    renderHook(() => useVisibilityChange(visibilityHandler));
    // Simulate the tab being hidden: document.hidden is read-only, so it is
    // redefined rather than assigned.
    Object.defineProperty(document, 'hidden', {
      configurable: true,
      value: true,
    });
    act(() => {
      document.dispatchEvent(new Event('visibilitychange'));
    });
    // hidden === true means the page is NOT visible, hence the callback
    // is expected to receive false.
    expect(visibilityHandler).toHaveBeenCalledWith(false);
  });
});
|
<gh_stars>0
package execution
import (
"github.com/jensneuse/graphql-go-tools/pkg/document"
"github.com/jensneuse/graphql-go-tools/pkg/lookup"
"github.com/jensneuse/graphql-go-tools/pkg/validation"
"github.com/jensneuse/graphql-go-tools/pkg/validation/rules"
)
// SubscriptionSingleRootField returns a validation rule enforcing that every
// subscription operation selects exactly one root field.
// https://facebook.github.io/graphql/draft/#sec-Single-root-field
func SubscriptionSingleRootField() rules.Rule {
	return func(l *lookup.Lookup, w *lookup.Walker) validation.Result {
		for _, operation := range l.OperationDefinitions() {
			// Only subscription operations are constrained by this rule.
			if operation.OperationType != document.OperationTypeSubscription {
				continue
			}
			rootFields := l.SelectionSetNumRootFields(l.SelectionSet(operation.SelectionSet))
			if rootFields > 1 {
				return validation.Invalid(validation.SubscriptionSingleRootField, validation.SubscriptionsMustHaveMaxOneRootField, operation.Position, operation.Name)
			}
		}
		return validation.Valid()
	}
}
|
<reponame>akito0107/favalid
import { Messager, tester, ValueValidator } from "../core";
// Builds a validator that passes only when the checked Date is strictly
// after the given reference date; `messager` supplies the failure message.
export default (date: Date, messager: Messager): ValueValidator => {
  return tester((v: Date) => {
    return date < v;
  }, messager);
};
|
#!/bin/bash
set -e
set -x
# to pin pyenv version, set the PYENV_COMMIT variable
# to the required version commit identifier/tag like in
# PYENV_COMMIT=v1.0.7
# When SODIUM_INSTALL=system, build and install libsodium globally so the
# bindings link against the system copy instead of the bundled one.
if [[ $SODIUM_INSTALL == 'system' ]]; then
	# Retry the download once: the release host is occasionally flaky.
	wget --timeout=60 https://download.libsodium.org/libsodium/releases/LATEST.tar.gz || \
	wget --timeout=60 https://download.libsodium.org/libsodium/releases/LATEST.tar.gz
	tar zxvf LATEST.tar.gz
	cd libsodium-*
	./configure
	make
	make check
	sudo make install
	sudo ldconfig
fi
pip install -U tox coverage
# The pypy toxenv needs a pypy interpreter; install one via a fresh pyenv.
if [[ "${TOXENV}" == "pypy" ]]; then
	rm -rf ~/.pyenv
	git clone https://github.com/yyuu/pyenv.git ~/.pyenv
	# Pin pyenv when PYENV_COMMIT is set (see the header comment).
	git -C ~/.pyenv reset --hard ${PYENV_COMMIT:-HEAD}
	PYENV_ROOT="$HOME/.pyenv"
	PATH="$PYENV_ROOT/bin:$PATH"
	eval "$(pyenv init -)"
	pyenv install pypy-5.3.1
	pyenv global pypy-5.3.1
fi
|
import React, {useState} from 'react';
export default () => {
const [count, setCount] = useState(0);
const onPress = () => setCount(count + 1);
return (
<div>
<button onClick={onPress}>Increment</button>
<p>{count}</p>
</div>
);
}; |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import React from 'react';
import { mount } from 'enzyme';
import {
findTestSubject,
requiredProps,
takeMountedSnapshot,
} from '../../test';
import { keys } from '../../services';
import {
CANCEL_BUTTON,
CONFIRM_BUTTON,
EuiConfirmModal,
} from './confirm_modal';
// Fresh mock callbacks before every test so call counts never leak
// between cases.
let onConfirm: jest.Mock;
let onCancel: jest.Mock;

beforeEach(() => {
  onConfirm = jest.fn();
  onCancel = jest.fn();
});
// Snapshot and interaction tests for EuiConfirmModal.
describe('EuiConfirmModal', () => {
  test('renders EuiConfirmModal', () => {
    const component = mount(
      <EuiConfirmModal
        title="A confirmation modal"
        onCancel={() => {}}
        onConfirm={onConfirm}
        cancelButtonText="Cancel Button Text"
        confirmButtonText="Confirm Button Text"
        {...requiredProps}
      >
        This is a confirmation modal example
      </EuiConfirmModal>
    );
    expect(
      takeMountedSnapshot(component, { hasArrayOutput: true })
    ).toMatchSnapshot();
  });

  test('renders EuiConfirmModal without EuiModalBody, if empty', () => {
    const component = mount(
      <EuiConfirmModal
        title="A confirmation modal"
        onCancel={() => {}}
        onConfirm={onConfirm}
        cancelButtonText="Cancel Button Text"
        confirmButtonText="Confirm Button Text"
        {...requiredProps}
      />
    );
    expect(
      takeMountedSnapshot(component, { hasArrayOutput: true })
    ).toMatchSnapshot();
  });

  test('onConfirm', () => {
    const component = mount(
      <EuiConfirmModal
        onCancel={onCancel}
        onConfirm={onConfirm}
        cancelButtonText="Cancel Button Text"
        confirmButtonText="Confirm Button Text"
      />
    );
    findTestSubject(component, 'confirmModalConfirmButton').simulate('click');
    expect(onConfirm).toHaveBeenCalledTimes(1);
    expect(onCancel).toHaveBeenCalledTimes(0);
  });

  test('isLoading', () => {
    const component = mount(
      <EuiConfirmModal
        onCancel={onCancel}
        onConfirm={onConfirm}
        isLoading
        cancelButtonText="Cancel Button Text"
        confirmButtonText="Confirm Button Text"
      />
    );
    // While loading, clicking confirm must be a no-op.
    findTestSubject(component, 'confirmModalConfirmButton').simulate('click');
    expect(onConfirm).toHaveBeenCalledTimes(0);
  });

  test('onConfirm can be disabled', () => {
    const component = mount(
      <EuiConfirmModal
        onCancel={onCancel}
        onConfirm={onConfirm}
        cancelButtonText="Cancel Button Text"
        confirmButtonText="Confirm Button Text"
        confirmButtonDisabled={true}
      />
    );
    findTestSubject(component, 'confirmModalConfirmButton').simulate('click');
    expect(onConfirm).toHaveBeenCalledTimes(0);
    expect(onCancel).toHaveBeenCalledTimes(0);
  });

  describe('onCancel', () => {
    // Fixed typo in the test name: 'triggerd' -> 'triggered'.
    test('triggered by click', () => {
      const component = mount(
        <EuiConfirmModal
          onCancel={onCancel}
          onConfirm={onConfirm}
          cancelButtonText="Cancel Button Text"
          confirmButtonText="Confirm Button Text"
        />
      );
      findTestSubject(component, 'confirmModalCancelButton').simulate('click');
      expect(onConfirm).toHaveBeenCalledTimes(0);
      expect(onCancel).toHaveBeenCalledTimes(1);
    });

    test('triggered by esc key', () => {
      const component = mount(
        <EuiConfirmModal
          onCancel={onCancel}
          onConfirm={onConfirm}
          cancelButtonText="Cancel Button Text"
          confirmButtonText="Confirm Button Text"
          data-test-subj="modal"
        />
      );
      findTestSubject(component, 'modal').simulate('keydown', {
        key: keys.ESCAPE,
      });
      expect(onConfirm).toHaveBeenCalledTimes(0);
      expect(onCancel).toHaveBeenCalledTimes(1);
    });
  });

  describe('defaultFocusedButton', () => {
    test('is cancel', (done) => {
      const component = mount(
        <EuiConfirmModal
          onCancel={onCancel}
          onConfirm={onConfirm}
          cancelButtonText="Cancel Button Text"
          confirmButtonText="Confirm Button Text"
          defaultFocusedButton={CANCEL_BUTTON}
        />
      );
      // The auto-focus implementation waits a frame before focusing.
      requestAnimationFrame(() => {
        const button = findTestSubject(
          component,
          'confirmModalCancelButton'
        ).getDOMNode();
        expect(document.activeElement).toEqual(button);
        done();
      });
    });

    test('is confirm', (done) => {
      const component = mount(
        <EuiConfirmModal
          onCancel={onCancel}
          onConfirm={onConfirm}
          cancelButtonText="Cancel Button Text"
          confirmButtonText="Confirm Button Text"
          defaultFocusedButton={CONFIRM_BUTTON}
        />
      );
      // The auto-focus implementation waits a frame before focusing.
      requestAnimationFrame(() => {
        const button = findTestSubject(
          component,
          'confirmModalConfirmButton'
        ).getDOMNode();
        expect(document.activeElement).toEqual(button);
        done();
      });
    });
  });
});
|
// Copyright 2017 The TIE Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Directive for the TIE menu view.
*/
// Directive rendering the TIE landing menu: a greeting plus one
// <menu-question-card> per question id.
tieMenu.directive('menuView', [function() {
  return {
    restrict: 'E',
    scope: {},
    // Fix: the question-list wrapper div was never closed (the template
    // had a stray opening `<div>` where the closing tag belongs), which
    // left malformed HTML around the <style> block.
    template: `
      <div class="tie-menu-greeting">
        <div>
          Welcome to TIE (Technical Interview Exercises)!
        </div>
        <div class="tie-menu-greeting-description">
          Click below to get started on a coding exercise:
        </div>
      </div>
      <div class="tie-menu-question-list-wrapper">
        <div ng-repeat="questionId in questionIds">
          <menu-question-card question-id="{{questionId}}"></menu-question-card>
        </div>
      </div>
      <style>
        .tie-menu-greeting {
          font-size: 30px;
          padding-top: 200px;
          text-align: center;
        }
        .tie-menu-greeting-description {
          font-size: 20px;
        }
        .tie-menu-question-list-wrapper {
          align-content: flex-end;
          align-items: center;
          display: flex;
          flex-direction: row;
          justify-content: center;
          flex-flow: row wrap;
          flex-wrap: wrap;
          margin: auto;
          padding-top: 50px;
          width: 60%;
        }
      </style>
    `,
    controller: ['$scope',
      function($scope) {
        // The titles of the questions this menu page is displaying.
        $scope.questionIds = [
          'reverseWords',
          'checkBalancedParentheses',
          'findMostCommonCharacter',
          'isPalindrome',
          'internationalization',
          'runLengthEncoding'
        ];
      }
    ]
  };
}]);
|
package de.lmu.cis.ocrd.ml.features;
import com.google.gson.JsonObject;
import de.lmu.cis.ocrd.ml.OCRToken;
import de.lmu.cis.ocrd.util.JSON;
import org.apache.commons.text.similarity.LevenshteinDistance;
/**
 * Feature computing the Levenshtein (edit) distance between the master
 * OCR token and a slave OCR token, bounded by a configurable threshold.
 */
public class LevenshteinDistanceFeature extends NamedDoubleFeature {
    private final LevenshteinDistance boundedDistance;

    @SuppressWarnings("WeakerAccess")
    LevenshteinDistanceFeature(String name, int threshold) {
        super(name);
        boundedDistance = new LevenshteinDistance(threshold);
    }

    public LevenshteinDistanceFeature(JsonObject o, ArgumentFactory ignore) {
        this(JSON.mustGetNameOrType(o), JSON.mustGet(o, "maxThreshold").getAsInt());
    }

    /** Returns the configured threshold, or -1 when the metric is unbounded. */
    public int getThreshold() {
        final Integer threshold = boundedDistance.getThreshold();
        if (threshold == null) {
            return -1;
        }
        return threshold;
    }

    @Override
    protected double doCalculate(OCRToken token, int i, int n) {
        assert(handlesOCR(i, n));
        final String masterWord = token.getMasterOCR().getWordNormalized();
        final String slaveWord = getWord(token, i, n).getWordNormalized();
        final int distance = boundedDistance.apply(masterWord, slaveWord);
        if (distance == -1) {
            // Threshold exceeded: LevenshteinDistance reports -1; clamp
            // the feature value to the threshold itself.
            return getThreshold();
        }
        return distance;
    }

    @Override
    public boolean handlesOCR(int i, int n) {
        return handlesEverySlaveOCR(i, n);
    }
}
|
#include <stdio.h>
/*
 * Reads two integers from stdin and prints every integer in the
 * inclusive range [low, high], one per line. Prints nothing after the
 * header when low > high.
 */
int main(void) {
    int low, high, i;

    /* Ask the user to enter two integers */
    printf("Please enter two integers: ");
    /* scanf's return value was previously ignored; on malformed input
     * low/high would be used uninitialized (undefined behavior). */
    if (scanf("%d%d", &low, &high) != 2) {
        fprintf(stderr, "Invalid input: expected two integers.\n");
        return 1;
    }

    /* Print all numbers between the two entered integers */
    printf("The numbers between %d and %d are:\n", low, high);
    for (i = low; i <= high; i++) {
        printf("%d\n", i);
    }
    return 0;
}
package news_manager_client
import (
grpctransport "github.com/go-kit/kit/transport/grpc"
"github.com/the-gigi/delinkcious/pb/news_service/pb"
om "github.com/the-gigi/delinkcious/pkg/object_model"
"google.golang.org/grpc"
)
// DisconnectFunc releases the client's underlying gRPC connection.
// It is safe to call even when the connection was never established.
type DisconnectFunc func() error

// NewClient dials the news-manager gRPC service at grpcAddr and returns
// an om.NewsManager client plus a disconnect function the caller must
// invoke to close the connection.
func NewClient(grpcAddr string) (cli om.NewsManager, disconnectFunc DisconnectFunc, err error) {
	conn, err := grpc.Dial(grpcAddr, grpc.WithInsecure())
	// Build the disconnect closure before checking err so callers always
	// receive a usable (nil-safe) function, even when dialing failed.
	disconnectFunc = func() (err error) {
		if conn == nil {
			return
		}
		err = conn.Close()
		return
	}
	if err != nil {
		return
	}
	// go-kit gRPC transport: one endpoint per RPC method.
	var getNewsEndpoint = grpctransport.NewClient(
		conn, "pb.News", "GetNews",
		encodeGetNewsRequest,
		decodeGetNewsResponse,
		pb.GetNewsResponse{},
	).Endpoint()
	cli = EndpointSet{
		GetNewsEndpoint: getNewsEndpoint,
	}
	return
}
|
#!/bin/bash
# Build Limba bundles for Neverball and its dependencies, collect the
# resulting .ipk packages into _bundlepool, and create a Limba repository.
set -e

LB_CHROOT_NAME="$1"
if [ -z "$1" ]; then
    LB_CHROOT_NAME="none"
fi

LB_ARGS="--chroot=$LB_CHROOT_NAME"
LB_ARGS="$LB_ARGS --ignore-foundations --user=`id -u` --group=`id -g`"

export LIMBA_FULLNAME="Matthias Klumpp"
export LIMBA_EMAIL="mak@debian.org"
export LIMBA_TARGET_REPO="master"

mkdir -p _bundlepool

function build {
    sudo -E limba-build run $LB_ARGS
}

function movetopool {
    mv -f lipkg/*.ipk* ../_bundlepool
}

# Build one package: $1 = source directory, $2 = human-readable name.
# Removes stale .ipk files, builds, and moves the result into the pool.
# (Replaces eight copy-pasted cd/find/echo/build/movetopool stanzas.)
function build_package {
    cd "$1"
    find . -name "*.ipk*" -exec rm -f {} \;
    echo "Building $2"
    build
    movetopool
    cd ..
}

build_package libjpeg-turbo "libJPEG-Turbo"
build_package libpng "libpng"
build_package physfs "PhysFS"
build_package freetype "FreeType"
build_package SDL2-full "SDL2"
build_package libogg "libogg"
build_package libvorbis "libvorbis"
#build_package libXScrnSaver "libXScrnSaver"
build_package neverball "Neverball"

echo ""
echo "____________________"
echo ""
echo "Creating repository..."
rm -rf out_lirepo
mkdir -p out_lirepo
cd out_lirepo
for fname in ../_bundlepool/*.ipk
do
    limba-build repo-add $fname
done
cd ..
chmod -R 0755 out_lirepo
echo "All done."
|
#!/bin/bash
#
# This script runs qemu and creates a symbolic link named serial.pts
# to the qemu serial console (pts based). Because the qemu pts
# allocation is dynamic, it is preferable to have a stable path to
# avoid visual inspection of the qemu output when connecting to the
# serial console.

case $ARCH in
x86)
    qemu=qemu-system-i386
    ;;
arm)
    qemu=qemu-system-arm
    ;;
*)
    # Fail fast: previously an unset/unknown ARCH left $qemu empty, so
    # the script tried to execute the qemu arguments as a command.
    echo "error: unsupported or unset ARCH '${ARCH}' (expected x86 or arm)" >&2
    exit 1
    ;;
esac

# Listen on the monitor socket, ask qemu for its chardevs, extract the
# pts device path and symlink it to the stable name serial.pts.
echo info chardev | nc -U -l qemu.mon | egrep -o "/dev/pts/[0-9]*" | xargs -I PTS ln -fs PTS serial.pts &

$qemu "$@" -monitor unix:qemu.mon

rm qemu.mon
rm -f serial.pts
|
<filename>app/actions/transactions/fetchEtherTransactions.js<gh_stars>1-10
import web3Service from 'quid-wallet/app/services/web3Service';
import etherscanService from 'quid-wallet/app/services/etherscanApiService';
import { actions } from './actions';
// TODO move to constants.js
// Sentinel "token address" used so plain-ether transfers can be stored
// in the same state slices as ERC20 token transfers.
const ETHER_ASSET_DUMMY_ADDRESS = '0x000_ether';

// Thunk: fetches ether transactions for `address` since the last
// checked block, stores the transfers, and records the new
// high-water-mark block number. Returns the fetched transfers.
export const fetchEtherTransactions = (address) => {
    return async (dispatch, getState) => {
	dispatch({type: actions.FETCHING_TOKEN_TRANSACTIONS, payload: {address, tokenAddress: ETHER_ASSET_DUMMY_ADDRESS}});
	const state = getState();
	const web3 = web3Service.getWeb3();
	// Resume from the last block already scanned for this address
	// (direction key 'IN&OUT'); 0 means "scan from genesis".
	const lastCheckBlockDct = (state.data.lastBlockNumberCheck[address] || {});
	const lastBlockChecked = lastCheckBlockDct[`${ETHER_ASSET_DUMMY_ADDRESS}-IN&OUT`] || 0;
	try {
	    // get current block number
	    const curBlockNumber = await web3.eth.getBlockNumberPromise();
	    const transfers = await etherscanService.getTransactions({
		address,
		startBlock: lastBlockChecked,
		endBlock: curBlockNumber
	    });
	    dispatch({type: actions.CREATE_ASSET_TRANSFERS, payload: transfers});
	    dispatch({type: actions.GOT_TOKEN_TRANSACTIONS, payload: {
		address,
		tokenAddress: ETHER_ASSET_DUMMY_ADDRESS,
		blockNumber: curBlockNumber,
		direction: 'IN&OUT'
	    }});
	    dispatch({type: actions.STOP_SPINNER});
	    return transfers;
	} catch (err) {
	    // Stop the spinner but still surface the failure to callers.
	    dispatch({type: actions.STOP_SPINNER});
	    throw (err);
	}
    };
}
// const fetchInternalTranscations = ({lastCheckBlockDct, address, dispatch, curBlockNumber, AssetTransfer}) => {
// const lastBlockChecked = lastCheckBlockDct[`${ETHER_ASSET_DUMMY_ADDRESS}-INTERNAL`] || 0;
// etherscanService.getInternalTransactions({
// address,
// startBlock: lastBlockChecked,
// endBlock: curBlockNumber})
// .then((transactions) => {
// const transfers = transactions.map(tx => {
// const direction = (tx.from === address) ? 'OUT' : 'IN';
// const counterpartyAddress = (direction === 'IN') ? tx.from : tx.to;
// const rawValue = tx.value;
// const value = displayBigNumber(rawValue, 18);
// const params = {
// txHash: tx.hash,
// address,
// tokenAddress: ETHER_ASSET_DUMMY_ADDRESS,
// timestamp: tx.timeStamp,
// blockNumber: tx.blockNumber,
// counterpartyAddress,
// direction,
// value,
// rawValue,
// status: (1 - tx.isError)
// };
// const transfer = AssetTransfer.createFromParams(params);
// return transfer;
// });
// dispatch({type: actions.CREATE_ASSET_TRANSFERS, payload: transfers});
// dispatch({type: actions.GOT_TOKEN_TRANSACTIONS, payload: {
// address,
// tokenAddress: ETHER_ASSET_DUMMY_ADDRESS,
// blockNumber: curBlockNumber,
// direction: 'INTERNAL'
// }});
// });
// };
|
import static org.junit.jupiter.api.Assertions.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.jupiter.api.Test;
class ProblemTest
{
@Test
void testExampleFromProblem1()
{
Set<String> words = new HashSet<String>();
words.add("quick");
words.add("brown");
words.add("the");
words.add("fox");
List<String> expected = new ArrayList<>();
expected.add("the");
expected.add("quick");
expected.add("brown");
expected.add("fox");
assertEquals(expected, Problem.getOriginalSentence("thequickbrownfox", words));
}
@Test
void testExampleFromProblem2()
{
Set<String> words = new HashSet<String>();
words.add("bed");
words.add("bath");
words.add("bedbath");
words.add("and");
words.add("beyond");
List<String> expected1 = new ArrayList<>();
expected1.add("bed");
expected1.add("bath");
expected1.add("and");
expected1.add("beyond");
List<String> expected2 = new ArrayList<>();
expected2.add("bedbath");
expected2.add("and");
expected2.add("beyond");
assertTrue(expected1.equals(Problem.getOriginalSentence("bedbathandbeyond", words)) ||
expected2.equals(Problem.getOriginalSentence("bedbathandbeyond", words)));
}
@Test
void testNullInput()
{
Set<String> words = new HashSet<String>();
words.add("bed");
words.add("bath");
words.add("bedbath");
words.add("and");
words.add("beyond");
assertNull(Problem.getOriginalSentence(null, words));
assertNull(Problem.getOriginalSentence("", null));
}
@Test
void testEmptyInput()
{
Set<String> words = new HashSet<String>();
words.add("bed");
words.add("bath");
words.add("bedbath");
words.add("and");
words.add("beyond");
assertEquals(new ArrayList<>(), Problem.getOriginalSentence("", words));
assertEquals(new ArrayList<>(), Problem.getOriginalSentence("", new HashSet<>()));
}
@Test
void testNoReconstructionPossible()
{
Set<String> words = new HashSet<String>();
words.add("quick");
words.add("brown");
words.add("the");
words.add("fox");
assertEquals(null, Problem.getOriginalSentence("thequickbrownfo", words));
}
} |
import autoprefixer from 'autoprefixer';
import combineSelectors from 'postcss-combine-duplicated-selectors';
import combineMediaQueries from 'postcss-combine-media-query';
import compress from 'vite-plugin-compression';
import imageMin from 'vite-plugin-imagemin';
import ViteFonts from 'vite-plugin-fonts'
const path = require('path')

// Production is the only env-dependent switch; it toggles minification.
const isProd = process.env.NODE_ENV === 'production';

// Vite build configuration: compiles the fields JS entry plus two SCSS
// bundles, optimizes images/SVGs, brotli-compresses output, and loads
// the Material Icons font from Google Fonts.
export default {
  publicDir : 'public',
  // Treat OpenType font files as static assets.
  assetsInclude: ['**/*.otf'],
  css: {
    postcss: {
      plugins: [
        combineMediaQueries(),
        combineSelectors({ removeDuplicatedValues: true }),
        autoprefixer(),
      ],
    },
  },
  build: {
    minify: isProd,
    root: path.join(__dirname, "src"),
    outDir: path.join(__dirname, "dist"),
    rollupOptions: {
      input: {
        'fields': path.resolve(__dirname, 'src', 'js', 'Fields.js'),
        'style': path.resolve(__dirname, 'src', 'scss', 'style.scss'),
        'demo': path.resolve(__dirname, 'src', 'scss', 'demo.scss'),
      },
      output: {
        // Stable, non-hashed filenames so templates can reference them.
        entryFileNames: `[name].dist.js`,
        chunkFileNames: `[name].dist.js`,
        assetFileNames: `[name].dist.[ext]`
      }
    },
  },
  plugins: [
    imageMin({
      svgo: {
        // NOTE(review): svgo plugin names are conventionally
        // lowerCamelCase (removeTitle, removeDesc, removeViewBox);
        // 'RemoveTitle'/'RemoveDescription'/'RemoveViewBox' may not
        // match any plugin — verify against the bundled svgo version.
        plugins: [
          { name: 'RemoveTitle', active: false },
          { name: 'RemoveDescription', active: false },
          { name: 'RemoveViewBox', active: false },
          { name: 'removeDimensions', active: true },
          { name: 'removeScriptElement', active: true },
          { name: 'removeStyleElement', active: true },
        ],
      },
    }),
    compress({
      algorithm: 'brotliCompress',
    }),
    ViteFonts({
      google: {
        families: ['Material Icons']
      },
    }),
  ],
  server: {
    // open: true,
    // proxy: {
    //   "/base": {
    //     target: "http://localhost:19000",
    //     // changeOrigin: true,
    //     rewrite: (path) => path.replace(/^\/base/, ""),
    //   },
    // },
  },
};
<filename>src/main/java/br/com/zupacademy/breno/mercadolivre/categories/CategoryRegisterRequest.java
package br.com.zupacademy.breno.mercadolivre.categories;
import br.com.zupacademy.breno.mercadolivre.validations.ExistId;
import br.com.zupacademy.breno.mercadolivre.validations.UniqueValue;
import com.fasterxml.jackson.annotation.JsonProperty;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Positive;
import java.util.Optional;
/**
 * Request payload for registering a category, optionally nested under an
 * existing super category.
 */
public class CategoryRegisterRequest {

	@NotBlank
	@UniqueValue(domainClass = Category.class, fieldName = "name")
	private String name;

	// Validated to reference an existing Category id; null means the new
	// category is a root category.
	@JsonProperty(value = "superCategory")
	@Positive
	@ExistId(domainClass = Category.class, fieldName = "id", permiteNulo = true)
	private Long superCategoryId;

	public CategoryRegisterRequest(String name, Long superCategoryId) {
		this.name = name;
		this.superCategoryId = superCategoryId;
	}

	/**
	 * Converts this request into a Category entity, resolving the
	 * optional super category through the repository.
	 */
	public Optional<Category> convert(CategoryRepository repository) {
		Category category = new Category(name);
		if (superCategoryId != null) {
			// Replaces a bare Optional.get(): same exception type, but
			// with a diagnostic message if the id vanished between
			// @ExistId validation and conversion.
			Category superCategory = repository.findById(superCategoryId)
					.orElseThrow(() -> new java.util.NoSuchElementException(
							"super category not found: id=" + superCategoryId));
			category.setSuperCategory(superCategory);
		}
		return Optional.of(category);
	}
}
|
# Encodes an anime's PNG frame sequence (stored on S3) into an MP4 and
# uploads it next to the frames. Skips work if the MP4 already exists.
# $1 = anime id, $2 = frame rate, $3 = S3 bucket name.
function main () {
    SRC_ANIME_ID=$1
    F_RATE=$2
    S3_BUCKET=$3
    echo "$SRC_ANIME_ID, $F_RATE, $S3_BUCKET"
    # Animes are sharded into 1000 S3 prefixes by id modulo.
    SRC_MOD=$(($SRC_ANIME_ID % 1000))
    S3_BASE_PATH=s3://$S3_BUCKET/animes/$SRC_MOD/anime_$SRC_ANIME_ID
    MP4_FILENAME=${SRC_ANIME_ID}.mp4
    S3_DST_MP4_PATH=$S3_BASE_PATH/$MP4_FILENAME
    # exist check
    #echo $S3_DST_MP4_PATH
    EXIST_MP4=`aws s3 ls $S3_DST_MP4_PATH | wc -l | tr -d ' ' || :`
    if [ $EXIST_MP4 -eq 1 ]; then
        #echo "already exist at $S3_DST_MP4_PATH"
        echo exist
        exit 0
    fi
    #echo "start encoding mp4...: $S3_DST_MP4_PATH"
    # mkdir
    TMP_DIR=/tmp/anime/$SRC_ANIME_ID
    WHITE_DIR=/tmp/anime/white
    mkdir -p $TMP_DIR
    mkdir -p $WHITE_DIR
    cd $TMP_DIR
    # sync images
    #echo $S3_BASE_PATH
    aws s3 sync $S3_BASE_PATH . || :
    # size
    # Use the first frame to determine the video dimensions.
    HEAD_IMG_FILE=`ls -1 $TMP_DIR | sort | head -n 1`
    HEAD_IMG_PATH=$TMP_DIR/$HEAD_IMG_FILE
    SIZE=`identify -format '%wx%h' $HEAD_IMG_PATH`
    #echo $HEAD_IMG_PATH
    #echo $SIZE
    # create white
    WHITE_PATH=$WHITE_DIR/white.gif
    `convert -size $SIZE xc:white $WHITE_PATH`
    #ls $WHITE_PATH
    # composite white
    # Flatten transparent PNGs onto a white background in place.
    RES=`find . -type f -name '*.png' | xargs -I {} composite {} $WHITE_PATH {}`
    #echo $RES
    # mp4
    SRC_PATH=$TMP_DIR/${SRC_ANIME_ID}_%d.png
    DST_PATH=$TMP_DIR/$MP4_FILENAME
    # yuv420p + even dimensions for broad player compatibility.
    ffmpeg -y -r $F_RATE -i $SRC_PATH -pix_fmt yuv420p -vf 'scale=trunc(iw/2)*2:trunc(ih/2)*2' $DST_PATH
    #echo $DST_PATH
    # upload mp4 to s3
    aws s3 cp $DST_PATH $S3_DST_MP4_PATH || :
    echo finish
}

# Lambda-style entry point: $1 is a JSON event (possibly with escaped
# quotes); extracts anime_id / frame_rate / bucket and delegates to main.
function handler () {
    echo $JQ_CMD
    INPUT=`echo $1 | sed -e 's/\\\\//g'`
    echo $INPUT
    SRC_ANIME_ID=`echo $INPUT | $JQ_CMD .anime_id`
    F_RATE=`echo $INPUT | $JQ_CMD .frame_rate`
    S3_BUCKET=`echo $INPUT | $JQ_CMD .bucket | sed -e 's/"//g'`
    main $SRC_ANIME_ID $F_RATE $S3_BUCKET
}
|
#!/usr/bin/env bash
# Release pipeline for the jx-java-client: versions the maven project,
# deploys to Sonatype staging, releases the staging repo, tags the
# commit, and propagates the new version with updatebot.
set -e

# ensure we're not on a detached head
git checkout master

# until we switch to the new kubernetes / jenkins credential implementation use git credentials store
git config credential.helper store

# display the current namespace
jx ns -b

#jx step git credentials
jx step gpg credentials

# jx-release-version derives the next semantic version from git history.
export VERSION="$(jx-release-version)"
echo "Setting the maven version to ${VERSION}"
mvn versions:set -DnewVersion=${VERSION}
mvn clean -B
mvn -V -B -e -U install org.sonatype.plugins:nexus-staging-maven-plugin:1.6.7:deploy -P release -P openshift -DnexusUrl=https://oss.sonatype.org -DserverId=oss-sonatype-staging

# now release the sonatype staging repo
jx step nexus release
jx step tag --version ${VERSION}

# Block until the artifact is resolvable, then push the version bump
# to downstream consumers.
jx waitfor artifact -g io.jenkins-x.client -a jx-java-client -v ${VERSION}
updatebot push-version --kind maven io.jenkins-x.client:jx-java-client ${VERSION}
updatebot update
|
/*
* Copyright (c) CERN 2013-2019
*
* Copyright (c) Members of the EMI Collaboration. 2010-2013
* See http://www.eu-emi.eu/partners for details on the copyright
* holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#ifndef ARCHIVINGSTATEUPDATER_H_
#define ARCHIVINGSTATEUPDATER_H_
#include <string>
#include <vector>
#include <boost/thread.hpp>
#include <boost/tuple/tuple.hpp>
#include "db/generic/SingleDbInstance.h"
#include "common/Logger.h"
#include "StateUpdater.h"
using namespace fts3::common;
/**
* A utility for carrying out asynchronous state and timestamp updates,
* which are accumulated and than send to DB at the same time
*/
class ArchivingStateUpdater : public StateUpdater
{

public:
    // this is necessary because otherwise the operator would be hidden by the following one
    using StateUpdater::operator();

    /**
     * Updates the Archiving start time for the given jobs/files
     *
     * Writes straight to the DB; errors are logged and swallowed so a
     * failed timestamp update cannot take down the updater.
     *
     * @param jobs : jobs with respective files
     */
    void operator()(const std::map<std::string, std::map<std::string, std::vector<uint64_t> > > &jobs)
    {
        try {
            db.setArchivingStartTime(jobs);
        }
        catch (std::exception& ex) {
            FTS3_COMMON_LOGGER_NEWLOG(ERR) << ex.what() << commit;
        }
        catch(...) {
            FTS3_COMMON_LOGGER_NEWLOG(ERR) << "Exception while setting the archiving start time!" << commit;
        }
    }

    /**
     * Updates status per file
     *
     * Accumulates the update in the shared `updates` vector (guarded by
     * the mutex); the batch is flushed to the DB by run().
     */
    void operator()(const std::string &jobId, uint64_t fileId, const std::string &state, const JobError &error)
    {
        // lock the vector
        boost::mutex::scoped_lock lock(m);
        updates.emplace_back(jobId, fileId, state, error.String(), error.IsRecoverable());
        FTS3_COMMON_LOGGER_NEWLOG(INFO) << "ARCHIVING Update : "
            << fileId << " " << state << " " << error.String() << " " << jobId << " " << error.IsRecoverable() << commit;
    }

    /// Destructor
    virtual ~ArchivingStateUpdater() {}

    using StateUpdater::recover;

private:
    friend class QoSServer;

    /// Default constructor
    ArchivingStateUpdater() : StateUpdater("_archiving") {}

    /// Copy constructor
    ArchivingStateUpdater(ArchivingStateUpdater const &) = delete;

    /// Assignment operator
    ArchivingStateUpdater & operator=(ArchivingStateUpdater const &) = delete;

    /// Worker entry point: flushes accumulated updates through the DB
    /// interface (see StateUpdater::runImpl).
    void run()
    {
        runImpl(&GenericDbIfce::updateArchivingState);
    }

    //TODO check if this is needed
    /// Re-queues failed updates into the in-memory batch AND persists
    /// each as a staging message so it can be restored on the next run.
    /// NOTE(review): items are both re-inserted into `updates` and
    /// written via the producer — presumably intentional; confirm.
    void recover(const std::vector<MinFileStatus> &recover)
    {
        if (!recover.empty()) {
            // lock the vector
            boost::mutex::scoped_lock lock(m);
            // put the items back
            updates.insert(updates.end(), recover.begin(), recover.end());
        }

        fts3::events::MessageBringonline msg;
        for (auto itFind = recover.begin(); itFind != recover.end(); ++itFind)
        {
            msg.set_file_id(itFind->fileId);
            msg.set_job_id(itFind->jobId);
            msg.set_transfer_status(itFind->state);
            msg.set_transfer_message(itFind->reason);
            //store the states into fs to be restored in the next run
            producer.runProducerStaging(msg);
        }
    }
};
#endif // ARCHIVINGSTATEUPDATER_H_
|
#!/bin/bash
# Aligns subcortical fMRI data to MNI space via an affine (baby -> adult
# MNI) transform, then resamples parcels for CIFTI processing.
set -e
script_name="SubcorticalAlign_MNI_Affine.sh"
echo "${script_name}: START"

# Positional arguments (echoed for provenance in pipeline logs).
AtlasSpaceFolder="$1"
echo "${script_name}: AtlasSpaceFolder: ${AtlasSpaceFolder}"
ROIFolder="$2"
echo "${script_name}: ROIFolder: ${ROIFolder}"
FinalfMRIResolution="$3"
echo "${script_name}: FinalfMRIResolution: ${FinalfMRIResolution}"
ResultsFolder="$4"
echo "${script_name}: ResultsFolder: ${ResultsFolder}"
NameOffMRI="$5"
echo "${script_name}: NameOffMRI: ${NameOffMRI}"
SmoothingFWHM="$6"
echo "${script_name}: SmoothingFWHM: ${SmoothingFWHM}"
BrainOrdinatesResolution="$7"
echo "${script_name}: BrainOrdinatesResolution: ${BrainOrdinatesResolution}"

VolumefMRI="${ResultsFolder}/${NameOffMRI}"
echo "${script_name}: VolumefMRI: ${VolumefMRI}"

# Convert FWHM to gaussian sigma: sigma = FWHM / (2*sqrt(2*ln(2))).
Sigma=`echo "$SmoothingFWHM / ( 2 * ( sqrt ( 2 * l ( 2 ) ) ) )" | bc -l`
echo "${script_name}: Sigma: ${Sigma}"

#NOTE: wmparc has dashes in structure names, which -cifti-create-* won't accept
#ROIs files have acceptable structure names
#deal with fsl_sub being silly when we want to use numeric equality on decimals
unset POSIXLY_CORRECT

############# INJECTING BABY -> MNI AFFINE TRANSFORM ###############
flirt -in "$VolumefMRI".nii.gz -ref "$ROIFolder"/Atlas_ROIs."$BrainOrdinatesResolution".nii.gz -applyxfm -init "$HCPPIPEDIR_Templates"/InfMNI_2AdultMNI_Step2.mat -out "$VolumefMRI"_2MNI.nii.gz
# Testing this out.
${CARET7DIR}/wb_command -volume-affine-resample "$ROIFolder"/ROIs."$BrainOrdinatesResolution".nii.gz "$HCPPIPEDIR_Templates"/InfMNI_2AdultMNI_Step2.mat "$VolumefMRI"_2MNI.nii.gz ENCLOSING_VOXEL "$ResultsFolder"/ROIs."$BrainOrdinatesResolution".nii.gz -flirt "$ROIFolder"/ROIs."$BrainOrdinatesResolution".nii.gz "$VolumefMRI"_2MNI.nii.gz
####################################################################

## For babies, we have to use volume-parcel-resampling-generic, otherwise this will not map well. We will have to redo with current HCP pipeline if/when we have our own volume space.
# Fast path: when the brainordinate and fMRI resolutions match, resample
# parcels directly; otherwise regenerate ROIs at the fMRI resolution.
if [ 1 -eq `echo "$BrainOrdinatesResolution == $FinalfMRIResolution" | bc -l` ] ; then
    ########## EDIT VolumefMRI input ##############
    ${CARET7DIR}/wb_command -volume-parcel-resampling "$VolumefMRI"_2MNI.nii.gz "$ROIFolder"/ROIs."$BrainOrdinatesResolution".nii.gz "$ROIFolder"/Atlas_ROIs."$BrainOrdinatesResolution".nii.gz $Sigma "$VolumefMRI"_AtlasSubcortical_s"$SmoothingFWHM".nii.gz -fix-zeros
else
    applywarp --interp=nn -i "$AtlasSpaceFolder"/wmparc.nii.gz -r "$VolumefMRI"_2MNI.nii.gz -o "$ResultsFolder"/wmparc."$FinalfMRIResolution".nii.gz
    ##### INJECTING BABY -> MNI AFFINE TRANSFORM ######
    flirt -in "$AtlasSpaceFolder"/ROIs/wmparc."$FinalfMRIResolution".nii.gz -ref "$VolumefMRI"_2MNI.nii.gz -interp nearestneighbour -applyxfm -init "$HCPPIPEDIR_Templates"/InfMNI_2AdultMNI_Step2.mat -out "$AtlasSpaceFolder"/ROIs/wmparc."$FinalfMRIResolution".nii.gz
    ###################################################
    ${CARET7DIR}/wb_command -volume-label-import "$ResultsFolder"/wmparc."$FinalfMRIResolution".nii.gz ${HCPPIPEDIR_Config}/FreeSurferSubcorticalLabelTableLut.txt "$ResultsFolder"/ROIs."$FinalfMRIResolution".nii.gz -discard-others
    ${CARET7DIR}/wb_command -volume-parcel-resampling-generic "$VolumefMRI"_2MNI.nii.gz "$ResultsFolder"/ROIs."$FinalfMRIResolution".nii.gz "$ROIFolder"/Atlas_ROIs."$BrainOrdinatesResolution".nii.gz $Sigma "$VolumefMRI"_AtlasSubcortical_s"$SmoothingFWHM".nii.gz -fix-zeros
    rm "$ResultsFolder"/wmparc."$FinalfMRIResolution".nii.gz
fi
echo "${script_name}: END"
|
#include <iostream>
// Function to calculate the sum of two matrices
/// Computes the element-wise sum of two 3x3 matrices and prints the
/// result, one row per line ("v v v \n" per row).
/// Inputs are read-only, so take them as const (callers passing
/// non-const arrays still convert implicitly in C++).
void sumMatrix(const int A[][3], const int B[][3])
{
    constexpr int kSize = 3;
    int C[kSize][kSize];
    // Loop indices scoped to the loops instead of function-wide ints.
    for (int i = 0; i < kSize; i++)
    {
        for (int j = 0; j < kSize; j++)
        {
            C[i][j] = A[i][j] + B[i][j];
            std::cout << C[i][j] << " ";
        }
        std::cout << std::endl;
    }
}
int main()
{
    // Two sample 3x3 matrices; B is all ones, so the printed sum is
    // each element of A incremented by one.
    int A[3][3] = {{1,2,3},{4,5,6},{7,8,9}};
    int B[3][3] = {{1,1,1},{1,1,1},{1,1,1}};
    sumMatrix(A, B);
    return 0;
}
<reponame>jcward/tinypng-rb
#!/usr/bin/env ruby
#
# Quick'n'dirty script for PNG compression using the TinyPNG service
#
# by <NAME> (jcward.com, github.com/jcward)
#
# Notes:
# - requires sign-up, api key from: https://tinypng.com/developers
# - modifies files in place

require 'json'

$api_key = 'secret'
$min_size = 1024 # ignore files less than 1kb

if (ARGV.length==0) then
  puts "usage: tinypng.rb <dir or file> [more dirs or files]"
end

# A purely numeric argument overrides the minimum file size (bytes).
ARGV.each { |a|
  if a.match(/^\d+$/) then
    $min_size = a.to_i
    puts "Min filesize set to #{$min_size}"
  end
}

$files = []

# Queue f for compression if it is a PNG above the size threshold and
# not already queued.
def push_file f
  if (!$files.include?(f) &&
      f.match(/png$/i) &&
      File.size(f) > $min_size) then
    $files.push(f)
  end
end

# Expand directories recursively; plain files are taken as-is.
ARGV.each { |d_or_f|
  if (File.directory?(d_or_f)) then
    `find "#{d_or_f}" -type f`.split("\n").each { |f|
      push_file f
    }
  else
    push_file d_or_f
  end
}

$files.select! { |f| f.match(/png$/i) }

puts "Running tinypng on #{$files.length} files over #{$min_size} bytes..."

# NOTE(review): file paths are interpolated unquoted into the shell
# commands below; paths containing spaces or shell metacharacters will
# break (or execute unintended commands) — consider quoting.
$files.each { |f|
  data = JSON.parse `curl -s --user api:#{ $api_key } --data-binary @#{f} https://api.tinypng.com/shrink`
  puts " - #{f} #{data['input']['size']} --> #{data['output']['size']} (#{data['output']['ratio']*100}%)"
  # Keep a backup copy in /tmp before overwriting the file in place.
  `cp #{f} /tmp/`
  `curl -s #{data['output']['url']} -o #{f}`
}
|
<reponame>slotix/dfk-parser<gh_stars>100-1000
package fetch
import (
"testing"
"time"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/temoto/robotstxt"
)
// TestIsRobotsTxt checks URL classification by the robots.txt suffix.
func TestIsRobotsTxt(t *testing.T) {
	assert.Equal(t, false, isRobotsTxt("http://google.com/robots.txst"))
	assert.Equal(t, true, isRobotsTxt("http://google.com/robots.txt"))
}

// TestRobotstxtData exercises allow/deny decisions and crawl-delay
// extraction from parsed robots.txt data, then fetches robots data
// through a locally started test server.
func TestRobotstxtData(t *testing.T) {
	addr := "localhost:12345"
	//test AllowedByRobots func
	robots, err := robotstxt.FromString(robotsContent)
	assert.NoError(t, err, "No error returned")
	assert.Equal(t, true, AllowedByRobots("http://"+addr+"/allowed", robots), "Test allowed url")
	assert.Equal(t, false, AllowedByRobots("http://"+addr+"/disallowed", robots), "Test disallowed url")
	assert.Equal(t, time.Duration(0), GetCrawlDelay(robots))

	// nil robots data must be treated as "everything allowed".
	robots = nil
	assert.Equal(t, true, AllowedByRobots("http://"+addr+"/allowed", robots), "Test allowed url")

	serverCfg := Config{
		Host: viper.GetString("DFK_FETCH"),
	}
	htmlServer := Start(serverCfg)
	////////
	rd, err := RobotstxtData(tsURL)
	assert.NoError(t, err, "No error returned")
	assert.NotNil(t, rd, "Not nil returned")
	_, err = RobotstxtData("invalid_host")
	assert.Error(t, err, "error returned")
	htmlServer.Stop()
}
|
<reponame>stankolubomir/remp
require('jquery-typeahead');
// Wires the global search box to jquery-typeahead: queries /search,
// decorates results per entity type, and navigates on selection.
$(document).ready(function() {
    $('.js-typeahead').typeahead({
        dynamic: true,
        filter: false,
        highlight: true,
        maxItem: false,
        cancelButton: false,
        loadingAnimation: true,
        emptyTemplate: "No results found for <strong>{{query}}</strong>",
        source: {
            ajax: {
                url: '/search',
                data: {
                    term: '{{query}}'
                },
            }
        },
        callback: {
            // Decorate raw search results with per-type icons and
            // label markup before they are rendered.
            onPopulateSource: function (node, data, group, path) {
                data.forEach( searchResult => {
                    // TODO: applications should register icons and formatting themselves
                    switch (searchResult['type']) {
                        case 'article':
                            searchResult['title'] = '<i class="zmdi zmdi-library"></i> ' + searchResult['title'];
                            break;
                        case 'author':
                            searchResult['name'] = '<i class="zmdi zmdi-account-box"></i> ' + searchResult['name'];
                            break;
                        case 'segment':
                            searchResult['name'] = '<i class="zmdi zmdi-accounts-list-alt"></i> ' + searchResult['name'];
                            break;
                        case 'campaign':
                            searchResult['name'] = '<i class="zmdi zmdi-ticket-star"></i> ' + searchResult['name'];
                            break;
                        case 'banner':
                            searchResult['name'] = '<i class="zmdi zmdi-collection-folder-image"></i> ' + searchResult['name'];
                            break;
                    }

                    // format tags
                    if (searchResult['tags']) {
                        let tags = '';
                        searchResult['tags'].forEach(tag => {
                            tags += `<span class="label label-default palette-Blue-Grey-50 bg">${tag}</span> `;
                        });
                        searchResult['tags'] = tags;
                    }

                    // format sections
                    if (searchResult['sections']) {
                        let sections = '';
                        searchResult['sections'].forEach(section => {
                            sections += `<span class="label label-default palette-Deep-Orange-50 bg">${section}</span> `;
                        });
                        searchResult['sections'] = sections;
                    }

                    // format banners
                    if (searchResult['banners']) {
                        let banners = '';
                        searchResult['banners'].forEach(section => {
                            banners += `<span class="label label-default palette-Blue-Grey-50 bg">${section}</span> `;
                        });
                        searchResult['banners'] = banners;
                    }
                });

                // Fields the dropdown may display for any result type.
                this.options.display = [
                    'title',
                    'name',
                    'tags',
                    'sections',
                    'code',
                    'banners',
                ];
                $('.typeahead__field .preloader').css('visibility', 'hidden');
                return data;
            },
            // Navigate to the selected result instead of filling the input.
            onClickBefore: function (node, a, item, event) {
                event.preventDefault();
                window.location = item.search_result_url;
            },
            onSubmit: function (node, form, item, event) {
                event.preventDefault();
            },
            onCancel: function () {
                $('.typeahead__field .preloader').css('visibility', 'hidden');
            },
            onSendRequest: function () {
                $('.typeahead__field .preloader').css('visibility', 'visible');
            }
        }
    });
});
|
<filename>demos/3d/shadow/index.js
const {Scene} = spritejs;
const {Camera, Mesh3d, Plane, shaders} = spritejs.ext3d;

// Mount a double-density scene into the #container element.
const mountPoint = document.getElementById('container');
const scene = new Scene({
  container: mountPoint,
  displayRatio: 2,
});

const layer = scene.layer3d('fglayer', {
  camera: {
    fov: 35,
  },
});
layer.camera.attributes.pos = [5, 4, 10];
layer.setOrbit();

// Orthographic camera acting as the shadow-casting light source.
const lightCamera = new Camera(layer.gl, {
  left: -3,
  right: 3,
  bottom: -3,
  top: 3,
  near: 1,
  far: 20,
});
lightCamera.attributes.pos = [3, 10, 3];
lightCamera.lookAt([0, 0, 0]);
const shadow = layer.createShadow({light: lightCamera});

// Textured model mesh that casts a shadow.
const modelTexture = layer.createTexture('https://p2.ssl.qhimg.com/t01155feb9a795bdd05.jpg');
const modelData = layer.loadModel('https://s0.ssl.qhres2.com/static/0baccc5ad3cd5b8c.json');
const modelProgram = layer.createProgram({
  ...shaders.TEXTURE_WITH_SHADOW,
  cullFace: null,
  texture: modelTexture,
});
const airplane = new Mesh3d(modelProgram, {model: modelData});
window.plane = airplane; // exposed for console experimentation
layer.append(airplane);

// Textured ground plane that receives the shadow.
const groundTexture = layer.createTexture('https://p0.ssl.qhimg.com/t01db936e50ab52f10a.jpg');
const groundProgram = layer.createProgram({
  ...shaders.TEXTURE_WITH_SHADOW,
  cullFace: null,
  texture: groundTexture,
});
const ground = new Plane(groundProgram, {
  rotateX: 90,
  scale: 6,
  y: -3,
});
layer.append(ground);

shadow.add(airplane);
shadow.add(ground);
layer.setShadow(shadow);

// Bob and tilt the model over time.
layer.tick((t) => {
  if (airplane) {
    airplane.attributes.z = Math.sin(t * 0.001);
    airplane.attributes.rotateX = Math.sin(t * 0.001 + 2) * 18;
    airplane.attributes.rotateY = Math.sin(t * 0.001 - 4) * -18;
  }
});
<reponame>vaniot-s/sentry
import {t} from 'app/locale';
import {RequestError} from './types';
// Normalized error shape produced by handleError: a machine-readable
// RequestError discriminator plus a translated, human-readable message.
type Error = {
  type: RequestError;
  message: string;
};
// TODO(ts): define the correct error type
// TODO(ts): define the correct error type
/**
 * Normalizes a failed "save data scrubbing rule" API response into a typed
 * {type, message} pair suitable for display.
 *
 * @param error raw request error (shape not yet typed)
 * @returns RequestError discriminator plus a translated message
 */
function handleError(error: any): Error {
  // Optional-chain the property *and* the index: responseJSON may be present
  // while relayPiiConfig is absent, which previously threw a TypeError.
  const errorMessage = error.responseJSON?.relayPiiConfig?.[0];

  if (!errorMessage) {
    return {
      type: RequestError.Unknown,
      message: t('Unknown error occurred while saving data scrubbing rule'),
    };
  }

  if (errorMessage.startsWith('invalid selector: ')) {
    // The server echoes the offending selector on a line prefixed "1 | ".
    for (const line of errorMessage.split('\n')) {
      if (line.startsWith('1 | ')) {
        // slice(3) keeps the space after '|' — NOTE(review): confirm the
        // leading space in the reported selector is intended.
        const selector = line.slice(3);
        return {
          type: RequestError.InvalidSelector,
          message: t('Invalid source value: %s', selector),
        };
      }
    }
  }

  if (errorMessage.startsWith('regex parse error:')) {
    for (const line of errorMessage.split('\n')) {
      if (line.startsWith('error:')) {
        // Drop the "error:" prefix and the trailing position information.
        const regex = line.slice(6).replace(/at line \d+ column \d+/, '');
        return {
          type: RequestError.RegexParse,
          message: t('Invalid regex: %s', regex),
        };
      }
    }
  }

  return {
    type: RequestError.Unknown,
    message: t('An unknown error occurred while saving data scrubbing rule'),
  };
}
export default handleError;
|
#!/bin/sh
# Install the compiler toolchain, debugging/coverage tools and development
# headers for CPython's optional modules (ssl, sqlite3, lzma, tkinter, ...).
# Note: comments cannot be interleaved in the backslash-continued list below.
apt-get update
apt-get -yq install \
    build-essential \
    ccache \
    gdb \
    lcov \
    libbz2-dev \
    libffi-dev \
    libgdbm-dev \
    libgdbm-compat-dev \
    liblzma-dev \
    libncurses5-dev \
    libreadline6-dev \
    libsqlite3-dev \
    libssl-dev \
    lzma \
    lzma-dev \
    tk-dev \
    uuid-dev \
    xvfb \
    zlib1g-dev
|
#!/usr/bin/env bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

# Resolve the repository root relative to this script.
# Quote all expansions so paths containing spaces do not word-split.
SCRIPT_ROOT="$(dirname "${BASH_SOURCE[0]}")/.."
CODEGEN_PKG=${CODEGEN_PKG:-$(cd "${SCRIPT_ROOT}"; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../code-generator)}

# Generate deepcopy/defaulter/client/lister/informer code for the prowjobs v1 API.
"${CODEGEN_PKG}"/generate-groups.sh "deepcopy,defaulter,client,lister,informer" \
  k8s.io/test-infra/prow/client k8s.io/test-infra/prow/apis \
  prowjobs:v1
|
<filename>src/app/layout/configuration/plot-camera/plot-camera.component.ts<gh_stars>1-10
import { Component, OnInit, NgZone, NgModule } from '@angular/core';
import { DomSanitizer } from '@angular/platform-browser';
import { Router } from '@angular/router';
import { HttpClient, HttpHeaders, HttpErrorResponse } from '@angular/common/http';
import { Socket } from 'ng-socket-io';
import * as io from 'socket.io-client';
import { Observable } from 'rxjs/Observable'
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/catch';
import * as data from '../../../../../config';
@Component({
  selector: 'app-plot-camera',
  templateUrl: './plot-camera.component.html',
  styleUrls: ['./plot-camera.component.css']
})
// Displays the list of configured cameras and a live raw-image preview
// streamed over a socket.io connection to the VM.
export class PlotCameraComponent implements OnInit {
  // Base URL of the VM hosting the camera REST and socket endpoints.
  vmUrl: string;
  // Camera descriptors fetched from GET {vmUrl}/cameras.
  cameras: any[];
  // Latest base64-encoded frame received over the socket.
  thumbnail: any;
  // True once at least one raw image has arrived.
  thumbnailImg: Boolean;
  // Access token from localStorage; currently unused (auth headers commented out).
  token: string;
  socket: SocketIOClient.Socket;

  constructor(public router: Router, private http: HttpClient, private zone: NgZone, public domSanitizer: DomSanitizer) {
    var session = JSON.parse(localStorage.getItem('sessionConfiguration'));
    if (session != null) {
      this.vmUrl = session.vmUrl;
    }
    this.token = localStorage.getItem('accesstoken');
    // NOTE(review): when no session is stored, vmUrl is undefined here and the
    // socket connects to the page origin — confirm this fallback is intended.
    this.socket = io.connect(this.vmUrl, { secure: true });
    this.thumbnailImg = false;
  }

  ngOnInit() {
    this.camDisplay();
    this.socketConnection();
  }

  // Subscribes to raw image frames pushed by the VM over socket.io.
  socketConnection() {
    this.socket.on('rawImage', (msg: any) => {
      var data = JSON.parse(msg.message);
      this.thumbnailImg = true;
      this.thumbnail = data.imgBase64;
      //this.zone.run(() => { this.thumbnail = data.imgBase64; });
    });
  }

  // Fetches the camera list and projects the fields the template needs.
  camDisplay() {
    this.cameras = [];
    this.http.get<any[]>(this.vmUrl + '/cameras',
      // {
      //   headers: new HttpHeaders().set('Authorization', "Bearer " + this.token),
      // }
    ).subscribe(
      res => {
        res.forEach(item => {
          this.cameras.push({ 'deviceName': item.deviceName, 'streamingUrl': item.streamingUrl, "_id": item._id, "aggregatorId": item.aggregatorId, "computeEngineId": item.computeEngineId });
        });
        console.log("Cameras: ", this.cameras);
      },
      err => {
        console.log("Error occured");
      });
  };

  // Asks the VM to grab a raw frame for the given camera; the frame itself
  // arrives asynchronously via the 'rawImage' socket event above.
  getRawImage(camId, streamingUrl) {
    console.log("Streaming url:", streamingUrl);
    var data = {
      streamingUrl: streamingUrl,
      cameraId: camId
    };
    this.http.post(this.vmUrl + '/cameras/raw', data,
      // {
      //   headers: new HttpHeaders().set('Authorization', "Bearer " + this.token),
      // }
    )
      .subscribe(
        res => {
          console.log("In take preview");
          console.log(res);
        },
        err => {
          console.log("error response", err);
        });
  }
}
|
<filename>src/sport/ListCountriesResponse.cpp
/**
* Copyright 2017 <NAME>. Distributed under the MIT license.
*/
#include "greentop/sport/ListCountriesResponse.h"
namespace greentop {
namespace sport {
// Constructs an empty response with no country code results.
ListCountriesResponse::ListCountriesResponse() {
}

// Constructs a response pre-populated with the given country code results.
ListCountriesResponse::ListCountriesResponse(const std::vector<CountryCodeResult>& countryCodeResults) :
    countryCodeResults(countryCodeResults) {
}
/**
 * Populates this response from a JSON array; each element is parsed into a
 * CountryCodeResult and appended. Invalid JSON (per validateJson) is ignored.
 */
void ListCountriesResponse::fromJson(const Json::Value& json) {
    if (validateJson(json)) {
        // Reserve up front so the push_back loop performs a single allocation.
        countryCodeResults.reserve(countryCodeResults.size() + json.size());
        for (unsigned i = 0; i < json.size(); ++i) {
            CountryCodeResult countryCodeResult;
            countryCodeResult.fromJson(json[i]);
            countryCodeResults.push_back(countryCodeResult);
        }
    }
}
/**
 * Serializes the held country code results into a JSON array.
 * Returns an empty array when there are no results.
 */
Json::Value ListCountriesResponse::toJson() const {
    Json::Value json(Json::arrayValue);
    // Range-for replaces the index loop; an empty vector naturally yields [].
    for (const auto& countryCodeResult : countryCodeResults) {
        json.append(countryCodeResult.toJson());
    }
    return json;
}
/**
 * @returns true when at least one country code result is present.
 */
bool ListCountriesResponse::isValid() const {
    // Idiomatic emptiness check instead of size() > 0.
    return !countryCodeResults.empty();
}
// Read-only access to the held country code results.
const std::vector<CountryCodeResult>& ListCountriesResponse::getCountryCodeResults() const {
    return countryCodeResults;
}

// Replaces the held country code results with a copy of the given vector.
void ListCountriesResponse::setCountryCodeResults(const std::vector<CountryCodeResult>& countryCodeResults) {
    this->countryCodeResults = countryCodeResults;
}
}
}
|
require 'parallel'
require 'postgresql_cursor'
require_relative '../support/database'
require_relative '../sequent'
require_relative '../util/timer'
require_relative '../util/printer'
require_relative './projectors'
require_relative 'planner'
require_relative 'executor'
require_relative 'sql'
module Sequent
module Migrations
# Raised when a view schema migration cannot proceed.
class MigrationError < RuntimeError; end
##
# ViewSchema is used for migration of your view_schema. For instance
# when you create new Projectors or change existing Projectors.
#
# The following migrations are supported:
#
# - ReplayTable (Projector migrations)
# - AlterTable (For instance if you introduce a new column)
#
# To maintain your migrations you need to:
# 1. Create a class that extends `Sequent::Migrations::Projectors` and specify in `Sequent.configuration.migrations_class_name`
# 2. Define per version which migrations you want to execute
# See the definition of `Sequent::Migrations::Projectors.versions` and `Sequent::Migrations::Projectors.version`
# 3. Specify in Sequent where your sql files reside (Sequent.configuration.migration_sql_files_directory)
# 4. Ensure that you add %SUFFIX% to each name that needs to be unique in postgres (like TABLE names, INDEX names, PRIMARY KEYS)
# E.g. `create table foo%SUFFIX% (id serial NOT NULL, CONSTRAINT foo_pkey%SUFFIX% PRIMARY KEY (id))`
# 5. If you want to run an `alter_table` migration ensure that
# a sql file named `table_name_VERSION.sql` exists.
#
# Example:
#
# class AppMigrations < Sequent::Migrations::Projectors
# def self.version
# '3'
# end
#
# def self.versions
# {
# '1' => [Sequent.all_projectors],
# '2' => [
# UserProjector,
# InvoiceProjector,
# ],
# '3' => [
# Sequent::Migrations.alter_table(UserRecord)
# ]
#
# }
# end
#
# end
class ViewSchema
  # Corresponds with the index on aggregate_id column in the event_records table
  #
  # Since we replay in batches of the first 3 chars of the uuid we created an index on
  # these 3 characters. Hence the name ;-)
  #
  # This also means that the online replay is divided up into 16**3 groups
  # This might seem a lot for starting event store, but when you will get more
  # events, you will see that this is pretty good partitioned.
  LENGTH_OF_SUBSTRING_INDEX_ON_AGGREGATE_ID_IN_EVENT_STORE = 3

  include Sequent::Util::Timer
  include Sequent::Util::Printer
  include Sql

  # Meta data table recording which view schema version is active.
  class Versions < Sequent::ApplicationRecord; end
  # Meta data table recording ids of events already replayed by the online phase.
  class ReplayedIds < Sequent::ApplicationRecord; end

  attr_reader :view_schema, :db_config, :logger

  # db_config: database configuration used whenever this class (re)establishes
  # connections, e.g. in forked replay worker processes.
  def initialize(db_config:)
    @db_config = db_config
    @view_schema = Sequent.configuration.view_schema_name
    @logger = Sequent.logger
  end

  ##
  # Returns the current version from the database
  def current_version
    Versions.order('version desc').limit(1).first&.version || 0
  end

  ##
  # Utility method that creates all tables in the view schema
  #
  # This method is mainly useful in test scenario to just create
  # the entire view schema without replaying the events
  def create_view_tables
    create_view_schema_if_not_exists
    in_view_schema do
      Sequent::Core::Migratable.all.flat_map(&:managed_tables).each do |table|
        # Strip the %SUFFIX% placeholder so tables are created under their final names.
        statements = sql_file_to_statements("#{Sequent.configuration.migration_sql_files_directory}/#{table.table_name}.sql") { |raw_sql| raw_sql.remove('%SUFFIX%') }
        statements.each { |statement| exec_sql(statement) }
      end
    end
  end

  ##
  # Utility method that replays events for all managed_tables from all Sequent::Core::Projector's
  #
  # This method is mainly useful in test scenario's or development tasks
  def replay_all!
    replay!(Sequent.configuration.online_replay_persistor_class.new)
  end

  ##
  # Utility method that creates the view_schema and the meta data tables
  #
  # This method is mainly useful during an initial setup of the view schema
  def create_view_schema_if_not_exists
    exec_sql(%Q{CREATE SCHEMA IF NOT EXISTS #{view_schema}})
    in_view_schema do
      exec_sql(%Q{CREATE TABLE IF NOT EXISTS #{Versions.table_name} (version integer NOT NULL, CONSTRAINT version_pk PRIMARY KEY(version))})
      exec_sql(%Q{CREATE TABLE IF NOT EXISTS #{ReplayedIds.table_name} (event_id bigint NOT NULL, CONSTRAINT event_id_pk PRIMARY KEY(event_id))})
    end
  end

  # Migration steps needed to go from current_version to Sequent.new_version (memoized).
  def plan
    @plan ||= Planner.new(Sequent.migration_class.versions).plan(current_version, Sequent.new_version)
  end

  # Executes the planned migration steps (memoized).
  def executor
    @executor ||= Executor.new
  end

  ##
  # First part of a view schema migration
  #
  # Call this method while your application is running.
  # The online part consists of:
  #
  # 1. Ensure any previous migrations are cleaned up
  # 2. Create new tables for the Projectors which need to be migrated to the new version
  #    These tables will be called `table_name_VERSION`.
  # 3. Replay all events to populate the tables
  #    It keeps track of all events that are already replayed.
  #
  # If anything fails an exception is raised and everything is rolled back
  #
  def migrate_online
    return if Sequent.new_version == current_version

    ensure_version_correct!
    in_view_schema do
      truncate_replay_ids_table!
      drop_old_tables(Sequent.new_version)
      executor.execute_online(plan)
    end
    if plan.projectors.any?
      replay!(Sequent.configuration.online_replay_persistor_class.new)
    end
  # Broad rescue is intentional: always roll back partial state, then re-raise.
  rescue Exception => e
    rollback_migration
    raise e
  end

  ##
  # Last part of a view schema migration
  #
  # +You have to ensure no events are being added to the event store while this method is running.+
  # For instance put your application in maintenance mode.
  #
  # The offline part consists of:
  #
  # 1. Replay all events not yet replayed since #migration_online
  # 2. Within a single transaction do:
  # 2.1 Rename current tables with the +current version+ as SUFFIX
  # 2.2 Rename the new tables and remove the +new version+ suffix
  # 2.3 Add the new version in the +Versions+ table
  # 3. Performs cleanup of replayed event ids
  #
  # If anything fails an exception is raised and everything is rolled back
  #
  # When this method succeeds you can safely start the application from Sequent's point of view.
  #
  def migrate_offline
    return if Sequent.new_version == current_version

    ensure_version_correct!
    executor.set_table_names_to_new_version(plan)
    # 1 replay events not yet replayed
    replay!(Sequent.configuration.offline_replay_persistor_class.new, exclude_ids: true, group_exponent: 1) if plan.projectors.any?
    in_view_schema do
      Sequent::ApplicationRecord.transaction do
        # 2.1, 2.2
        executor.execute_offline(plan, current_version)
        # 2.3 Create migration record
        Versions.create!(version: Sequent.new_version)
      end
      # 3. Truncate replayed ids
      truncate_replay_ids_table!
    end
    logger.info "Migrated to version #{Sequent.new_version}"
  # Broad rescue is intentional: always roll back partial state, then re-raise.
  rescue Exception => e
    rollback_migration
    raise e
  end

  private

  # Creates the schema/meta tables when missing, and fails fast when the
  # configured new version would go backwards.
  def ensure_version_correct!
    create_view_schema_if_not_exists
    new_version = Sequent.new_version
    fail ArgumentError.new("new_version [#{new_version}] must be greater or equal to current_version [#{current_version}]") if new_version < current_version
  end

  # Replays events for the given projectors in parallel sub-processes.
  # The aggregate-id space is split into 16**group_exponent prefix groups;
  # each group replays on its own database connection. When exclude_ids is
  # true, events already recorded in ReplayedIds are skipped.
  def replay!(replay_persistor, projectors: plan.projectors, exclude_ids: false, group_exponent: 3)
    logger.info "group_exponent: #{group_exponent.inspect}"
    with_sequent_config(replay_persistor, projectors) do
      logger.info "Start replaying events"

      time("#{16 ** group_exponent} groups replayed") do
        event_types = projectors.flat_map { |projector| projector.message_mapping.keys }.uniq.map(&:name)
        # Disconnect before forking so workers do not share the parent's connection.
        disconnect!

        number_of_groups = 16 ** group_exponent
        groups = groups_of_aggregate_id_prefixes(number_of_groups)

        @connected = false
        # using `map_with_index` because https://github.com/grosser/parallel/issues/175
        result = Parallel.map_with_index(groups, in_processes: Sequent.configuration.number_of_replay_processes) do |aggregate_prefixes, index|
          begin
            # Each worker process lazily opens its own connection.
            @connected ||= establish_connection
            time("Group (#{aggregate_prefixes.first}-#{aggregate_prefixes.last}) #{index + 1}/#{number_of_groups} replayed") do
              replay_events(aggregate_prefixes, event_types, exclude_ids, replay_persistor, &insert_ids)
            end
            nil
          rescue => e
            logger.error "Replaying failed for ids: ^#{aggregate_prefixes.first} - #{aggregate_prefixes.last}"
            logger.error "+++++++++++++++ ERROR +++++++++++++++"
            recursively_print(e)
            raise Parallel::Kill # immediately kill all sub-processes
          end
        end
        establish_connection
        # Parallel returns nil when killed; propagate the failure.
        fail if result.nil?
      end
    end
  end

  # Streams matching events through the event store in batches and commits
  # the replay persistor(s) afterwards.
  def replay_events(aggregate_prefixes, event_types, exclude_already_replayed, replay_persistor, &on_progress)
    Sequent.configuration.event_store.replay_events_from_cursor(
      block_size: 1000,
      get_events: -> { event_stream(aggregate_prefixes, event_types, exclude_already_replayed) },
      on_progress: on_progress
    )

    replay_persistor.commit

    # Also commit all specific declared replay persistors on projectors.
    Sequent.configuration.event_handlers.select { |e| e.class.replay_persistor }.each(&:commit)
  end

  # Restores the pre-migration state: drops half-migrated tables, clears the
  # replayed-id bookkeeping and resets the planned table names.
  def rollback_migration
    disconnect!
    establish_connection
    drop_old_tables(Sequent.new_version)

    truncate_replay_ids_table!
    executor.reset_table_names(plan)
  end

  # Clears the bookkeeping of already-replayed event ids.
  def truncate_replay_ids_table!
    exec_sql("truncate table #{ReplayedIds.table_name}")
  end

  # Splits all 3-hex-digit aggregate-id prefixes (zero padded) into
  # number_of_groups contiguous slices for parallel replay.
  def groups_of_aggregate_id_prefixes(number_of_groups)
    all_prefixes = (0...16 ** LENGTH_OF_SUBSTRING_INDEX_ON_AGGREGATE_ID_IN_EVENT_STORE).to_a.map { |i| i.to_s(16) } # first x digits of hex
    all_prefixes = all_prefixes.map { |s| s.length == 3 ? s : "#{"0" * (3 - s.length)}#{s}" }

    logger.info "Number of groups #{number_of_groups}"

    logger.debug "Prefixes: #{all_prefixes.length}"
    fail "Can not have more groups #{number_of_groups} than number of prefixes #{all_prefixes.length}" if number_of_groups > all_prefixes.length

    all_prefixes.each_slice(all_prefixes.length / number_of_groups).to_a
  end

  # Runs the block with the search_path pointed at the view schema.
  def in_view_schema
    Sequent::Support::Database.with_schema_search_path(view_schema, db_config) do
      yield
    end
  end

  # Drops leftover suffixed tables from the last ~10 versions up to new_version.
  def drop_old_tables(new_version)
    versions_to_check = (current_version - 10)..new_version
    old_tables = versions_to_check.flat_map do |old_version|
      exec_sql(
        "select table_name from information_schema.tables where table_schema = '#{Sequent.configuration.view_schema_name}' and table_name LIKE '%_#{old_version}'"
      ).flat_map { |row| row.values }
    end
    old_tables.each do |old_table|
      exec_sql("DROP TABLE #{Sequent.configuration.view_schema_name}.#{old_table} CASCADE")
    end
  end

  # Progress callback: records replayed event ids and prints progress.
  def insert_ids
    ->(progress, done, ids) do
      exec_sql("insert into #{ReplayedIds.table_name} (event_id) values #{ids.map { |id| "(#{id})" }.join(',')}") unless ids.empty?
      Sequent::Core::EventStore::PRINT_PROGRESS[progress, done, ids] if progress > 0
    end
  end

  # Temporarily swaps the global Sequent configuration for one wired with the
  # replay projectors and no transactions; always restores the old config.
  def with_sequent_config(replay_persistor, projectors, &block)
    old_config = Sequent.configuration
    config = Sequent.configuration.dup
    replay_projectors = projectors.map { |projector_class| projector_class.new(projector_class.replay_persistor || replay_persistor) }
    config.transaction_provider = Sequent::Core::Transactions::NoTransactions.new
    config.event_handlers = replay_projectors
    Sequent::Configuration.restore(config)
    block.call
  ensure
    Sequent::Configuration.restore(old_config)
  end

  # Relation of events to replay for the given prefixes/types. When excluding
  # already-replayed events, also restricts to events from the last day —
  # NOTE(review): confirm the 1-day window is intended for offline replay.
  def event_stream(aggregate_prefixes, event_types, exclude_already_replayed)
    fail ArgumentError.new("aggregate_prefixes is mandatory") unless aggregate_prefixes.present?

    event_stream = Sequent.configuration.event_record_class.where(event_type: event_types)
    event_stream = event_stream.where("substring(aggregate_id::varchar from 1 for #{LENGTH_OF_SUBSTRING_INDEX_ON_AGGREGATE_ID_IN_EVENT_STORE}) in (?)", aggregate_prefixes)
    event_stream = event_stream.where("NOT EXISTS (SELECT 1 FROM #{ReplayedIds.table_name} WHERE event_id = event_records.id)") if exclude_already_replayed
    event_stream = event_stream.where("event_records.created_at > ?", 1.day.ago) if exclude_already_replayed
    event_stream.order('sequence_number ASC').select('id, event_type, event_json, sequence_number')
  end

  ## shortcut methods
  def disconnect!
    Sequent::Support::Database.disconnect!
  end

  def establish_connection
    Sequent::Support::Database.establish_connection(db_config)
  end
end
end
end
|
#!/bin/sh
# Launch Riot Desktop: run the bundled Electron binary against the app
# directory, forwarding all command-line arguments.
/usr/lib/riot-desktop/dist/electron /usr/lib/riot-desktop/electron_app "$@"
|
#!/bin/bash
#
# This script will remove created PlatformIO compatible folder structure.
# Files are mostly references to original files, and the original files
# will not be deleted. So generally said it safe to run this script.
# platformio.ini files will be removed along with any additional added
# files.
#
cd ../examples-pio || exit 1
for example in IotWebConf*; do
    echo "Removing pio example ${example}"
    # Quote the path so names containing spaces or glob characters
    # cannot word-split or re-expand.
    rm -rf "${example}" || exit $?
done
package com.sawert.sandbox.spring.mvc.service.impl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.sawert.sandbox.spring.mvc.model.TestModel;
import com.sawert.sandbox.spring.mvc.model.TestModels;
import com.sawert.sandbox.spring.mvc.service.TestModelService;
/**
 * Stub implementation of the {@link TestModelService} interface.
 * Both lookups currently return {@code null}; they only log that the
 * stub was invoked.
 *
 * @author bsawert
 */
public class TestModelServiceImpl implements TestModelService {

    private static final Log LOG = LogFactory.getLog(TestModelServiceImpl.class);

    /**
     * @see com.sawert.sandbox.spring.mvc.service.TestModelService#getModelById(java.lang.String)
     * @return always {@code null} (stub)
     */
    public TestModel getModelById(String id) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Calling getModelById stub.");
        }

        return null;
    }

    /**
     * @see com.sawert.sandbox.spring.mvc.service.TestModelService#getAllModels()
     * @return always {@code null} (stub)
     */
    public TestModels getAllModels() {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Calling getAllModels stub.");
        }

        return null;
    }
}
|
-- Contact list: one row per customer with display-friendly column aliases.
-- Note: single-quoted aliases are a MySQL extension (standard SQL uses "...").
SELECT address AS 'Address',
       name AS 'Name',
       phone AS 'Phone'
FROM customers
/// Minimal in-memory string-to-string key-value store backed by a HashMap.
struct KeyValueStore {
    store: std::collections::HashMap<String, String>,
}

impl KeyValueStore {
    /// Creates an empty store.
    fn new() -> KeyValueStore {
        Self {
            store: std::collections::HashMap::new(),
        }
    }

    /// Inserts `value` under `key`, overwriting any previous entry.
    fn insert(&mut self, key: String, value: String) {
        self.store.insert(key, value);
    }

    /// Returns a reference to the value stored under `key`, if any.
    fn get(&self, key: &str) -> Option<&String> {
        self.store.get(key)
    }

    /// Removes `key` from the store; a no-op when the key is absent.
    fn delete(&mut self, key: &str) {
        self.store.remove(key);
    }
}
fn main() {
let mut kv_store = KeyValueStore::new();
kv_store.insert("name".to_string(), "John".to_string());
kv_store.insert("age".to_string(), "30".to_string());
match kv_store.get("name") {
Some(value) => println!("Name: {}", value),
None => println!("Name not found"),
}
kv_store.delete("age");
match kv_store.get("age") {
Some(value) => println!("Age: {}", value),
None => println!("Age not found"),
}
} |
#!/bin/bash
# This file contains some utilities to test the elasticsearch scripts,
# the .deb/.rpm packages and the SysV/Systemd scripts.
# WARNING: This testing file must be executed as root and can
# dramatically change your system. It should only be executed
# in a throw-away VM like those made by the Vagrantfile at
# the root of the Elasticsearch source code. This should
# cause the script to fail if it is executed any other way:
# Refuse to run outside the throw-away Vagrant VM (marker file is created
# by the Vagrant provisioning); these tests mutate the system heavily.
[ -f /etc/is_vagrant_vm ] || {
  >&2 echo "must be run on a vagrant VM"
  exit 1
}
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Checks if necessary commands are available to run the tests
# ('which' itself must exist before it can probe for the rest).
if [ ! -x /usr/bin/which ]; then
    echo "'which' command is mandatory to run the tests"
    exit 1
fi

# Every tool below is required by at least one packaging test.
# (The original had the 'unzip' check duplicated; deduplicated here.)
for cmd in wget curl pgrep unzip tar java; do
    if [ ! -x "`which $cmd 2>/dev/null`" ]; then
        echo "'$cmd' command is mandatory to run the tests"
        exit 1
    fi
done
# Returns 0 if the 'dpkg' command is available
is_dpkg() {
    [ -x "`which dpkg 2>/dev/null`" ]
}

# Returns 0 if the 'rpm' command is available
is_rpm() {
    [ -x "`which rpm 2>/dev/null`" ]
}

# Skip test if the 'dpkg' command is not supported
skip_not_dpkg() {
    is_dpkg || skip "dpkg is not supported"
}

# Skip test if the 'rpm' command is not supported
skip_not_rpm() {
    is_rpm || skip "rpm is not supported"
}

# Skip test unless at least one of dpkg/rpm is available
skip_not_dpkg_or_rpm() {
    is_dpkg || is_rpm || skip "only dpkg or rpm systems are supported"
}
# Returns 0 if the system supports Systemd
is_systemd() {
    [ -x /bin/systemctl ]
}

# Skip test if Systemd is not supported
skip_not_systemd() {
    # Reuse the probe above (the check was previously duplicated inline),
    # matching the style of skip_not_dpkg/skip_not_rpm.
    is_systemd || skip "systemd is not supported"
}

# Returns 0 if the system supports SysV
is_sysvinit() {
    [ -x "`which service 2>/dev/null`" ]
}

# Skip test if SysV is not supported, or if systemd should be preferred
skip_not_sysvinit() {
    if is_sysvinit && is_systemd; then
        skip "sysvinit is supported, but systemd too"
    fi
    is_sysvinit || skip "sysvinit is not supported"
}

# Skip if tar is not supported
skip_not_tar_gz() {
    [ -x "`which tar 2>/dev/null`" ] || skip "tar is not supported"
}

# Skip if unzip is not supported
skip_not_zip() {
    [ -x "`which unzip 2>/dev/null`" ] || skip "unzip is not supported"
}
# Asserts that a path exists, printing a diagnostic before failing.
# Symlinks are resolved via readlink -m and checked again.
assert_file_exist() {
    local file="$1"
    if [ ! -e "$file" ]; then
        echo "Should exist: ${file} but does not"
    fi
    local file=$(readlink -m "${file}")
    [ -e "$file" ]
}

# Asserts that a path does not exist (after symlink resolution).
assert_file_not_exist() {
    local file="$1"
    if [ -e "$file" ]; then
        echo "Should not exist: ${file} but does"
    fi
    local file=$(readlink -m "${file}")
    [ ! -e "$file" ]
}
# Asserts existence, type (f|d), owner, group and octal permissions of a path.
# Empty user/group/privileges arguments skip the corresponding check.
#   $1 file, $2 type, $3 user, $4 group, $5 privileges
assert_file() {
    local file="$1"
    local type=$2
    local user=$3
    local group=$4
    local privileges=$5

    assert_file_exist "$file"

    if [ "$type" = "d" ]; then
        if [ ! -d "$file" ]; then
            echo "[$file] should be a directory but is not"
        fi
        [ -d "$file" ]
    else
        if [ ! -f "$file" ]; then
            echo "[$file] should be a regular file but is not"
        fi
        [ -f "$file" ]
    fi

    if [ "x$user" != "x" ]; then
        realuser=$(find "$file" -maxdepth 0 -printf "%u")
        if [ "$realuser" != "$user" ]; then
            echo "Expected user: $user, found $realuser [$file]"
        fi
        [ "$realuser" = "$user" ]
    fi

    if [ "x$group" != "x" ]; then
        realgroup=$(find "$file" -maxdepth 0 -printf "%g")
        if [ "$realgroup" != "$group" ]; then
            echo "Expected group: $group, found $realgroup [$file]"
        fi
        [ "$realgroup" = "$group" ]
    fi

    if [ "x$privileges" != "x" ]; then
        realprivileges=$(find "$file" -maxdepth 0 -printf "%m")
        if [ "$realprivileges" != "$privileges" ]; then
            echo "Expected privileges: $privileges, found $realprivileges [$file]"
        fi
        [ "$realprivileges" = "$privileges" ]
    fi
}
# Asserts a module/plugin directory matches the ownership of $ESHOME.
# NOTE(review): config_user/config_owner are deliberately NOT declared local —
# assert_module_or_plugin_file below reads them; confirm this coupling is intended.
assert_module_or_plugin_directory() {
    local directory=$1
    shift

    #owner group and permissions vary depending on how es was installed
    #just make sure that everything is the same as $CONFIG_DIR, which was properly set up during install
    config_user=$(find "$ESHOME" -maxdepth 0 -printf "%u")
    config_owner=$(find "$ESHOME" -maxdepth 0 -printf "%g")

    assert_file $directory d $config_user $config_owner 755
}

# Asserts a module/plugin file exists with 644 and the ownership captured above.
assert_module_or_plugin_file() {
    local file=$1
    shift

    assert_file_exist "$(readlink -m $file)"
    assert_file $file f $config_user $config_owner 644
}

# Asserts the bats $output matches the given extended regex.
assert_output() {
    echo "$output" | grep -E "$1"
}

# Asserts every file under $1 is owned by user $2 and group $3.
assert_recursive_ownership() {
    local directory=$1
    local user=$2
    local group=$3

    realuser=$(find $directory -printf "%u\n" | sort | uniq)
    [ "$realuser" = "$user" ]
    realgroup=$(find $directory -printf "%g\n" | sort | uniq)
    [ "$realgroup" = "$group" ]
}
# Deletes everything before running a test file
# Removes processes, packages, the elasticsearch user/group and all
# install/config/log/run directories from any previous run.
clean_before_test() {

    # List of files to be deleted
    ELASTICSEARCH_TEST_FILES=("/usr/share/elasticsearch" \
                            "/etc/elasticsearch" \
                            "/var/lib/elasticsearch" \
                            "/var/log/elasticsearch" \
                            "/tmp/elasticsearch" \
                            "/etc/default/elasticsearch" \
                            "/etc/sysconfig/elasticsearch"  \
                            "/var/run/elasticsearch"  \
                            "/usr/share/doc/elasticsearch" \
                            "/tmp/elasticsearch" \
                            "/usr/lib/systemd/system/elasticsearch.conf" \
                            "/usr/lib/tmpfiles.d/elasticsearch.conf" \
                            "/usr/lib/sysctl.d/elasticsearch.conf")

    # Kills all processes of user elasticsearch
    if id elasticsearch > /dev/null 2>&1; then
        pkill -u elasticsearch 2>/dev/null || true
    fi

    # Kills all running Elasticsearch processes
    ps aux | grep -i "org.elasticsearch.bootstrap.Elasticsearch" | awk {'print $2'} | xargs kill -9 > /dev/null 2>&1 || true

    purge_elasticsearch

    # Removes user & group
    userdel elasticsearch > /dev/null 2>&1 || true
    groupdel elasticsearch > /dev/null 2>&1 || true

    # Removes all files
    for d in "${ELASTICSEARCH_TEST_FILES[@]}"; do
        if [ -e "$d" ]; then
            rm -rf "$d"
        fi
    done
}
# Uninstalls elasticsearch via every package manager that may have
# installed it (rpm/yum and dpkg/apt-get); all failures are ignored.
purge_elasticsearch() {
    # Removes RPM package
    if is_rpm; then
        rpm --quiet -e elasticsearch > /dev/null 2>&1 || true
    fi

    if [ -x "`which yum 2>/dev/null`" ]; then
        yum remove -y elasticsearch > /dev/null 2>&1 || true
    fi

    # Removes DEB package
    if is_dpkg; then
        dpkg --purge elasticsearch > /dev/null 2>&1 || true
    fi

    if [ -x "`which apt-get 2>/dev/null`" ]; then
        apt-get --quiet --yes purge elasticsearch > /dev/null 2>&1 || true
    fi
}
# Start elasticsearch and wait for it to come up with a status.
# $1 - expected status - defaults to green
# $2 - optional index name to wait for
# $3 - additional command line args passed to run_elasticsearch_service
start_elasticsearch_service() {
    local desiredStatus=${1:-green}
    local index=$2
    local commandLineArgs=$3

    run_elasticsearch_service 0 $commandLineArgs

    wait_for_elasticsearch_status $desiredStatus $index

    # Verify the process is actually running via whichever mechanism
    # started it: pidfile (tarball install), systemd, or SysV.
    if [ -r "/tmp/elasticsearch/elasticsearch.pid" ]; then
        pid=$(cat /tmp/elasticsearch/elasticsearch.pid)
        [ "x$pid" != "x" ] && [ "$pid" -gt 0 ]
        echo "Looking for elasticsearch pid...."
        ps $pid
    elif is_systemd; then
        run systemctl is-active elasticsearch.service
        [ "$status" -eq 0 ]

        run systemctl status elasticsearch.service
        [ "$status" -eq 0 ]
    elif is_sysvinit; then
        run service elasticsearch status
        [ "$status" -eq 0 ]
    fi
}
# Start elasticsearch
# $1 expected status code
# $2 additional command line args
# Chooses the launch mechanism automatically: tarball binary (with pidfile),
# systemd unit, or SysV init script.
run_elasticsearch_service() {
    local expectedStatus=$1
    local commandLineArgs=$2
    # Set the CONF_DIR setting in case we start as a service
    if [ ! -z "$CONF_DIR" ] ; then
        if is_dpkg ; then
            echo "CONF_DIR=$CONF_DIR" >> /etc/default/elasticsearch;
            echo "ES_JVM_OPTIONS=$ES_JVM_OPTIONS" >> /etc/default/elasticsearch;
        elif is_rpm; then
            echo "CONF_DIR=$CONF_DIR" >> /etc/sysconfig/elasticsearch;
            echo "ES_JVM_OPTIONS=$ES_JVM_OPTIONS" >> /etc/sysconfig/elasticsearch
        fi
    fi

    if [ -f "/tmp/elasticsearch/bin/elasticsearch" ]; then
        if [ -z "$CONF_DIR" ]; then
            local CONF_DIR=""
            local ES_PATH_CONF=""
        else
            local ES_PATH_CONF="-Epath.conf=$CONF_DIR"
        fi
        # we must capture the exit code to compare so we don't want to start as background process in case we expect something other than 0
        local background=""
        local timeoutCommand=""
        if [ "$expectedStatus" = 0 ]; then
            background="-d"
        else
            timeoutCommand="timeout 60s "
        fi
        # su and the Elasticsearch init script work together to break bats.
        # sudo isolates bats enough from the init script so everything continues
        # to tick along
        run sudo -u elasticsearch bash <<BASH
# If jayatana is installed then we try to use it. Elasticsearch should ignore it even when we try.
# If it doesn't ignore it then Elasticsearch will fail to start because of security errors.
# This line is attempting to emulate the on login behavior of /usr/share/upstart/sessions/jayatana.conf
[ -f /usr/share/java/jayatanaag.jar ] && export JAVA_TOOL_OPTIONS="-javaagent:/usr/share/java/jayatanaag.jar"
# And now we can start Elasticsearch normally, in the background (-d) and with a pidfile (-p).
export ES_JVM_OPTIONS=$ES_JVM_OPTIONS
export ES_JAVA_OPTS=$ES_JAVA_OPTS
$timeoutCommand/tmp/elasticsearch/bin/elasticsearch $background -p /tmp/elasticsearch/elasticsearch.pid $ES_PATH_CONF $commandLineArgs
BASH
        [ "$status" -eq "$expectedStatus" ]
    elif is_systemd; then
        run systemctl daemon-reload
        [ "$status" -eq 0 ]

        run systemctl enable elasticsearch.service
        [ "$status" -eq 0 ]

        run systemctl is-enabled elasticsearch.service
        [ "$status" -eq 0 ]

        run systemctl start elasticsearch.service
        [ "$status" -eq "$expectedStatus" ]

    elif is_sysvinit; then
        run service elasticsearch start
        [ "$status" -eq "$expectedStatus" ]
    fi
}
# Stops elasticsearch, choosing the mechanism that matches how it was started:
# a pidfile from a tarball run, systemd, or SysV init. Asserts (bats) that the
# service actually went down.
stop_elasticsearch_service() {
    if [ -r "/tmp/elasticsearch/elasticsearch.pid" ]; then
        pid=$(cat /tmp/elasticsearch/elasticsearch.pid)
        # Sanity-check the pidfile content before signalling anything.
        [ "x$pid" != "x" ] && [ "$pid" -gt 0 ]
        kill -SIGTERM $pid
    elif is_systemd; then
        run systemctl stop elasticsearch.service
        [ "$status" -eq 0 ]
        run systemctl is-active elasticsearch.service
        # `systemctl is-active` exits 3 for an inactive (or failed) unit.
        [ "$status" -eq 3 ]
        echo "$output" | grep -E 'inactive|failed'
    elif is_sysvinit; then
        run service elasticsearch stop
        [ "$status" -eq 0 ]
        # `service status` must now report not-running (non-zero).
        run service elasticsearch status
        [ "$status" -ne 0 ]
    fi
}
# Waits for Elasticsearch to reach some status.
# $1 - expected status - defaults to green
# $2 - optional index name; when given, waits on that index's health instead
#      of the whole cluster's
wait_for_elasticsearch_status() {
    local desiredStatus=${1:-green}
    local index=$2
    echo "Making sure elasticsearch is up..."
    # Poll until the HTTP server answers at all; on failure dump whatever log
    # we can find so the bats output is actionable, then fail the test.
    wget -O - --retry-connrefused --waitretry=1 --timeout=60 --tries 60 http://localhost:9200/_cluster/health || {
          echo "Looks like elasticsearch never started. Here is its log:"
          if [ -e "$ESLOG/elasticsearch.log" ]; then
            cat "$ESLOG/elasticsearch.log"
          else
            echo "The elasticsearch log doesn't exist. Maybe /var/log/messages has something:"
            tail -n20 /var/log/messages
          fi
          false
    }
    # BUG FIX: the original tested the literal string ([ -z "index" ]), which
    # is never empty, so the cluster-wide branch was unreachable and the
    # index-specific URL was used even with an empty $index.
    if [ -z "$index" ]; then
      echo "Trying to connect to elasticsearch and wait for expected status $desiredStatus..."
      curl -sS "http://localhost:9200/_cluster/health?wait_for_status=$desiredStatus&timeout=60s&pretty"
    else
      echo "Trying to connect to elasticsearch and wait for expected status $desiredStatus for index $index"
      curl -sS "http://localhost:9200/_cluster/health/$index?wait_for_status=$desiredStatus&timeout=60s&pretty"
    fi
    # $? here is the exit status of whichever curl ran above.
    if [ $? -eq 0 ]; then
      echo "Connected"
    else
      echo "Unable to connect to Elasticsearch"
      false
    fi
    echo "Checking that the cluster health matches the waited for status..."
    run curl -sS -XGET 'http://localhost:9200/_cat/health?h=status&v=false'
    if [ "$status" -ne 0 ]; then
      echo "error when checking cluster health. code=$status output="
      echo $output
      false
    fi
    # _cat/health prints just the status word; it must contain the one we
    # waited for, otherwise fail with a diagnostic.
    echo $output | grep $desiredStatus || {
      echo "unexpected status: '$output' wanted '$desiredStatus'"
      false
    }
}
# Checks the current elasticsearch version using the Info REST endpoint
# $1 - expected version
check_elasticsearch_version() {
    local version=$1
    # The REST API reports the version without any -SNAPSHOT suffix, so
    # strip it from the expected value before comparing.
    local versionToCheck=$(echo $version | sed -e 's/-SNAPSHOT//')
    run curl -s localhost:9200
    [ "$status" -eq 0 ]
    # Look for `"number" : "<version>"` in the root info document; on a
    # mismatch, print the whole document so the failure is debuggable.
    echo $output | grep \"number\"\ :\ \"$versionToCheck\" || {
        echo "Installed an unexpected version:"
        curl -s localhost:9200
        false
    }
}
# Install the example scripts that the Elasticsearch smoke tests rely on.
install_elasticsearch_test_scripts() {
    local script
    for script in is_guide.painless is_guide.mustache; do
        install_script "$script"
    done
}
# Executes some basic Elasticsearch tests
# Requires a running, green cluster and the is_guide scripts installed
# (see install_elasticsearch_test_scripts). Cleans up all indices at the end.
run_elasticsearch_tests() {
    # TODO this assertion is the same the one made when waiting for
    # elasticsearch to start
    run curl -XGET 'http://localhost:9200/_cat/health?h=status&v=false'
    [ "$status" -eq 0 ]
    echo "$output" | grep -w "green"
    # Index two documents, with refresh so they are immediately searchable.
    curl -s -H "Content-Type: application/json" -XPOST 'http://localhost:9200/library/book/1?refresh=true&pretty' -d '{
      "title": "Book #1",
      "pages": 123
    }'
    curl -s -H "Content-Type: application/json" -XPOST 'http://localhost:9200/library/book/2?refresh=true&pretty' -d '{
      "title": "Book #2",
      "pages": 456
    }'
    # Both documents must be visible to a plain count query.
    curl -s -XGET 'http://localhost:9200/_count?pretty' |
      grep \"count\"\ :\ 2
    # The installed painless script must match both books (both have >100 pages).
    curl -s -H "Content-Type: application/json" -XPOST 'http://localhost:9200/library/book/_count?pretty' -d '{
      "query": {
        "script": {
          "script": {
            "file": "is_guide",
            "lang": "painless",
            "params": {
              "min_num_pages": 100
            }
          }
        }
      }
    }' | grep \"count\"\ :\ 2
    # The installed mustache search template must match exactly one book.
    curl -s -H "Content-Type: application/json" -XGET 'http://localhost:9200/library/book/_search/template?pretty' -d '{
      "file": "is_guide"
    }' | grep \"total\"\ :\ 1
    # Clean up all indices so later tests start from scratch.
    curl -s -XDELETE 'http://localhost:9200/_all'
}
# Move the config directory to another directory and properly chown it.
# $1 - optional target directory; defaults to a fresh mktemp directory.
# Exports the new location via $ESCONFIG for the rest of the test run.
move_config() {
    local oldConfig="$ESCONFIG"
    export ESCONFIG="${1:-$(mktemp -d -t 'config.XXXX')}"
    echo "Moving configuration directory from $oldConfig to $ESCONFIG"
    # Move configuration files to the new configuration directory
    mv "$oldConfig"/* "$ESCONFIG"
    chown -R elasticsearch:elasticsearch "$ESCONFIG"
    # The three stock config files must have survived the move.
    assert_file_exist "$ESCONFIG/elasticsearch.yml"
    assert_file_exist "$ESCONFIG/jvm.options"
    assert_file_exist "$ESCONFIG/log4j2.properties"
}
# Copies a script into the Elasticsearch install.
# $1 - name of the script file under $BATS_TEST_DIRNAME/example/scripts
install_script() {
    local name=$1
    # Quote every path expansion so directories containing spaces (or glob
    # characters) don't word-split — the original left these unquoted.
    mkdir -p "$ESSCRIPTS"
    local script="$BATS_TEST_DIRNAME/example/scripts/$name"
    echo "Installing $script to $ESSCRIPTS"
    cp "$script" "$ESSCRIPTS"
}
# permissions from the user umask with the executable bit set
# $1 - user whose umask to consult (queried via a sudo subshell)
# Prints the resulting octal mode on stdout.
executable_privileges_for_user_from_umask() {
    local user=$1
    shift
    # Start from rwxrwxrwx (0777), clear the bits the user's umask masks
    # out, then force the execute bits (0111) back on.
    echo $((0777 & ~$(sudo -E -u $user sh -c umask) | 0111))
}
# permissions from the user umask without the executable bit set
# $1 - user whose umask to consult (queried via a sudo subshell)
# Prints the resulting octal mode on stdout.
file_privileges_for_user_from_umask() {
    local user=$1
    shift
    # Start from rwxrwxrwx (0777), clear the bits the user's umask masks
    # out, then strip the execute bits (0111).
    echo $((0777 & ~$(sudo -E -u $user sh -c umask) & ~0111))
}
|
// Translate transaction tag bits into human-readable tag strings, appended
// to `tags`. `tx` selects the "transfer-" prefixed spelling.
void processTransactionTags(int tag, bool tx, std::vector<std::string>& tags) {
    const bool is_transfer = (tag & MC_MTX_TAG_ASSET_TRANSFER) != 0;
    if (is_transfer) {
        // License tokens get their own label; everything else is an asset.
        const bool is_license = (tag & MC_MTX_TAG_LICENSE_TOKEN) != 0;
        const char* label = is_license
            ? (tx ? "transfer-license" : "license")
            : (tx ? "transfer-asset" : "asset");
        tags.emplace_back(label);
    }
    if (tag & MC_MTX_TAG_MULTIPLE_ASSETS) {
        // Placeholder preserved from the original: multi-asset tagging is
        // not implemented yet.
    }
}
def addNumber(num1, num2):
    """Return the sum of the two given numbers."""
    result = num1 + num2
    return result


total = addNumber(2, 3)
print("The total is:", total)
<reponame>justanr/xyz
from unittest import mock
from datetime import datetime
import pytest
@pytest.fixture
def clock():
    """Autospecced datetime stand-in whose now() is pinned to 2015-10-11."""
    fake = mock.create_autospec(datetime)
    fake.now.return_value = datetime(2015, 10, 11)
    return fake
<gh_stars>0
package document
import "github.com/jensneuse/graphql-go-tools/pkg/lexing/position"
// Value as specified in http://facebook.github.io/graphql/draft/#Value
type Value struct {
	ValueType ValueType // which kind of value this is (discriminator for Reference)
	Reference int       // presumably an index into a type-specific value store — confirm against the parser
	Position  position.Position // location of the value in the source document
	Raw       ByteSliceReference // raw bytes of the value as lexed
}
// Value implements the document's generic node interface. Only NodePosition,
// NodeValueType and NodeValueReference return stored data; every other
// interface method does not apply to a Value and intentionally panics.
func (v Value) NodeSelectionSet() int {
	panic("implement me")
}
func (v Value) NodeInputFieldsDefinition() int {
	panic("implement me")
}
func (v Value) NodeInputValueDefinitions() InputValueDefinitions {
	panic("implement me")
}
// NodePosition returns the source-document position of this value.
func (v Value) NodePosition() position.Position {
	return v.Position
}
// NodeValueType returns the kind discriminator of this value.
func (v Value) NodeValueType() ValueType {
	return v.ValueType
}
// NodeValueReference returns the reference index stored for this value.
func (v Value) NodeValueReference() int {
	return v.Reference
}
// The remaining node-interface methods below are likewise inapplicable to
// values and panic if called.
func (v Value) NodeName() ByteSliceReference {
	panic("implement me")
}
func (v Value) NodeAlias() ByteSliceReference {
	panic("implement me")
}
func (v Value) NodeDescription() ByteSliceReference {
	panic("implement me")
}
func (v Value) NodeArgumentSet() int {
	panic("implement me")
}
func (v Value) NodeArgumentsDefinition() int {
	panic("implement me")
}
func (v Value) NodeDirectiveSet() int {
	panic("implement me")
}
func (v Value) NodeEnumValuesDefinition() EnumValueDefinitions {
	panic("implement me")
}
func (v Value) NodeFields() []int {
	panic("implement me")
}
func (v Value) NodeFieldsDefinition() FieldDefinitions {
	panic("implement me")
}
func (v Value) NodeFragmentSpreads() []int {
	panic("implement me")
}
func (v Value) NodeInlineFragments() []int {
	panic("implement me")
}
func (v Value) NodeVariableDefinitions() []int {
	panic("implement me")
}
func (v Value) NodeType() int {
	panic("implement me")
}
func (v Value) NodeOperationType() OperationType {
	panic("implement me")
}
func (v Value) NodeValue() int {
	panic("implement me")
}
func (v Value) NodeDefaultValue() int {
	panic("implement me")
}
func (v Value) NodeImplementsInterfaces() ByteSliceReferences {
	panic("implement me")
}
func (v Value) NodeSchemaDefinition() SchemaDefinition {
	panic("implement me")
}
func (v Value) NodeScalarTypeDefinitions() []int {
	panic("implement me")
}
func (v Value) NodeObjectTypeDefinitions() []int {
	panic("implement me")
}
func (v Value) NodeInterfaceTypeDefinitions() []int {
	panic("implement me")
}
func (v Value) NodeUnionTypeDefinitions() []int {
	panic("implement me")
}
func (v Value) NodeEnumTypeDefinitions() []int {
	panic("implement me")
}
func (v Value) NodeInputObjectTypeDefinitions() []int {
	panic("implement me")
}
func (v Value) NodeDirectiveDefinitions() []int {
	panic("implement me")
}
func (v Value) NodeUnionMemberTypes() []int {
	panic("implement me")
}
// ListValue holds int references, presumably to the element values of a
// list value — confirm against the parser.
type ListValue []int
// ObjectValue holds int references, presumably to the fields of an object
// value — confirm against the parser.
type ObjectValue []int
|
<filename>src/constants/breakpoints.js
// Viewport-width breakpoints (in px) shared across the app.
export const SM_BREAKPOINT = 414; // small
export const MD_BREAKPOINT = 768; // medium
export const LG_BREAKPOINT = 1024; // large
|
// Barrel file: re-exports this directory's public helpers so consumers can
// import them from a single path.
export { formatPhoneNumber } from './formatPhoneNumber';
// NOTE(review): the module is named getUserWithAuthToken but the export is
// getUserFromAuthToken — confirm the mismatch is intentional.
export { getUserFromAuthToken } from './getUserWithAuthToken';
export {
  deriveStatus,
  meetingStatus,
  meetingStatusToClassName,
} from './meetings';
export { sleep } from './sleep';
|
#!/bin/sh
# If this script is not executable, run: chmod +x deploy.sh
# Builds the frontend, archives the dist directory, uploads it and unpacks
# it on the target server.

# Abort on the first failing step so a broken build is never deployed
# (the original kept going after npm/tar/scp failures).
set -e

# Current date, used to version the uploaded archive.
date=$(date +%F)
# Name of the directory produced by the build.
dist_name="dist"
# Name of the compressed archive.
file_name="dist-${date}.tar.gz"
# Remote server user.
user="hl"
# Remote server host.
host="dn5"
# Deployment path on the remote server.
pwd="/home/app/frontend/recommender-app/"

# Remove artifacts from previous runs.
rm -rf "${dist_name}"
rm -rf ./*.tar.gz
# Build the project.
npm run build
# Compress the build output.
tar -zcvf "${file_name}" "${dist_name}"
# Upload the archive to the server.
scp "${file_name}" "${user}@${host}:${pwd}"
# Log in to the server and unpack the release.
ssh "${user}@${host}" "cd ${pwd};tar -zxvf ${file_name} ${dist_name}"
echo "发布成功"
#!/bin/bash
# Open an interactive shell in the k8s-scaling-notifier image, mounting the
# local kubeconfig (for cluster access) and the current directory at /app.
docker run -it -v $HOME/.kube/config:/root/.kube/config -v ${PWD}:/app wbuchwalter/k8s-scaling-notifier /bin/bash
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# Set $FEATURE_EXTRACTOR_DIR to control where the feature extractor is saved.
# Remember the invocation directory so the parent directory can be put on
# PYTHONPATH (the python scripts below live one level up).
CWD=$(pwd)
export PYTHONPATH=$PYTHONPATH:$CWD/..
# Fail on use of unset variables — e.g. FEATURE_EXTRACTOR_DIR must be set.
set -u
# Run both the training and the downstream sweeps locally via submitit.
export TRAINING_LAUNCHER=submitit_local
export MSD_LAUNCHER=submitit_local
# Train the CIFAR-10 feature extractor; checkpoints land in $FEATURE_EXTRACTOR_DIR.
python ../train_cifar10.py rng_seed=$RANDOM hydra.sweep.dir=$FEATURE_EXTRACTOR_DIR hydra.sweep.subdir=. hydra/launcher=$TRAINING_LAUNCHER -m
python ../feature_corrupt_error.py hydra/launcher=$MSD_LAUNCHER rng_seed='${hydra:job.num}' ft.params.dataset_cfg.weights=$FEATURE_EXTRACTOR_DIR/checkpoints/model_epoch_0100.pyth train.params.aug_string=--,autocontrast-3,equalize-3,posterize-3,solarize-3,shear_x-3,shear_y-3,translate_x-3,translate_y-3,rotate-3,autocontrast-3--equalize-3,autocontrast-3--posterize-3,autocontrast-3--solarize-3,autocontrast-3--shear_x-3,autocontrast-3--shear_y-3,autocontrast-3--translate_x-3,autocontrast-3--translate_y-3,autocontrast-3--rotate-3,equalize-3--posterize-3,equalize-3--solarize-3,equalize-3--shear_x-3,equalize-3--shear_y-3,equalize-3--translate_x-3,equalize-3--translate_y-3,equalize-3--rotate-3,posterize-3--solarize-3,posterize-3--shear_x-3,posterize-3--shear_y-3,posterize-3--translate_x-3,posterize-3--translate_y-3,posterize-3--rotate-3,solarize-3--shear_x-3,solarize-3--shear_y-3,solarize-3--translate_x-3,solarize-3--translate_y-3,solarize-3--rotate-3,shear_x-3--shear_y-3,shear_x-3--translate_x-3,shear_x-3--translate_y-3,shear_x-3--rotate-3,shear_y-3--translate_x-3,shear_y-3--translate_y-3,shear_y-3--rotate-3,translate_x-3--translate_y-3,translate_x-3--rotate-3,translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3,autocontrast-3--equalize-3--solarize-3,autocontrast-3--equalize-3--shear_x-3,autocontrast-3--equalize-3--shear_y-3,autocontrast-3--equalize-3--translate_x-3,autocontrast-3--equalize-3--translate_y-3,autocontrast-3--equalize-3--rotate-3,autocontrast-3--posterize-3--solarize-3,autocontrast-3--posterize-3--shear_x-3,autocontrast-3--posterize-3--shear_y-3,autocontrast-3--posterize-3--translate_x-3,autocontrast-3--posterize-3--translate_y-3,autocontrast-3--posterize-3--rotate-3,autocontrast-3--solarize-3--shear_x-3,autocontrast-3--solarize-3--shear_y-3,autocontrast-3--solarize-3--translate_x-3,autocontrast-3--solarize-3--translate_y-3,autocontrast-3--solarize-3--rotate-3,autocontrast-3--shear_x-3--shear_y-3,autocontrast-3--shear_x-3--translate_x-3,autocontras
t-3--shear_x-3--translate_y-3,autocontrast-3--shear_x-3--rotate-3,autocontrast-3--shear_y-3--translate_x-3,autocontrast-3--shear_y-3--translate_y-3,autocontrast-3--shear_y-3--rotate-3,autocontrast-3--translate_x-3--translate_y-3,autocontrast-3--translate_x-3--rotate-3,autocontrast-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3,equalize-3--posterize-3--shear_x-3,equalize-3--posterize-3--shear_y-3,equalize-3--posterize-3--translate_x-3,equalize-3--posterize-3--translate_y-3,equalize-3--posterize-3--rotate-3,equalize-3--solarize-3--shear_x-3,equalize-3--solarize-3--shear_y-3,equalize-3--solarize-3--translate_x-3,equalize-3--solarize-3--translate_y-3,equalize-3--solarize-3--rotate-3,equalize-3--shear_x-3--shear_y-3,equalize-3--shear_x-3--translate_x-3,equalize-3--shear_x-3--translate_y-3,equalize-3--shear_x-3--rotate-3,equalize-3--shear_y-3--translate_x-3,equalize-3--shear_y-3--translate_y-3,equalize-3--shear_y-3--rotate-3,equalize-3--translate_x-3--translate_y-3,equalize-3--translate_x-3--rotate-3,equalize-3--translate_y-3--rotate-3,posterize-3--solarize-3--shear_x-3,posterize-3--solarize-3--shear_y-3,posterize-3--solarize-3--translate_x-3,posterize-3--solarize-3--translate_y-3,posterize-3--solarize-3--rotate-3 -m
python ../feature_corrupt_error.py hydra/launcher=$MSD_LAUNCHER rng_seed='${hydra:job.num}' ft.params.dataset_cfg.weights=$FEATURE_EXTRACTOR_DIR/checkpoints/model_epoch_0100.pyth train.params.aug_string=posterize-3--shear_x-3--shear_y-3,posterize-3--shear_x-3--translate_x-3,posterize-3--shear_x-3--translate_y-3,posterize-3--shear_x-3--rotate-3,posterize-3--shear_y-3--translate_x-3,posterize-3--shear_y-3--translate_y-3,posterize-3--shear_y-3--rotate-3,posterize-3--translate_x-3--translate_y-3,posterize-3--translate_x-3--rotate-3,posterize-3--translate_y-3--rotate-3,solarize-3--shear_x-3--shear_y-3,solarize-3--shear_x-3--translate_x-3,solarize-3--shear_x-3--translate_y-3,solarize-3--shear_x-3--rotate-3,solarize-3--shear_y-3--translate_x-3,solarize-3--shear_y-3--translate_y-3,solarize-3--shear_y-3--rotate-3,solarize-3--translate_x-3--translate_y-3,solarize-3--translate_x-3--rotate-3,solarize-3--translate_y-3--rotate-3,shear_x-3--shear_y-3--translate_x-3,shear_x-3--shear_y-3--translate_y-3,shear_x-3--shear_y-3--rotate-3,shear_x-3--translate_x-3--translate_y-3,shear_x-3--translate_x-3--rotate-3,shear_x-3--translate_y-3--rotate-3,shear_y-3--translate_x-3--translate_y-3,shear_y-3--translate_x-3--rotate-3,shear_y-3--translate_y-3--rotate-3,translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3,autocontrast-3--equalize-3--posterize-3--shear_x-3,autocontrast-3--equalize-3--posterize-3--shear_y-3,autocontrast-3--equalize-3--posterize-3--translate_x-3,autocontrast-3--equalize-3--posterize-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3,autocontrast-3--equalize-3--solarize-3--shear_y-3,autocontrast-3--equalize-3--solarize-3--translate_x-3,autocontrast-3--equalize-3--solarize-3--translate_y-3,autocontrast-3--equalize-3--solarize-3--rotate-3,autocontrast-3--equalize-3--shear_x-3--shear_y-3,autocontrast-3--equalize-3--shear_x-3--translate_x-3,autocontrast-3--equalize-3--shear_x-
3--translate_y-3,autocontrast-3--equalize-3--shear_x-3--rotate-3,autocontrast-3--equalize-3--shear_y-3--translate_x-3,autocontrast-3--equalize-3--shear_y-3--translate_y-3,autocontrast-3--equalize-3--shear_y-3--rotate-3,autocontrast-3--equalize-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3,autocontrast-3--posterize-3--solarize-3--shear_y-3,autocontrast-3--posterize-3--solarize-3--translate_x-3,autocontrast-3--posterize-3--solarize-3--translate_y-3,autocontrast-3--posterize-3--solarize-3--rotate-3,autocontrast-3--posterize-3--shear_x-3--shear_y-3,autocontrast-3--posterize-3--shear_x-3--translate_x-3,autocontrast-3--posterize-3--shear_x-3--translate_y-3,autocontrast-3--posterize-3--shear_x-3--rotate-3,autocontrast-3--posterize-3--shear_y-3--translate_x-3,autocontrast-3--posterize-3--shear_y-3--translate_y-3,autocontrast-3--posterize-3--shear_y-3--rotate-3,autocontrast-3--posterize-3--translate_x-3--translate_y-3,autocontrast-3--posterize-3--translate_x-3--rotate-3,autocontrast-3--posterize-3--translate_y-3--rotate-3,autocontrast-3--solarize-3--shear_x-3--shear_y-3,autocontrast-3--solarize-3--shear_x-3--translate_x-3,autocontrast-3--solarize-3--shear_x-3--translate_y-3,autocontrast-3--solarize-3--shear_x-3--rotate-3,autocontrast-3--solarize-3--shear_y-3--translate_x-3,autocontrast-3--solarize-3--shear_y-3--translate_y-3,autocontrast-3--solarize-3--shear_y-3--rotate-3,autocontrast-3--solarize-3--translate_x-3--translate_y-3,autocontrast-3--solarize-3--translate_x-3--rotate-3,autocontrast-3--solarize-3--translate_y-3--rotate-3,autocontrast-3--shear_x-3--shear_y-3--translate_x-3,autocontrast-3--shear_x-3--shear_y-3--translate_y-3,autocontrast-3--shear_x-3--shear_y-3--rotate-3,autocontrast-3--shear_x-3--translate_x-3--translate_y-3,autocontrast-3--shear_x-3--translate_x-3--rotate-3,autocontrast-3--shear_x-3--translate_y-3--rotate-3,aut
ocontrast-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--translate_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3,equalize-3--posterize-3--solarize-3--shear_y-3,equalize-3--posterize-3--solarize-3--translate_x-3,equalize-3--posterize-3--solarize-3--translate_y-3,equalize-3--posterize-3--solarize-3--rotate-3,equalize-3--posterize-3--shear_x-3--shear_y-3,equalize-3--posterize-3--shear_x-3--translate_x-3,equalize-3--posterize-3--shear_x-3--translate_y-3,equalize-3--posterize-3--shear_x-3--rotate-3,equalize-3--posterize-3--shear_y-3--translate_x-3,equalize-3--posterize-3--shear_y-3--translate_y-3,equalize-3--posterize-3--shear_y-3--rotate-3,equalize-3--posterize-3--translate_x-3--translate_y-3,equalize-3--posterize-3--translate_x-3--rotate-3 -m
python ../feature_corrupt_error.py hydra/launcher=$MSD_LAUNCHER rng_seed='${hydra:job.num}' ft.params.dataset_cfg.weights=$FEATURE_EXTRACTOR_DIR/checkpoints/model_epoch_0100.pyth train.params.aug_string=equalize-3--posterize-3--translate_y-3--rotate-3,equalize-3--solarize-3--shear_x-3--shear_y-3,equalize-3--solarize-3--shear_x-3--translate_x-3,equalize-3--solarize-3--shear_x-3--translate_y-3,equalize-3--solarize-3--shear_x-3--rotate-3,equalize-3--solarize-3--shear_y-3--translate_x-3,equalize-3--solarize-3--shear_y-3--translate_y-3,equalize-3--solarize-3--shear_y-3--rotate-3,equalize-3--solarize-3--translate_x-3--translate_y-3,equalize-3--solarize-3--translate_x-3--rotate-3,equalize-3--solarize-3--translate_y-3--rotate-3,equalize-3--shear_x-3--shear_y-3--translate_x-3,equalize-3--shear_x-3--shear_y-3--translate_y-3,equalize-3--shear_x-3--shear_y-3--rotate-3,equalize-3--shear_x-3--translate_x-3--translate_y-3,equalize-3--shear_x-3--translate_x-3--rotate-3,equalize-3--shear_x-3--translate_y-3--rotate-3,equalize-3--shear_y-3--translate_x-3--translate_y-3,equalize-3--shear_y-3--translate_x-3--rotate-3,equalize-3--shear_y-3--translate_y-3--rotate-3,equalize-3--translate_x-3--translate_y-3--rotate-3,posterize-3--solarize-3--shear_x-3--shear_y-3,posterize-3--solarize-3--shear_x-3--translate_x-3,posterize-3--solarize-3--shear_x-3--translate_y-3,posterize-3--solarize-3--shear_x-3--rotate-3,posterize-3--solarize-3--shear_y-3--translate_x-3,posterize-3--solarize-3--shear_y-3--translate_y-3,posterize-3--solarize-3--shear_y-3--rotate-3,posterize-3--solarize-3--translate_x-3--translate_y-3,posterize-3--solarize-3--translate_x-3--rotate-3,posterize-3--solarize-3--translate_y-3--rotate-3,posterize-3--shear_x-3--shear_y-3--translate_x-3,posterize-3--shear_x-3--shear_y-3--translate_y-3,posterize-3--shear_x-3--shear_y-3--rotate-3,posterize-3--shear_x-3--translate_x-3--translate_y-3,posterize-3--shear_x-3--translate_x-3--rotate-3,posterize-3--shear_x-3--translate_y-3--rotate-3,posterize
-3--shear_y-3--translate_x-3--translate_y-3,posterize-3--shear_y-3--translate_x-3--rotate-3,posterize-3--shear_y-3--translate_y-3--rotate-3,posterize-3--translate_x-3--translate_y-3--rotate-3,solarize-3--shear_x-3--shear_y-3--translate_x-3,solarize-3--shear_x-3--shear_y-3--translate_y-3,solarize-3--shear_x-3--shear_y-3--rotate-3,solarize-3--shear_x-3--translate_x-3--translate_y-3,solarize-3--shear_x-3--translate_x-3--rotate-3,solarize-3--shear_x-3--translate_y-3--rotate-3,solarize-3--shear_y-3--translate_x-3--translate_y-3,solarize-3--shear_y-3--translate_x-3--rotate-3,solarize-3--shear_y-3--translate_y-3--rotate-3,solarize-3--translate_x-3--translate_y-3--rotate-3,shear_x-3--shear_y-3--translate_x-3--translate_y-3,shear_x-3--shear_y-3--translate_x-3--rotate-3,shear_x-3--shear_y-3--translate_y-3--rotate-3,shear_x-3--translate_x-3--translate_y-3--rotate-3,shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--translate_x-3,autocontrast-3--equalize-3--posterize-3--solarize-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--shear_y-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--translate_x-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_y-3--translate_x-3,autocontrast-3--equalize-3--posterize-3--shear_y-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--shear_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--posterize-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--shear_y-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--t
ranslate_x-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--translate_y-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_y-3--translate_x-3,autocontrast-3--equalize-3--solarize-3--shear_y-3--translate_y-3,autocontrast-3--equalize-3--solarize-3--shear_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--solarize-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--solarize-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--shear_x-3--shear_y-3--translate_x-3,autocontrast-3--equalize-3--shear_x-3--shear_y-3--translate_y-3,autocontrast-3--equalize-3--shear_x-3--shear_y-3--rotate-3,autocontrast-3--equalize-3--shear_x-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--shear_x-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--shear_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--shear_y-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--translate_x-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--translate_y-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_y-3--translate_x-3,autocontrast-3--posterize-3--solarize-3--shear_y-3--translate_y-3,autocontrast-3--posterize-3--solarize-3--shear_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--translate_x-3--translate_y-3,autocontrast-3--posterize-3--solarize-3--translate_x-3--rotate-3 -m
python ../feature_corrupt_error.py hydra/launcher=$MSD_LAUNCHER rng_seed='${hydra:job.num}' ft.params.dataset_cfg.weights=$FEATURE_EXTRACTOR_DIR/checkpoints/model_epoch_0100.pyth train.params.aug_string=autocontrast-3--posterize-3--solarize-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--shear_x-3--shear_y-3--translate_x-3,autocontrast-3--posterize-3--shear_x-3--shear_y-3--translate_y-3,autocontrast-3--posterize-3--shear_x-3--shear_y-3--rotate-3,autocontrast-3--posterize-3--shear_x-3--translate_x-3--translate_y-3,autocontrast-3--posterize-3--shear_x-3--translate_x-3--rotate-3,autocontrast-3--posterize-3--shear_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--posterize-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--posterize-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--solarize-3--shear_x-3--shear_y-3--translate_x-3,autocontrast-3--solarize-3--shear_x-3--shear_y-3--translate_y-3,autocontrast-3--solarize-3--shear_x-3--shear_y-3--rotate-3,autocontrast-3--solarize-3--shear_x-3--translate_x-3--translate_y-3,autocontrast-3--solarize-3--shear_x-3--translate_x-3--rotate-3,autocontrast-3--solarize-3--shear_x-3--translate_y-3--rotate-3,autocontrast-3--solarize-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--solarize-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--solarize-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--solarize-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3,equalize-3--posterize-3--solarize-3--shear_x-3--translate
_x-3,equalize-3--posterize-3--solarize-3--shear_x-3--translate_y-3,equalize-3--posterize-3--solarize-3--shear_x-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_y-3--translate_x-3,equalize-3--posterize-3--solarize-3--shear_y-3--translate_y-3,equalize-3--posterize-3--solarize-3--shear_y-3--rotate-3,equalize-3--posterize-3--solarize-3--translate_x-3--translate_y-3,equalize-3--posterize-3--solarize-3--translate_x-3--rotate-3,equalize-3--posterize-3--solarize-3--translate_y-3--rotate-3,equalize-3--posterize-3--shear_x-3--shear_y-3--translate_x-3,equalize-3--posterize-3--shear_x-3--shear_y-3--translate_y-3,equalize-3--posterize-3--shear_x-3--shear_y-3--rotate-3,equalize-3--posterize-3--shear_x-3--translate_x-3--translate_y-3,equalize-3--posterize-3--shear_x-3--translate_x-3--rotate-3,equalize-3--posterize-3--shear_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--shear_y-3--translate_x-3--translate_y-3,equalize-3--posterize-3--shear_y-3--translate_x-3--rotate-3,equalize-3--posterize-3--shear_y-3--translate_y-3--rotate-3,equalize-3--posterize-3--translate_x-3--translate_y-3--rotate-3,equalize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3,equalize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3,equalize-3--solarize-3--shear_x-3--shear_y-3--rotate-3,equalize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3,equalize-3--solarize-3--shear_x-3--translate_x-3--rotate-3,equalize-3--solarize-3--shear_x-3--translate_y-3--rotate-3,equalize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3,equalize-3--solarize-3--shear_y-3--translate_x-3--rotate-3,equalize-3--solarize-3--shear_y-3--translate_y-3--rotate-3,equalize-3--solarize-3--translate_x-3--translate_y-3--rotate-3,equalize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,equalize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,equalize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,equalize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,equalize-3--shear_y-3--translate_x-3--translate_y-3-
-rotate-3,posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3,posterize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3,posterize-3--solarize-3--shear_x-3--shear_y-3--rotate-3,posterize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3,posterize-3--solarize-3--shear_x-3--translate_x-3--rotate-3,posterize-3--solarize-3--shear_x-3--translate_y-3--rotate-3,posterize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3,posterize-3--solarize-3--shear_y-3--translate_x-3--rotate-3,posterize-3--solarize-3--shear_y-3--translate_y-3--rotate-3,posterize-3--solarize-3--translate_x-3--translate_y-3--rotate-3,posterize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,posterize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,posterize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,posterize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,posterize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,solarize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,solarize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,solarize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,solarize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--translate_x-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_y-3--translate_x-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_y-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--translate_x-3--rotate-3,autocontrast-3-
-equalize-3--posterize-3--solarize-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--shear_y-3--translate_x-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--shear_y-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--shear_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--shear_y-3--translate_x-3--rotate-3 -m
python ../feature_corrupt_error.py hydra/launcher=$MSD_LAUNCHER rng_seed='${hydra:job.num}' ft.params.dataset_cfg.weights=$FEATURE_EXTRACTOR_DIR/checkpoints/model_epoch_0100.pyth train.params.aug_string=autocontrast-3--equalize-3--posterize-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--shear_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--solarize-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--shear_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--translate_x-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize
-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--posterize-3--solarize-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--posterize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--solarize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--solarize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--solarize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3,equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3,equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3,equalize-3--posterize-3--solarize-3--shear_x-3--translate_x-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3,equalize-3--posterize-3--solarize-3--shear_y-3--translate_x-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_y-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3--translate_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,equalize-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--rotate
-3,equalize-3--posterize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,equalize-3--posterize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,equalize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,equalize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,equalize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,equalize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,equalize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,equalize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,posterize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,posterize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,posterize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,posterize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--equalize-
3--posterize-3--solarize-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3--translate_x-3--translate
_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3 -m
python ../feature_corrupt_error.py hydra/launcher=$MSD_LAUNCHER rng_seed='${hydra:job.num}' ft.params.dataset_cfg.weights=$FEATURE_EXTRACTOR_DIR/checkpoints/model_epoch_0100.pyth train.params.aug_string=equalize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3,autocontrast-3--equalize-3--posterize-3--solarize-3--shear_x-3--shear_y-3--translate_x-3--translate_y-3--rotate-3 -m
python ../feature_corrupt_error.py rng_seed=$RANDOM ft.params.dataset_cfg.weights=$FEATURE_EXTRACTOR_DIR/checkpoints/model_epoch_0100.pyth train=cifar10_aa ft_augmentation=train_mirror_aa hydra/launcher=$MSD_LAUNCHER -m
python ../feature_corrupt_error.py rng_seed=$RANDOM ft.params.dataset_cfg.weights=$FEATURE_EXTRACTOR_DIR/checkpoints/model_epoch_0100.pyth train=cifar10_pg ft_augmentation=train_mirror_pg hydra/launcher=$MSD_LAUNCHER -m
|
#!/bin/bash
# Build, repair and package the Crystal Dash docset.
set -x
set -e
# Build the docset
dashing build
# Path to docset db index
DB=Crystal.docset/Contents/Resources/docSet.dsidx
# Fix newlines: find index entries whose name contains an embedded newline
# by matching a literal newline inside the LIKE pattern (the string spans
# two source lines on purpose).
newlined=$(echo "SELECT id FROM searchIndex WHERE name LIKE '%
%';" | sqlite3 $DB)
# Word-splitting of $newlined is intentional: iterate over one id per word.
for id in $newlined; do
name=$(echo "SELECT name FROM searchIndex WHERE id = ${id};" | sqlite3 $DB)
# Keep only the first whitespace-separated word of the broken name.
newname=$(echo $name | awk '{ print $1 }')
echo "UPDATE searchIndex SET name = '${newname}' WHERE id = ${id};" | sqlite3 $DB
done
# Compress docset
tar --exclude='.DS_Store' -cvzf Crystal.tgz Crystal.docset
|
def replace_vowels(string):
    """Return a copy of ``string`` in which every ASCII vowel (either case)
    is replaced by the character one code point after it; every other
    character is kept unchanged.
    """
    vowels = "aeiouAEIOU"
    shifted = [chr(ord(ch) + 1) if ch in vowels else ch for ch in string]
    return "".join(shifted)
# Demo: each vowel shifts to the next code point -> "Hfllp Wprld"
print(replace_vowels("Hello World"))
class OfficialDocument:
    """A single official document: an identifier, a title and its body text."""

    def __init__(self, identifier, title, content):
        # Unique id used for lookups, display title, and full body text.
        self.identifier = identifier
        self.title = title
        self.content = content

    def __str__(self):
        """Human-readable summary showing id and title (content omitted)."""
        return "ID: {}, Title: {}".format(self.identifier, self.title)

    def get_content(self):
        """Return the document's full body text."""
        return self.content
class OfficialDocumentsCollection:
    """An insertion-ordered collection of documents with lookup helpers."""

    def __init__(self):
        # Documents kept in the order they were added.
        self.documents = []

    def add_document(self, document):
        """Append ``document`` to the collection."""
        self.documents.append(document)

    def get_document_titles(self):
        """Return the titles of all documents, in insertion order."""
        titles = []
        for entry in self.documents:
            titles.append(entry.title)
        return titles

    def get_document_by_id(self, identifier):
        """Return the first document whose identifier matches, or None."""
        return next(
            (entry for entry in self.documents if entry.identifier == identifier),
            None,
        )
def categorize_characters(input_string):
    """Group the characters of ``input_string`` into lists keyed by category.

    The string is read as consecutive (category, character) pairs:
    indices 0/1 form the first pair, 2/3 the second, and so on.

    Bug fix: the original loop read ``input_string[i + 1]`` for every
    even index, which raises IndexError on odd-length input (e.g. the
    17-character demo string below).  A trailing unpaired character is
    now ignored instead.

    Returns a dict mapping each category character to the list of
    characters paired with it, in input order.
    """
    categorized_characters = {}
    # Stop one short of the end so input_string[i + 1] is always valid.
    for i in range(0, len(input_string) - 1, 2):
        category = input_string[i]
        character = input_string[i + 1]
        # setdefault replaces the explicit membership test of the original.
        categorized_characters.setdefault(category, []).append(character)
    return categorized_characters
# Demo: characters are paired positionally, so the spaces and symbols in
# this string become categories/members too.
# NOTE(review): this string is 17 characters long, leaving the final '&'
# without a partner — confirm the intended input format (a strict
# pair-by-pair reader would index past the end here).
input_string = "A3 B@ C5 D! E2 F&"
output = categorize_characters(input_string)
print(output)
#!/bin/bash
# Appears to be a git pre-push hook: refuse to push unsigned commits to
# the master branch of the anwang/safe GitHub repository
# (ssh and https remote URL forms both match; $2 is the remote URL).
if ! [[ "$2" =~ ^(git@)?(www.)?github.com(:|/)anwang/safe(.git)?$ ]]; then
    exit 0
fi
# stdin carries one "<local ref> <local sha> <remote ref> <remote sha>"
# line per ref being pushed.
while read LINE; do
    # Prepend a dummy word so the fields land in $2..$5 regardless of
    # leading whitespace; $3 = local sha, $4 = remote ref.
    set -- A $LINE
    if [ "$4" != "refs/heads/master" ]; then
        continue
    fi
    # Quiet first pass: only care about the exit status here.
    if ! ./contrib/verify-commits/verify-commits.sh $3 > /dev/null 2>&1; then
        echo "ERROR: A commit is not signed, can't push"
        # Re-run without redirection so the user sees the failure details.
        ./contrib/verify-commits/verify-commits.sh
        exit 1
    fi
done < /dev/stdin
|
<filename>BlockAPI.js<gh_stars>0
const Hapi=require('hapi');
/**
* Class to create and initialize hapi.js server and its controllers based on the Examples provided in the Blockchain Developer Nanodegree Blockchain Web Services module
*
*/
// Builds the hapi server, registers the block controller's routes and
// starts listening — all as a side effect of construction.
class BlockAPI {
    constructor() {
        // Fixed local endpoint: http://localhost:8000
        this.server = Hapi.Server({
            port: 8000,
            host: 'localhost'
        });
        this.initControllers();
        // NOTE(review): start() is async and its promise is not awaited
        // or error-handled here — a failed start is unreported; confirm.
        this.start();
    }
    //Initializes hapi.js controllers
    initControllers() {
        // BlockController exports a function that registers its routes
        // on the server instance passed in.
        require("./BlockController.js")(this.server);
    }
    // Function to start the server
    async start() {
        await this.server.start();
        console.log(`Server running at: ${this.server.info.uri}`);
    }
}
new BlockAPI();
#!/bin/bash
# Sync the local resources_test folder to the openpipelines-data S3
# bucket using the "di" AWS profile.  --delete removes remote objects
# missing locally; --dryrun means nothing is actually changed.
aws s3 sync --profile di "resources_test" "s3://openpipelines-data" --delete --dryrun
# Run the pre-generated job list through GNU parallel, 6 jobs at a time.
parallel --jobs 6 < ./results/exp_iterations/run-3/lustre_4n_6t_6d_1000f_617m_1i/jobs/jobs_n0.txt
|
/**
 * Holds a GPS location and a free-form description.
 *
 * Fix: the original class had setters only, making the stored state
 * unreachable (write-only fields); matching getters are added, which is
 * backward-compatible for all existing callers.
 */
class LocationManager {
    /** @var mixed GPS location (format set by the caller). */
    private $ubicacion_gps;
    /** @var mixed Free-form description of the location. */
    private $descripcion;

    function setUbicacion_gps($ubicacion_gps) {
        $this->ubicacion_gps = $ubicacion_gps;
    }

    /** @return mixed the GPS location, or null if never set. */
    function getUbicacion_gps() {
        return $this->ubicacion_gps;
    }

    function setDescripcion($descripcion) {
        $this->descripcion = $descripcion;
    }

    /** @return mixed the description, or null if never set. */
    function getDescripcion() {
        return $this->descripcion;
    }
}
def minimax(board, depth, maxTurn):
    # Minimax search in negamax form; returns a (score, move) pair.
    # NOTE(review): depends on external checkWinner/getMoves. Assumes
    # checkWinner returns the winning side (comparable with maxTurn),
    # 2 for a draw, and some other value while the game is open — confirm
    # against its definition.
    result=checkWinner(board);
    if result==maxTurn:
        # Side to move has already lost this line.
        return -1,0;
    elif result==(maxTurn^1):
        # maxTurn^1 flips the player; relies on bool/int mixing
        # (True^1 == 0, False^1 == 1).
        return 1,0;
    elif result==2:
        # Draw.
        return 0,0;
    # Seed "best" with a sentinel score outside the reachable [-1, 1] range.
    if maxTurn==True:
        best=-2, None
    else: best=2, None
    for move in getMoves(board):
        # Play the move in place, recurse for the opponent, then undo it.
        board[move[0]][move[1]]=maxTurn
        m,miniMax=minimax(board,depth+1,maxTurn^1)
        board[move[0]][move[1]]="-"
        # Negate: the child's score is from the opponent's viewpoint.
        m=-m
        if maxTurn==True:
            if m>best[0]:
                best=m,move
        else:
            if m<best[0]:
                best=m,move
    return best
def predictMove(board):
    # Return the best move for the maximizing player from this position;
    # the score component of the search result is discarded.
    m,move=minimax(board, 0, True)
    return move
<gh_stars>0
/*
*
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include "pkt_info.h"
//#include <odp/helper/tcp.h>
#include <odp/helper/ip.h>
/* 16-byte IPv6 address viewed as two 64-bit words so it can be copied
 * in two moves. */
typedef struct{ uint64_t d[2]; } ipv6addr;
/* Overlay for the first two 16-bit fields of an L4 header: source and
 * destination port (big-endian on the wire) — the layout shared by
 * TCP, UDP and SCTP. */
typedef struct{ uint16be_t src,dest; } ports;
proc_result_t pktinf_input(odp_packet_t pkt,void* args) ODP_HOT_CODE;
/* Fill the pkt_info passed via `args` with the packet's source and
 * destination addresses (IPv4 or IPv6) and, when the packet carries
 * TCP/UDP/SCTP, its source/destination ports.  The packet itself is
 * handed on unchanged (NA_CONTINUE).
 * NOTE(review): the casts assume the l3/l4 pointers are sufficiently
 * aligned for 32/64-bit loads — confirm for the target platform. */
proc_result_t pktinf_input(odp_packet_t pkt,void* args){
    proc_result_t result;
    struct pkt_info *info = args;
    result.packet_handle = pkt;
    result.next_action = NA_CONTINUE;
    if(odp_packet_has_ipv4(pkt)){
        odph_ipv4hdr_t* hdr = odp_packet_l3_ptr(pkt,NULL);
        /* Copy the 4-byte addresses as single big-endian words. */
        *((uint32be_t*)info->src_ip) = hdr->src_addr;
        *((uint32be_t*)info->dst_ip) = hdr->dst_addr;
    }else if(odp_packet_has_ipv6(pkt)){
        odph_ipv6hdr_t* hdr = odp_packet_l3_ptr(pkt,NULL);
        *((ipv6addr*)info->src_ip) = *((ipv6addr*)hdr->src_addr);
        *((ipv6addr*)info->dst_ip) = *((ipv6addr*)hdr->dst_addr);
    }
    if(odp_packet_has_tcp(pkt)||odp_packet_has_udp(pkt)||odp_packet_has_sctp(pkt)){
        /* One overlay struct covers all three protocols (ports first). */
        ports p = *((ports*)odp_packet_l4_ptr(pkt,NULL));
        info->src_port = p.src;
        info->dst_port = p.dest;
    }
    return result;
}
|
package com.lambdaschool.solution;
public class Main {
    // Demonstrates the dependency-injection factories: one injector per
    // sport, each producing a Processor that reports the athlete and venue.
    public static void main(String[] args) {
        show(new TrackAthleteCreationInjector());
        show(new HockeyAthleteCreationInjector());
        show(new RugbyAthleteCreationInjector());
        show(new BaseballAthleteCreationInjector());
    }

    // Obtain the processor from the injector and display its athlete
    // and court — the repeated three-step sequence of the original.
    private static void show(AthleteCreationInjector injector) {
        Processor app = injector.getProcess();
        app.displayAthlete();
        app.displayCourt();
    }
}
|
require 'fog/core/collection'
module Fog
  module Compute
    class OracleCloud
      # Fog collection of Oracle Cloud compute shapes (instance size
      # definitions), backed by the service's shape API.
      class Shapes < Fog::Collection
        model Fog::Compute::OracleCloud::Shape

        # Fetch a single shape by name and wrap it in a Shape model.
        def get(name)
          data = service.get_shape(name).body
          new(data)
        end

        # Load every shape returned by the service into the collection.
        def all
          data = service.list_shapes().body['result']
          load(data)
        end
      end
    end
  end
end
|
<reponame>gautiselvaraj/nag-me<gh_stars>0
import React from 'react';
import { mount } from 'enzyme';
import renderer from 'react-test-renderer';
import H5 from '../../components/H5';
import theme from '../../theme';
import 'jest-styled-components';
// Unit tests for the <H5 /> styled heading component.
describe('<H5 />', () => {
  // Snapshot guards against unintended markup/style regressions.
  it('should match snapshot', () => {
    const tree = renderer.create(<H5 theme={theme}>Heading 5</H5>).toJSON();
    expect(tree).toMatchSnapshot();
  });
  // Mount with enzyme and verify both the rendered text and the tag.
  it('should have correct text & element', () => {
    const h5 = mount(<H5 theme={theme}>Heading 5</H5>);
    expect(h5.text()).toBe('Heading 5');
    expect(h5.find('h5').length).toBe(1);
  });
});
|
package view
import (
"github.com/ungerik/go-start/errs"
"strings"
)
///////////////////////////////////////////////////////////////////////////////
// Video
// Video shows a Youtube Video, other formats to come.
// Video shows a Youtube Video, other formats to come.
// Width/Height are in pixels; zero means "use the default size"
// (see Render).
type Video struct {
	ViewBaseWithId
	Class  string
	URL    string
	Width  int
	Height int
	//Description string
}
// Render writes the video as an embedded YouTube <iframe>.
//
// The URL must be a youtu.be short link or a youtube.com watch link
// (http or https — https was previously rejected); the 11-character
// video ID immediately after the prefix is embedded.  Width and Height
// fall back to 640x390 when zero.  Any other URL — or one too short to
// hold a full video ID, which previously caused a slice-bounds panic —
// yields an error.
func (self *Video) Render(ctx *Context) (err error) {
	youtubeId := ""
	prefixes := []string{
		"http://youtu.be/",
		"https://youtu.be/",
		"http://www.youtube.com/watch?v=",
		"https://www.youtube.com/watch?v=",
	}
	for _, prefix := range prefixes {
		// Length guard: slicing [i : i+11] on a shorter URL would panic.
		if strings.HasPrefix(self.URL, prefix) && len(self.URL) >= len(prefix)+11 {
			youtubeId = self.URL[len(prefix) : len(prefix)+11]
			break
		}
	}
	if youtubeId != "" {
		ctx.Response.XML.OpenTag("iframe")
		ctx.Response.XML.Attrib("id", self.ID())
		ctx.Response.XML.AttribIfNotDefault("class", self.Class)
		// Default player size when none was configured.
		width := self.Width
		if width == 0 {
			width = 640
		}
		height := self.Height
		if height == 0 {
			height = 390
		}
		ctx.Response.XML.Attrib("src", "http://www.youtube.com/embed/", youtubeId)
		ctx.Response.XML.Attrib("width", width)
		ctx.Response.XML.Attrib("height", height)
		ctx.Response.XML.Attrib("frameborder", "0")
		ctx.Response.XML.Attrib("allowfullscreen", "allowfullscreen")
		ctx.Response.XML.CloseTag()
		return nil
	}
	return errs.Format("Unsupported video URL: %s", self.URL)
}
|
<filename>bionetgen/main.py
from bionetgen.modelapi.utils import run_command
import cement
import subprocess, os
import bionetgen as bng
from cement.core.exc import CaughtSignal
from .core.exc import BioNetGenError
from .core.main import runCLI
from .core.main import plotDAT
from .core.main import runAtomizeTool
from .core.main import printInfo
from .core.main import visualizeModel
from .core.notebook import BNGNotebook
# pull defaults defined in core/defaults
CONFIG = bng.defaults.config
VERSION_BANNER = bng.defaults.banner
# require version argparse action
import argparse, sys
from pkg_resources import packaging
class requireAction(argparse.Action):
    """argparse action backing ``--require``: verifies that the installed
    bionetgen version is at least the version string supplied."""

    def __init__(self, option_strings, dest, nargs=None, **kwargs):
        # The option takes exactly one value; reject nargs overrides.
        if nargs is not None:
            raise ValueError("nargs not allowed")
        super().__init__(option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
        if values is not None:
            req_version = packaging.version.parse(values)
            cver = bng.core.version.get_version()
            cur_version = packaging.version.parse(cver)
            # if we don't meet requirement, warn user
            # NOTE(review): tracebacklimit is zeroed even when the check
            # passes — confirm this global side effect is intended.
            sys.tracebacklimit = 0
            if not (cur_version >= req_version):
                raise RuntimeError(
                    f"Version {values} is required but current version is {cver}. \n"
                    + "Try running `pip install bionetgen --upgrade`"
                )
        # return super().__call__(parser, namespace, values, option_string=option_string)
class BNGBase(cement.Controller):
"""
Base cement controller for BioNetGen CLI
Used to set meta attributes like program name (label) as well
as command line arguments. Each method is a subcommand in the
command line with its own command line arguments.
Subcommands
-------
run
runs a model given by -i in folder given by -o
notebook
generates and opens a notebook for a model given by -i, optional
plot
plots a gdat/cdat/scan file given by -i into file supplied by -o
info
provides version and path information about the BNG installation and dependencies
visualize
provides various visualization options for BNG models
"""
    class Meta:
        # cement controller metadata: program name, --help description,
        # and global (pre-subcommand) command line arguments.
        label = "bionetgen"
        description = "A simple CLI to bionetgen <https://bionetgen.org>. Note that you need Perl installed."
        help = "bionetgen"
        arguments = [
            # TODO: Auto-load in BioNetGen version here
            (["-v", "--version"], dict(action="version", version=VERSION_BANNER)),
            # (['-s','--sedml'],dict(type=str,
            #                 default=CONFIG['bionetgen']['bngpath'],
            #                 help="Optional path to SED-ML file, if available the simulation \
            #                      protocol described in SED-ML will be ran")),
            # --require aborts early when the installed version is too old.
            (["-req", "--require"], dict(action=requireAction, type=str, default=None)),
        ]
    # This overwrites the default behavior and runs the CLI object from core/main
    # which in turn just calls BNG2.pl with the supplied options
    @cement.ex(
        help="Runs a given model using BNG2.pl",
        arguments=[
            (
                ["-i", "--input"],
                {
                    "help": "Path to BNGL file (required)",
                    "default": None,
                    "type": str,
                    "required": True,
                },
            ),
            (
                ["-o", "--output"],
                {
                    "help": 'Optional path to output folder (default: ".")',
                    "default": ".",
                    "type": str,
                },
            ),
            (
                ["-l", "--log"],
                {
                    "help": "saves BNG2.pl log to a file given (default: None)",
                    "default": None,
                    "type": str,
                    "dest": "log_file",
                },
            ),
            (
                ["--traceback-depth"],
                {
                    "help": "Sets the traceback depth for python. "
                    + "Defaults to 0 to avoid long tracebacks after a failed BNG2.pl call",
                    "default": 0,
                    "type": int,
                    "dest": "traceback_depth",
                },
            ),
        ],
    )
    def run(self):
        """
        This is the main run functionality of the CLI.
        It uses a convenience function defined in core/main
        to run BNG2.pl using subprocess, given the set of arguments
        in the command line and the configurations set by the defaults
        as well as the end-user.
        """
        args = self.app.pargs
        runCLI(self.app.config, args)
@cement.ex(
help="Starts a Jupyter notebook to help run and analyze \
bionetgen models",
arguments=[
(
["-i", "--input"],
{
"help": "Path to BNGL file to use with notebook",
"default": None,
"type": str,
"required": False,
},
),
(
["-o", "--output"],
{
"help": "(optional) File to write the notebook in",
"default": "",
"type": str,
},
),
(
["-op", "--open"],
{
"help": "(optional) If given, the notebook will by opened using nbopen",
"action": "store_true",
},
),
],
)
def notebook(self):
"""
Notebook subcommand that boots up a Jupyter notebook using the
nbopen library. It uses a BNGNotebook class defined in core/notebook.
The default template can be found under assets and in the future
will likely be replaced by a standard templating tool (e.g. Jinja).
The default base template is agnostic to the model and if -i is given
the template then will be adjusted to load in the model supplied.
"""
args = self.app.pargs
if args.input is not None:
# we want to use the template to write a custom notebok
assert args.input.endswith(
".bngl"
), f"File {args.input} doesn't have bngl extension!"
try:
import bionetgen
m = bionetgen.bngmodel(args.input)
str(m)
except:
raise RuntimeError(f"Couldn't import given model: {args.input}!")
notebook = BNGNotebook(
CONFIG["bionetgen"]["notebook"]["template"], INPUT_ARG=args.input
)
else:
# just use the basic notebook
notebook = BNGNotebook(CONFIG["bionetgen"]["notebook"]["path"])
# find our file name
if len(args.output) == 0:
fname = CONFIG["bionetgen"]["notebook"]["name"]
else:
fname = args.output
# write the notebook out
if os.path.isdir(fname):
if args.input is not None:
basename = os.path.basename(args.input)
mname = basename.replace(".bngl", "")
fname = mname + ".ipynb"
else:
mname = CONFIG["bionetgen"]["notebook"]["name"]
fname = os.path.join(args.output, mname)
notebook.write(fname)
# open the notebook with nbopen
stdout = getattr(subprocess, CONFIG["bionetgen"]["stdout"])
stderr = getattr(subprocess, CONFIG["bionetgen"]["stderr"])
if args.open:
command = ["nbopen", fname]
rc, _ = run_command(command)
    @cement.ex(
        help="Rudimentary plotting of gdat/cdat/scan files",
        arguments=[
            (
                ["-i", "--input"],
                {
                    "help": "Path to .gdat/.cdat file to use plot",
                    "default": None,
                    "type": str,
                    "required": True,
                },
            ),
            (
                ["-o", "--output"],
                {
                    "help": 'Optional path for the plot (default: "$model_name.png")',
                    "default": ".",
                    "type": str,
                },
            ),
            (
                ["--legend"],
                {
                    "help": "To plot the legend or not (default: False)",
                    "default": False,
                    "action": "store_true",
                    "required": False,
                },
            ),
            (
                ["--xmin"],
                {
                    "help": "x-axis minimum (default: determined from data)",
                    "default": None,
                    "type": float,
                },
            ),
            # NOTE(review): unlike --xmin (default None), the remaining
            # axis/label options default to False — presumably plotDAT
            # treats both as "unset"; confirm before normalizing.
            (
                ["--xmax"],
                {
                    "help": "x-axis maximum (default: determined from data)",
                    "default": False,
                    "type": float,
                },
            ),
            (
                ["--ymin"],
                {
                    "help": "y-axis minimum (default: determined from data)",
                    "default": False,
                    "type": float,
                },
            ),
            (
                ["--ymax"],
                {
                    "help": "y-axis maximum (default: determined from data)",
                    "default": False,
                    "type": float,
                },
            ),
            (["--xlabel"], {"help": "x-axis label (default: time)", "default": False}),
            (
                ["--ylabel"],
                {"help": "y-axis label (default: concentration)", "default": False},
            ),
            (
                ["--title"],
                {
                    "help": "title of plot (default: determined from input file)",
                    "default": False,
                },
            ),
        ],
    )
    def plot(self):
        """
        Plotting subcommand for very basic plotting using a convenience function
        defined in core/main.
        Currently we support gdat/cdat/scan file plotting, in a very basic manner.
        This command expects a space separated file where each column is a series.
        The first column is used for the x-axis and the rest is used as y-axis
        and every series is plotted.
        See bionetgen plot -h for all the allowed options.
        """
        args = self.app.pargs
        # we need to have gdat/cdat files
        assert (
            args.input.endswith(".gdat")
            or args.input.endswith(".cdat")
            or args.input.endswith(".scan")
        ), "Input file has to be either a gdat or a cdat file"
        # All parsed CLI options are forwarded to plotDAT as keywords.
        plotDAT(args.input, args.output, kw=dict(args._get_kwargs()))
    @cement.ex(
        help="Provides version information for BNG and dependencies",
        arguments=[
            (
                ["-d", "--detail"],
                {
                    "help": "Adds more detail to the information printed.",
                    "default": False,
                    "action": "store_true",
                },
            ),
        ],
    )
    def info(self):
        """
        Information subcommand to provide installation versions and paths.
        Currently provides version information for BioNetGen, the BNG CLI, Perl,
        numpy, pandas, and libroadrunner. Also provides BNG2.pl and pyBNG paths.
        """
        args = self.app.pargs
        # Printing is delegated to core/main; -d/--detail widens the output.
        printInfo(self.app.config, args)
    @cement.ex(
        help="Provides a simple way to get various visualizations of the model.",
        arguments=[
            (
                ["-i", "--input"],
                {
                    "help": "Path to BNGL model to visualize",
                    "default": None,
                    "type": str,
                    "required": True,
                },
            ),
            (
                ["-o", "--output"],
                {
                    "help": "(optional) Output folder, defaults to current folder",
                    "default": None,
                    "type": str,
                },
            ),
            (
                ["-t", "--type"],
                {
                    "help": "(optional) Type of visualization requested. Valid options are: "
                    + "'ruleviz_pattern','ruleviz_operation', 'contactmap' and 'regulatory'."
                    + " Defaults to 'contactmap'.",
                    "default": "",
                    "type": str,
                },
            ),
        ],
    )
    def visualize(self):
        """
        Subcommand to generate visualizations. Currently only supports visualize
        action from BioNetGen.
        Types of visualizations and their options
        - Rule pattern visualization: Visualization of each rule as a bipartite graph
        - Rule operation visualization: Visualization of each rule showing explicit graph operations
        - Contact map: Visualize the contact map of the model
        - Regulatory graph: Visualize the regulatory graph of the model
        """
        args = self.app.pargs
        # The actual work (calling BNG2.pl visualize) lives in core/main.
        visualizeModel(self.app.config, args)
    @cement.ex(
        help="SBML to BNGL translator",
        arguments=[
            (
                ["-i", "--input"],
                {
                    "help": "input SBML file",
                    "default": None,
                    "type": str,
                    "required": True,
                },
            ),
            (
                ["-o", "--output"],
                {
                    # NOTE(review): this help string says "output SBML
                    # file" although the subcommand translates SBML to
                    # BNGL — likely a copy-paste from --input; fixing it
                    # is a code (runtime string) change.
                    "help": "output SBML file",
                    "default": ".",
                    "type": str,
                },
            ),
            (
                ["-t", "--annotation"],
                {
                    "help": "keep annotation information",
                    "default": False,
                    "action": "store_true",
                },
            ),
            (
                ["-c", "--convention-file"],
                {
                    "help": "Conventions file",
                    "type": str,
                },
            ),
            (
                ["-n", "--naming-conventions"],
                {
                    "help": "Naming conventions file",
                    "type": str,
                },
            ),
            (
                ["-u", "--user-structures"],
                {
                    "help": "User defined species",
                    "type": str,
                },
            ),
            (
                ["-id", "--molecule-id"],
                {
                    "help": "use SBML molecule ids instead of names. IDs are less descriptive but more bngl friendly. Use only if the generated BNGL has syntactic errors",
                    "default": False,
                    "action": "store_true",
                },
            ),
            (
                ["-a", "--atomize"],
                {
                    "help": "Infer molecular structure",
                    "default": False,
                    "action": "store_true",
                },
            ),
            (
                ["-p", "--pathwaycommons"],
                {
                    "help": "Use pathway commons to infer molecule binding. This setting requires an internet connection and will query the pathway commons web service.",
                    "default": False,
                    "action": "store_true",
                },
            ),
            (
                ["-s", "--isomorphism-check"],
                {
                    "help": "disallow atomizations that produce the same graph structure",
                    "action": "store_true",
                },
            ),
            (
                ["-I", "--ignore"],
                {
                    "help": "ignore atomization translation errors",
                    "action": "store_true",
                },
            ),
            (
                ["-mr", "--memoized-resolver"],
                {
                    "help": "sometimes the dependency graph is too large and might cause a very large memory requirement. This option will slow the translator down but will decrease memory usage",
                    "default": False,
                    "action": "store_true",
                },
            ),
            (
                ["-k", "--keep-local-parameters"],
                {
                    "help": "this option will keep the local parameters unresolved so that they can be controlled from the parameter section in the BNGL. Without this option, local parameters will be resolved to their values in functions",
                    "default": False,
                    "action": "store_true",
                },
            ),
            (
                ["-q", "--quiet-mode"],
                {
                    "help": "this option will supress logging into STDIO and instead will write the logging into a file",
                    "default": False,
                    "action": "store_true",
                },
            ),
            (
                ["-ll", "--log-level"],
                {
                    "help": 'This option allows you to select a logging level, from quietest to loudest options are: "CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG". Default is set to DEBUG',
                    "default": "DEBUG",
                    "type": str,
                },
            ),
            # (
            #     ["-cu", "--convert-units"],
            #     {
            #         "help": "convert units. Otherwise units are copied straight from sbml to bngl",
            #         "default": True,
            #         "action": "store_false",
            #     },
            # ),
        ],
    )
    def atomize(self):
        """
        Atomizer subcommand: translates an SBML model to BNGL, optionally
        inferring molecular structure (-a) and consulting Pathway Commons
        (-p).  All parsed options are forwarded to runAtomizeTool in
        core/main, which performs the actual translation.
        """
        args = self.app.pargs
        runAtomizeTool(self.app.config, args)
class BioNetGen(cement.App):
    """
    Cement app for BioNetGen CLI
    Used to set configuration options like config default,
    exiting on close and setting log handler. Currently set
    attributes are below.
    Attributes
    ----------
    label : str
        name of the application
    config_defaults : str
        the default set of configuration options, set in BNGDefaults object
    config_handler: str
        the name of the config handler, determines the syntax of the config files
    config_file_suffix: str
        the suffix to be used for config files
    config_files: list of str
        additional list of config files to enable
    exit_on_close : boolean
        determine if the app should exit when the key function is ran
    extensions : list of str
        extensions to be used with cement framework
    log_handler: str
        name of the log handler
    handlers: list of obj
        list of objects derived from cement.Controller that handles the actual CLI
    """

    class Meta:
        label = "bionetgen"
        # configuration defaults
        config_defaults = CONFIG
        # call sys.exit() on close
        exit_on_close = True
        # load additional framework extensions
        extensions = [
            "yaml",
            "colorlog",
        ]
        # configuration handler
        config_handler = "configparser"
        # configuration file suffix
        config_file_suffix = ".conf"
        # add current folder to the list of config dirs
        # (i.e. a ./.bionetgen.conf in the working directory is also read)
        config_files = ["./.{}.conf".format(label)]
        # set the log handler
        log_handler = "colorlog"
        # register handlers
        handlers = [BNGBase]
class BioNetGenTest(cement.TestApp, BioNetGen):
    """
    A sub-class of BioNetGen CLI application for testing
    purposes. See tests/test_bionetgen.py for examples.
    """

    class Meta:
        # Same label as the real app, presumably so the same config
        # sections and file names apply under test.
        label = "bionetgen"
def main():
    """Entry point: build the cement app, run it, and translate the
    known failure modes into exit codes (1 for errors, 0 for signals).
    """
    with BioNetGen() as app:

        def _fail(label, exc):
            # Report the error, flag a non-zero exit, and dump the
            # traceback only when the app runs in debug mode.
            print("%s > %s" % (label, exc.args[0]))
            app.exit_code = 1
            if app.debug is True:
                import traceback

                traceback.print_exc()

        try:
            app.run()
        except AssertionError as e:
            _fail("AssertionError", e)
        except BioNetGenError as e:
            _fail("BioNetGenError", e)
        except CaughtSignal as e:
            # Default Cement signals are SIGINT and SIGTERM, exit 0 (non-error)
            print("\n%s" % e)
            app.exit_code = 0


if __name__ == "__main__":
    main()
|
#!/bin/bash
# Run the project's "start" script via yarn; -s silences yarn's own output.
yarn run -s start
#!/usr/bin/env ruby -wKU
require 'mongrel'
require ENV['TM_SUPPORT_PATH'] + "/lib/exit_codes"
require ENV['TM_SUPPORT_PATH'] + "/lib/progress"
require ENV["TM_SUPPORT_PATH"] + "/lib/web_preview"
require ENV["TM_SUPPORT_PATH"] + "/lib/escape"
module FlashMate
# This should be tested and generated
SERVER_PORT = 3579
HOSTNAME = "0.0.0.0"
class SWF
attr_reader :path, :width, :height, :color, :messages
def initialize(path, width, height, color = "##ffffff", messages = {})
@path = path
@width = width
@height = height
@color = color
@messages = messages
end
end
class FlashServer
def initialize(swf, debugger = nil, port = SERVER_PORT)
@debugger = debugger
@port = port
@server = Mongrel::HttpServer.new(HOSTNAME, port)
@server.register("/_swf", Mongrel::DirHandler.new(swf.path))
@server.register("/_shutdown", ShutdownHandler.new(@server))
@server.register("/_scripts", ScriptsHandler.new(swf, debugger, self))
@server.register("/_player", Mongrel::DirHandler.new("#{ENV['TM_BUNDLE_SUPPORT']}/player/index.html"))
@server.register("/_javascripts", Mongrel::DirHandler.new("#{ENV['TM_BUNDLE_SUPPORT']}/player/_javascripts"))
@server.register("/", Mongrel::DirHandler.new(File.dirname(swf.path)))
trap("INT") {@server.stop}
end
def run
@server.run
end
def url
return "http://#{HOSTNAME}:#{@port}"
end
end
class PixelBenderServer < FlashServer
def initialize(pbj)
# TODO: Make sure this path works properly
# super("pbj_player.swf")
end
end
# Once the message handler is called it calls the shutdown handler
class ScriptsHandler < Mongrel::HttpHandler
def initialize(swf, debugger, server)
@swf = swf
@debugger = debugger
@server = server
@swf_loaded = false
end
def process(request, response)
response.start(200) do |head,out|
# Put a delay between the calls
sleep 0.2
head["Content-Type"] = "text/javascript"
unless @swf_loaded then
@swf.messages.each do |msg|
m = msg.to_debug
m.gsub!(/'/) { |match| "\\#{match}" }
m.gsub!(/[\n\r]/) { |match| "" }
out.write "Warptube.Debug.trace('#{m}');"
end
out.write("Warptube.Debug.loadSWF('/_swf', '#{@swf.width}', " +
"'#{@swf.height}', '#{@swf.color}');")
@swf_loaded = true
# We need a small delay to make sure everything is all setup
# on the page before we begin debugging.
sleep 0.5
end
if @debugger then
if @debugger.debugging then
@debugger.each_message do |message|
msg = message.gsub(/(')/) {|match| "\\#{match}"}
out.write("Warptube.Debug.trace('#{msg}');")
end
out.write("$.getScript('/_scripts');")
else
@server.stop
end
else
out.write("$.getScript('/_shutdown');")
end
end
end
end
# When something calls this handler it will stop the server
class ShutdownHandler < Mongrel::HttpHandler
def initialize(server)
raise "Need to have a server to shutdown for this handler to work." unless server
@server = server
end
def process(request, response)
response.start(200) {}
@server.stop
end
end
# Shows a page with all the compiler messages, this is used usually when
# a compile fails and we don't open the runtime page
def FlashMate.display_messages messages
puts html_head(:window_title => "ActionScript Compiler Messages",
:page_title => "ActionScript Compiler Messages")
messages.each do |message|
puts message.to_debug
puts "<br />"
end
html_footer
end
# Causes a JS redirect inside a TextMate HTML window to the specified URL.
def FlashMate.open_page url
puts <<PLAYER
<html>
<body onload="document.location.href = '#{url}'"></body>
</html>
PLAYER
end
def FlashMate.namespace
print ENV["TM_NEW_FILE_DIRECTORY"].sub(/#{ENV["TM_PROJECT_DIRECTORY"]}\/*/, "").gsub(/\//, ".") + " "
end
end
# Extend Hash so we have our conversion to our debug output
class Hash
  # Render a compiler-message hash (:type, :message, :file, :line,
  # :column) as an HTML fragment with a txmt:// link that opens the file
  # at the referenced position in TextMate.  Missing keys are skipped.
  def to_debug
    # return "[No message]" unless self[:file] || self[:message]
    message = ""
    message += "<em>#{self[:type]}:</em> " if self[:type]
    message += self[:message] if self[:message]
    message += "<br />"
    if self[:file]
      link_message = "#{self[:file]}"
      link = "txmt://open?url=file://#{self[:file]}"
      if self[:line]
        link += "&line=#{self[:line]}"
        link_message += " - (#{self[:line]})"
      end
      link += "&column=#{self[:column]}" if self[:column]
      message += " <a href=\"#{link}\">#{link_message}</a>"
    end
    # TODO: Parse extra data if it exists.
    return message
  end
end
<reponame>Symphoomc1f/krexusj
/*
* Copyright 2017-2020 吴学文 and java110 team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.java110.things.extApi.attendance;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.java110.things.Controller.BaseController;
import com.java110.things.adapt.attendance.IAttendanceService;
import com.java110.things.entity.attendance.AttendanceClassesAttrDto;
import com.java110.things.entity.attendance.AttendanceClassesDto;
import com.java110.things.entity.attendance.AttendanceClassesStaffDto;
import com.java110.things.entity.attendance.AttendanceClassesTaskDto;
import com.java110.things.entity.response.ResultDto;
import com.java110.things.entity.user.StaffDto;
import com.java110.things.service.community.ICommunityService;
import com.java110.things.service.staff.IStaffService;
import com.java110.things.util.Assert;
import com.java110.things.util.BeanConvertUtil;
import com.java110.things.util.DateUtil;
import com.java110.things.util.SeqUtil;
import com.java110.things.util.StringUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
/**
* 考勤 控制类
* <p>
* 完成考勤添加 修改 删除 查询功能
* <p>
* add by wuxw 2020-12-17
*/
@RestController
@RequestMapping(path = "/extApi/attendance")
public class AttendanceExtController extends BaseController {
@Autowired
IAttendanceService attendanceServiceImpl;
@Autowired
private IStaffService staffServiceImpl;
@Autowired
ICommunityService communityServiceImpl;
    /**
     * Add an attendance class (shift) definition.
     * <p>
     * Expects a JSON body containing classesName, timeOffset, clockCount,
     * clockType, clockTypeValue, lateOffset, leaveOffset, extClassesId,
     * taskId, and a non-empty "attrs" array of attribute objects.
     *
     * @param reqParam raw JSON request body (fields listed above)
     * @return success or failure result wrapped in a ResponseEntity
     * @throws Exception on validation or service errors
     */
    @RequestMapping(path = "/addAttendanceClass", method = RequestMethod.POST)
    public ResponseEntity<String> addAttendanceClass(@RequestBody String reqParam) throws Exception {
        JSONObject reqJson = JSONObject.parseObject(reqParam);
        // Validate required fields; messages are user-facing (kept verbatim).
        Assert.hasKeyAndValue(reqJson, "classesName", "未包含考勤班组");
        Assert.hasKeyAndValue(reqJson, "timeOffset", "未包含打卡范围");
        Assert.hasKeyAndValue(reqJson, "clockCount", "未包含打卡次数");
        Assert.hasKeyAndValue(reqJson, "clockType", "未包含打卡类型");
        Assert.hasKeyAndValue(reqJson, "clockTypeValue", "未包含打卡规则");
        Assert.hasKeyAndValue(reqJson, "lateOffset", "未包含迟到时间");
        Assert.hasKeyAndValue(reqJson, "leaveOffset", "未包含早退时间");
        Assert.hasKeyAndValue(reqJson, "extClassesId", "未包含外部班次ID");
        Assert.hasKeyAndValue(reqJson, "taskId", "未包含任务ID");
        if (!reqJson.containsKey("attrs")) {
            throw new IllegalArgumentException("未包含属性");
        }
        JSONArray attrs = reqJson.getJSONArray("attrs");
        if (attrs.size() < 1) {
            throw new IllegalArgumentException("未包含属性");
        }
        // Build the class entity with a freshly generated id, then one
        // attribute entity per element of "attrs", all linked to it.
        AttendanceClassesDto attendanceClassesDto = BeanConvertUtil.covertBean(reqJson, AttendanceClassesDto.class);
        attendanceClassesDto.setClassesId(SeqUtil.getId());
        List<AttendanceClassesAttrDto> attendanceClassesAttrDtos = new ArrayList<>();
        for (int attrIndex = 0; attrIndex < attrs.size(); attrIndex++) {
            JSONObject attrObj = attrs.getJSONObject(attrIndex);
            AttendanceClassesAttrDto attendanceClassesAttrDto = BeanConvertUtil.covertBean(attrObj, AttendanceClassesAttrDto.class);
            attendanceClassesAttrDto.setClassesId(attendanceClassesDto.getClassesId());
            attendanceClassesAttrDto.setAttrId(SeqUtil.getId());
            attendanceClassesAttrDtos.add(attendanceClassesAttrDto);
        }
        ResultDto result = attendanceServiceImpl.insertAttendanceClassesDto(attendanceClassesDto, attendanceClassesAttrDtos);
        return ResultDto.createResponseEntity(result);
    }
    /**
     * Update an existing attendance class (shift) definition.
     * <p>
     * The class is located via its external id (extClassesId); its fields
     * are overwritten from the request and its attributes are replaced
     * wholesale (delete all, then re-insert from "attrs").
     * <p>
     * Expects the same JSON body shape as addAttendanceClass.
     *
     * @param reqParam raw JSON request body
     * @return success or failure result wrapped in a ResponseEntity
     * @throws Exception on validation or service errors
     */
    @RequestMapping(path = "/updateAttendanceClass", method = RequestMethod.POST)
    public ResponseEntity<String> updateAttendanceClass(@RequestBody String reqParam) throws Exception {
        JSONObject reqJson = JSONObject.parseObject(reqParam);
        // Validate required fields; messages are user-facing (kept verbatim).
        Assert.hasKeyAndValue(reqJson, "classesName", "未包含考勤班组");
        Assert.hasKeyAndValue(reqJson, "timeOffset", "未包含打卡范围");
        Assert.hasKeyAndValue(reqJson, "clockCount", "未包含打卡次数");
        Assert.hasKeyAndValue(reqJson, "clockType", "未包含打卡类型");
        Assert.hasKeyAndValue(reqJson, "clockTypeValue", "未包含打卡规则");
        Assert.hasKeyAndValue(reqJson, "lateOffset", "未包含迟到时间");
        Assert.hasKeyAndValue(reqJson, "leaveOffset", "未包含早退时间");
        Assert.hasKeyAndValue(reqJson, "extClassesId", "未包含外部班次ID");
        Assert.hasKeyAndValue(reqJson, "taskId", "未包含任务ID");
        // Resolve the internal class id from the external one; must be unique.
        AttendanceClassesDto attendanceClassesDto = new AttendanceClassesDto();
        attendanceClassesDto.setExtClassesId(reqJson.getString("extClassesId"));
        List<AttendanceClassesDto> attendanceClassesDtos = attendanceServiceImpl.getAttendanceClasses(attendanceClassesDto);
        Assert.listOnlyOne(attendanceClassesDtos, "不存在考勤班组");
        if (!reqJson.containsKey("attrs")) {
            throw new IllegalArgumentException("未包含属性");
        }
        JSONArray attrs = reqJson.getJSONArray("attrs");
        if (attrs.size() < 1) {
            throw new IllegalArgumentException("未包含属性");
        }
        attendanceClassesDto = BeanConvertUtil.covertBean(reqJson, AttendanceClassesDto.class);
        attendanceClassesDto.setStatusCd("0");
        attendanceClassesDto.setClassesId(attendanceClassesDtos.get(0).getClassesId());
        ResultDto result = attendanceServiceImpl.updateAttendanceClasses(attendanceClassesDto);
        if (result.getCode() != ResultDto.SUCCESS) {
            return ResultDto.createResponseEntity(result);
        }
        // NOTE(review): the class row is updated before the attrs are
        // replaced, and the operations are not in one transaction here —
        // a failure mid-way leaves partially updated data; confirm the
        // service layer provides atomicity.
        AttendanceClassesAttrDto attendanceClassesAttrDto = new AttendanceClassesAttrDto();
        attendanceClassesAttrDto.setClassesId(attendanceClassesDtos.get(0).getClassesId());
        attendanceServiceImpl.deleteAttendanceClassesAttrDto(attendanceClassesAttrDto);
        for (int attrIndex = 0; attrIndex < attrs.size(); attrIndex++) {
            JSONObject attrObj = attrs.getJSONObject(attrIndex);
            attendanceClassesAttrDto = BeanConvertUtil.covertBean(attrObj, AttendanceClassesAttrDto.class);
            attendanceClassesAttrDto.setClassesId(attendanceClassesDtos.get(0).getClassesId());
            attendanceClassesAttrDto.setAttrId(SeqUtil.getId());
            attendanceServiceImpl.saveAttendanceClassesAttrDto(attendanceClassesAttrDto);
        }
        return ResultDto.createResponseEntity(result);
    }
    /**
     * Delete an attendance class (shift) definition.
     * <p>
     * The class is located via its external id (extClassesId); its
     * attributes are removed first, then the class itself.
     *
     * @param reqParam JSON body with extClassesId and taskId
     * @return success or failure result wrapped in a ResponseEntity
     * @throws Exception on validation or service errors
     */
    @RequestMapping(path = "/deleteAttendanceClass", method = RequestMethod.POST)
    public ResponseEntity<String> deleteAttendanceClass(@RequestBody String reqParam) throws Exception {
        JSONObject reqJson = JSONObject.parseObject(reqParam);
        Assert.hasKeyAndValue(reqJson, "extClassesId", "未包含外部考勤班次ID");
        Assert.hasKeyAndValue(reqJson, "taskId", "未包含任务ID");
        // Resolve the internal class record; must exist and be unique.
        AttendanceClassesDto attendanceClassesDto = new AttendanceClassesDto();
        attendanceClassesDto.setExtClassesId(reqJson.getString("extClassesId"));
        List<AttendanceClassesDto> attendanceClassesDtos = attendanceServiceImpl.getAttendanceClasses(attendanceClassesDto);
        Assert.listOnlyOne(attendanceClassesDtos, "不存在考勤班组");
        // Remove dependent attribute rows before the class row.
        AttendanceClassesAttrDto attendanceClassesAttrDto = new AttendanceClassesAttrDto();
        attendanceClassesAttrDto.setClassesId(attendanceClassesDtos.get(0).getClassesId());
        attendanceServiceImpl.deleteAttendanceClassesAttrDto(attendanceClassesAttrDto);
        ResultDto resultDto = attendanceServiceImpl.deleteAttendanceClassesDto(attendanceClassesDtos.get(0));
        return ResultDto.createResponseEntity(resultDto);
    }
/**
 * Batch entry point for adding attendance staff assignments.
 * <p>
 * Accepts a JSON array whose elements each have the same shape as the body of
 * {@code addAttendanceClassStaff}: extClassesId, extStaffId, staffName,
 * departmentId, departmentName, taskId.
 * <p>
 * NOTE(review): each element is delegated to the single-record endpoint and
 * individual results are discarded — the overall response is always success
 * unless an element throws. Confirm this best-effort behavior is intended.
 *
 * @param reqParam JSON array of staff-assignment payloads
 * @return success
 * @throws Exception propagated from the first failing element
 */
@RequestMapping(path = "/addAttendanceClassStaffs", method = RequestMethod.POST)
public ResponseEntity<String> addAttendanceClassStaffs(@RequestBody String reqParam) throws Exception {
JSONArray datas = JSONArray.parseArray(reqParam);
// Delegate each array element to the single-record endpoint.
for (int dataIndex = 0; dataIndex < datas.size(); dataIndex++) {
addAttendanceClassStaff(datas.get(dataIndex).toString());
}
return ResultDto.success();
}
/**
 * Add one staff member to an attendance class (shift group).
 * <p>
 * Expected request body keys:
 *   extClassesId   - external attendance class id
 *   extStaffId     - external staff id
 *   staffName      - staff display name
 *   departmentId   - department id
 *   departmentName - department name
 *   taskId         - sync task id
 * <p>
 * Creates the staff record if it does not exist yet, then links the staff
 * to the class unless the link already exists (idempotent on re-delivery).
 *
 * @param reqParam JSON payload as described above
 * @return success or failure
 * @throws Exception on validation failure or service errors
 */
@RequestMapping(path = "/addAttendanceClassStaff", method = RequestMethod.POST)
public ResponseEntity<String> addAttendanceClassStaff(@RequestBody String reqParam) throws Exception {
JSONObject reqJson = JSONObject.parseObject(reqParam);
Assert.hasKeyAndValue(reqJson, "extClassesId", "未包含外部考勤班组ID");
Assert.hasKeyAndValue(reqJson, "extStaffId", "未包含外部员工ID");
Assert.hasKeyAndValue(reqJson, "staffName", "未包含员工名称");
Assert.hasKeyAndValue(reqJson, "departmentId", "未包含部门ID");
Assert.hasKeyAndValue(reqJson, "departmentName", "未包含部门名称");
Assert.hasKeyAndValue(reqJson, "taskId", "未包含任务ID");
StaffDto staffDto = new StaffDto();
staffDto.setExtStaffId(reqJson.getString("extStaffId"));
// Check whether the staff member already exists; create it if not.
List<StaffDto> staffDtos = attendanceServiceImpl.queryStaffs(staffDto);
String staffId = "";
if (staffDtos == null || staffDtos.size() < 1) {
StaffDto tmpStaffDto = BeanConvertUtil.covertBean(reqJson, StaffDto.class);
tmpStaffDto.setStaffId(SeqUtil.getId());
staffServiceImpl.saveStaff(tmpStaffDto);
staffId = tmpStaffDto.getStaffId();
} else {
staffId = staffDtos.get(0).getStaffId();
}
// Resolve the attendance class by its external id; exactly one must exist.
AttendanceClassesDto attendanceClassesDto = new AttendanceClassesDto();
attendanceClassesDto.setExtClassesId(reqJson.getString("extClassesId"));
List<AttendanceClassesDto> attendanceClassesDtos = attendanceServiceImpl.getAttendanceClasses(attendanceClassesDto);
Assert.listOnlyOne(attendanceClassesDtos, "不存在考勤班组");
// Check whether the staff member is already linked to this class.
AttendanceClassesStaffDto attendanceClassesStaffDto = new AttendanceClassesStaffDto();
attendanceClassesStaffDto.setClassesId(attendanceClassesDtos.get(0).getClassesId());
attendanceClassesStaffDto.setStaffId(staffId);
List<AttendanceClassesStaffDto> attendanceClassesStaffDtos = attendanceServiceImpl.queryClassStaffs(attendanceClassesStaffDto);
// Already linked to this class — nothing to do.
if (attendanceClassesStaffDtos != null && attendanceClassesStaffDtos.size() > 0) {
return ResultDto.success();
}
attendanceClassesStaffDto = BeanConvertUtil.covertBean(reqJson, AttendanceClassesStaffDto.class);
attendanceClassesStaffDto.setStaffId(staffId);
attendanceClassesStaffDto.setClassesId(attendanceClassesDtos.get(0).getClassesId());
attendanceClassesStaffDto.setCsId(SeqUtil.getId());
ResultDto resultDto = attendanceServiceImpl.saveClassStaff(attendanceClassesStaffDto);
return ResultDto.createResponseEntity(resultDto);
}
/**
 * Remove a staff member from an attendance class (shift group).
 * <p>
 * Expected request body keys:
 *   extClassesId - external attendance class id
 *   extStaffId   - external staff id
 *   taskId       - sync task id
 *   deleteStaff  - optional; when "1", the staff record itself is also deleted
 *
 * @param reqParam JSON payload as described above
 * @return success or failure
 * @throws Exception on validation failure or service errors
 */
@RequestMapping(path = "/deleteAttendanceClassStaff", method = RequestMethod.POST)
public ResponseEntity<String> deleteAttendanceClassStaff(@RequestBody String reqParam) throws Exception {
JSONObject reqJson = JSONObject.parseObject(reqParam);
Assert.hasKeyAndValue(reqJson, "extClassesId", "未包含外部考勤班组ID");
Assert.hasKeyAndValue(reqJson, "extStaffId", "未包含外部员工ID");
Assert.hasKeyAndValue(reqJson, "taskId", "未包含任务ID");
StaffDto staffDto = new StaffDto();
staffDto.setExtStaffId(reqJson.getString("extStaffId"));
// The staff member must already exist.
List<StaffDto> staffDtos = attendanceServiceImpl.queryStaffs(staffDto);
Assert.listOnlyOne(staffDtos, "员工不存在");
// Resolve the attendance class by its external id; exactly one must exist.
AttendanceClassesDto attendanceClassesDto = new AttendanceClassesDto();
attendanceClassesDto.setExtClassesId(reqJson.getString("extClassesId"));
List<AttendanceClassesDto> attendanceClassesDtos = attendanceServiceImpl.getAttendanceClasses(attendanceClassesDto);
Assert.listOnlyOne(attendanceClassesDtos, "不存在考勤班组");
// Look up the link between this staff member and this class.
AttendanceClassesStaffDto attendanceClassesStaffDto = new AttendanceClassesStaffDto();
attendanceClassesStaffDto.setClassesId(attendanceClassesDtos.get(0).getClassesId());
attendanceClassesStaffDto.setStaffId(staffDtos.get(0).getStaffId());
List<AttendanceClassesStaffDto> attendanceClassesStaffDtos = attendanceServiceImpl.queryClassStaffs(attendanceClassesStaffDto);
// No link found — nothing to delete on this side.
if (attendanceClassesStaffDtos == null || attendanceClassesStaffDtos.size() < 1) {
return ResultDto.error("物联网系统不存在考勤信息");
}
attendanceClassesStaffDto = BeanConvertUtil.covertBean(reqJson, AttendanceClassesStaffDto.class);
attendanceClassesStaffDto.setCsId(attendanceClassesStaffDtos.get(0).getCsId());
ResultDto resultDto = attendanceServiceImpl.deleteClassStaff(attendanceClassesStaffDto);
// Optionally also remove the staff record itself when deleteStaff == "1".
if (reqJson.containsKey("deleteStaff") && "1".equals(reqJson.getString("deleteStaff"))) {
staffServiceImpl.deleteStaff(staffDtos.get(0));
}
return ResultDto.createResponseEntity(resultDto);
}
/**
 * Query attendance tasks, optionally filtered by shift, staff name,
 * date and department.
 *
 * @param page         page number
 * @param row          page size
 * @param extClassesId optional external shift id, resolved to the internal class id
 * @param staffName    optional staff name filter
 * @param date         optional date string (format DateUtil.DATE_FORMATE_STRING_B)
 * @param departmentId optional department id filter
 * @return paged attendance task list
 * @throws Exception on validation failure or service errors
 */
@RequestMapping(path = "/getAttendanceTasks", method = RequestMethod.GET)
public ResponseEntity<String> getAttendanceTasks(
@RequestParam int page,
@RequestParam int row,
@RequestParam(name = "extClassesId", required = false) String extClassesId,
@RequestParam(name = "staffName", required = false) String staffName,
@RequestParam(name = "date", required = false) String date,
@RequestParam(name = "departmentId", required = false) String departmentId
) throws Exception {
    AttendanceClassesTaskDto taskQuery = new AttendanceClassesTaskDto();
    taskQuery.setPage(page);
    taskQuery.setRow(row);

    // Translate the external shift id into the internal class id, if provided.
    if (!StringUtil.isEmpty(extClassesId)) {
        AttendanceClassesDto classQuery = new AttendanceClassesDto();
        classQuery.setExtClassesId(extClassesId);
        List<AttendanceClassesDto> classes = attendanceServiceImpl.getAttendanceClasses(classQuery);
        Assert.listOnlyOne(classes, "不存在考勤班组");
        taskQuery.setClassId(classes.get(0).getClassesId());
    }
    taskQuery.setStaffName(staffName);
    taskQuery.setDepartmentId(departmentId);

    // Split the requested date into separate year / month / day filter fields.
    if (!StringUtil.isEmpty(date)) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(DateUtil.getDateFromString(date, DateUtil.DATE_FORMATE_STRING_B));
        taskQuery.setTaskYear(String.valueOf(calendar.get(Calendar.YEAR)));
        taskQuery.setTaskMonth(String.valueOf(calendar.get(Calendar.MONTH) + 1));
        taskQuery.setTaskDay(String.valueOf(calendar.get(Calendar.DAY_OF_MONTH)));
    }
    return super.createResponseEntity(attendanceServiceImpl.getAttendanceTasks(taskQuery));
}
}
|
# Dump the admin-module tables (data only) into database/admin.sql using the
# connection settings from the local .env file.
source ./.env
# Tables belonging to the admin module; dumped in this order.
tables="admin_menu admin_permissions admin_role_menu admin_role_permissions admin_role_users admin_roles admin_user_permissions admin_users"
# -t: data only (no CREATE TABLE statements). $tables is intentionally
# unquoted so it word-splits into one argument per table name.
# NOTE(review): --password on the command line is visible in `ps`; consider a
# MySQL option file (--defaults-extra-file) instead.
mysqldump --host="${DB_HOST}" --port="${DB_PORT}" --user="${DB_USERNAME}" --password="${DB_PASSWORD}" -t "${DB_DATABASE}" ${tables} > database/admin.sql
|
#!/bin/bash
# Container entrypoint: wait for the database to finish initializing, then
# serve the rssant WSGI application with gunicorn.
set -ex
/app/box/bin/wait-initdb.sh
# exec replaces this shell with gunicorn so the server receives SIGTERM/SIGINT
# directly on container shutdown and can stop gracefully (without exec the
# signal would be delivered to the shell, not gunicorn).
exec gunicorn -b 0.0.0.0:6788 --threads 30 \
    --forwarded-allow-ips '*' \
    --access-logfile - \
    --error-logfile - \
    --log-level info \
    rssant.wsgi
|
def generate_form(fields: list[tuple[str, dict]]) -> dict:
    """Build a category -> field-list mapping from (category, spec) pairs.

    Each ``spec`` is a mapping with a ``"fields"`` entry whose items are
    copied into a fresh list keyed by the category name. If a category
    appears more than once, the last occurrence wins, mirroring plain
    dict assignment.
    """
    return {category: list(spec["fields"]) for category, spec in fields}
def validate_field(form: dict, field: str, category: str) -> bool:
    """Return True when ``field`` is registered under ``category`` in ``form``.

    Unknown categories are treated as having no fields.
    """
    allowed = form.get(category, [])
    return field in allowed
#!/bin/sh
# Launch jconsole with the JBoss remoting client jar on the classpath so it
# can connect to a WildFly/JBoss server over the management protocol.
DIRNAME=`dirname "$0"`
GREP="grep"
. "$DIRNAME/common.sh"

# Use the maximum available, or set MAX_FD != -1 to use that
MAX_FD="maximum"

# OS specific support (must be 'true' or 'false').
cygwin=false;
darwin=false;
linux=false;
case "`uname`" in
    CYGWIN*)
        cygwin=true
        ;;
    Darwin*)
        darwin=true
        ;;
    Linux)
        linux=true
        ;;
esac

# For Cygwin, ensure paths are in UNIX format before anything is touched
if $cygwin ; then
    [ -n "$JBOSS_HOME" ] &&
        JBOSS_HOME=`cygpath --unix "$JBOSS_HOME"`
    [ -n "$JAVA_HOME" ] &&
        JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
    [ -n "$JAVAC_JAR" ] &&
        JAVAC_JAR=`cygpath --unix "$JAVAC_JAR"`
fi

# Setup JBOSS_HOME: default to the parent of this script's directory.
RESOLVED_JBOSS_HOME=`cd "$DIRNAME/.."; pwd`
if [ "x$JBOSS_HOME" = "x" ]; then
    # get the full path (without any relative bits)
    JBOSS_HOME=$RESOLVED_JBOSS_HOME
else
    SANITIZED_JBOSS_HOME=`cd "$JBOSS_HOME"; pwd`
    if [ "$RESOLVED_JBOSS_HOME" != "$SANITIZED_JBOSS_HOME" ]; then
        echo "WARNING JBOSS_HOME may be pointing to a different installation - unpredictable results may occur."
        echo ""
    fi
fi
export JBOSS_HOME

# Setup the JVM: if JAVA_HOME is unset, derive it from the jconsole binary on
# PATH, following symlinks (e.g. /etc/alternatives on Linux).
if [ "x$JAVA_HOME" = x ]; then
    fail_java_home () {
        echo "JAVA_HOME is not set. Unable to locate the jars needed to run jconsole."
        exit 2
    }
    JCONSOLE_PATH=`which jconsole` || fail_java_home
    # make sure readlink is present; discard its path so it does not pollute stdout
    which readlink > /dev/null || fail_java_home
    JCONSOLE_TEST=`readlink "$JCONSOLE_PATH"`
    while [ x"$JCONSOLE_TEST" != x ]; do
        JCONSOLE_PATH="$JCONSOLE_TEST"
        JCONSOLE_TEST=`readlink "$JCONSOLE_PATH"`
    done
    # jconsole lives in $JAVA_HOME/bin, so strip two path components.
    JAVA_HOME=`dirname "$JCONSOLE_PATH"`
    JAVA_HOME=`dirname "$JAVA_HOME"`
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin; then
    JBOSS_HOME=`cygpath --path --windows "$JBOSS_HOME"`
    JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
fi

if [ ! -f "$JBOSS_HOME/bin/client/jboss-cli-client.jar" ]; then
    echo "WARNING Jar not found: \"$JBOSS_HOME/bin/client/jboss-cli-client.jar\""
    echo "WARNING If this jar is missing, jconsole will fail to connect to the server."
    exit 2
fi

cd "$JBOSS_HOME"

# Detect a modular (Java 9+) JDK by probing --add-modules support.
"$JAVA_HOME/bin/java" --add-modules=java.se -version > /dev/null 2>&1 && MODULAR_JDK=true || MODULAR_JDK=false
if [ "$MODULAR_JDK" = "true" ]; then
    # Quoted so a JAVA_HOME containing spaces does not break the invocation.
    "$JAVA_HOME/bin/jconsole" -J--add-modules=jdk.unsupported -J-Djava.class.path=./bin/client/jboss-cli-client.jar "$@"
else
    # Pre-9 JDK: jconsole needs jconsole.jar + tools.jar plus the client jar.
    CLASSPATH=$JAVA_HOME/lib/jconsole.jar
    CLASSPATH=$CLASSPATH:$JAVA_HOME/lib/tools.jar
    CLASSPATH=$CLASSPATH:./bin/client/jboss-cli-client.jar
    echo CLASSPATH $CLASSPATH
    "$JAVA_HOME/bin/jconsole" -J-Djava.class.path="$CLASSPATH" "$@"
fi
|
<filename>app/src/main/java/com/piercelbrooks/common/BasicServiceUser.java
// Author: <NAME>
package com.piercelbrooks.common;
/**
 * Implemented by components that are bound to a {@link BasicService}.
 *
 * @param <T> the concrete service type
 */
public interface BasicServiceUser<T extends BasicService<T>>
{
    /** Returns the class of the service this user is bound to. */
    Class<?> getServiceClass();

    /** Returns the service instance. */
    T getService();
}
|
# Build and install the package with setuptools/distutils.
# NOTE(review): direct `setup.py install` invocation is deprecated in modern
# setuptools; prefer `pip install .` — confirm the target environment's tooling.
python setup.py build
python setup.py install
|
# Configure the environment for a conda-forge feedstock build on Azure Pipelines
# and hand off to the dockerized build script.
export CI=azure
# BUILD_SOURCEBRANCHNAME and BUILD_REPOSITORY_NAME are predefined Azure
# Pipelines variables; the feedstock name is the repo's basename.
export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME
export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME})
.scripts/run_docker_build.sh
|
import React from "react";
import Navbar from "../components/Navbar";
// Static "About us" page: renders the shared navbar followed by a centered
// mission statement. Styling is page-scoped via styled-jsx; the CSS custom
// properties (--mainfont, --logofont) are presumably defined in a global
// stylesheet — confirm against the app's global CSS.
export default function About() {
return (
<>
<Navbar />
<div className="content">
<h1 className="header">About us</h1>
<p className="desc">
We are a Charitable Website.
<br />
We are a Community Based Website.
<br />
We are an Open-Source Website.
<br />
We are a User Friendly Website.
<br />
<br />
We care about our users.
<br />
We care about our experts.
<br />
We care about our website.
<br />
<br />
Our goal is to help others.
<br />
Our goal is to make it "alright"
<br />
Our goal is to make the world a better place.
<br />
<br />
<b style={{ fontSize: "1.3em" }}>
We are Itsalright
</b>
<br />
<br />
~ <NAME>, Founder of Itsalright
</p>
</div>
{/* Page-scoped styles; these class names only apply within this component. */}
<style jsx>
{`
.header {
font-family: var(--mainfont);
font-weight: 200;
text-align: center;
margin-bottom: 10px;
}
.desc {
font-family: var(--mainfont);
text-align: center;
}
.logo {
font-family: var(--logofont);
}
`}
</style>
</>
);
}
|
<reponame>AlvaWang/spring-may
package net.bambooslips.demo.jpa.repository;
import net.bambooslips.demo.jpa.model.PatentList;
import net.bambooslips.demo.jpa.model.UnitEssential;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import javax.transaction.Transactional;
import java.util.List;
/**
 * Spring Data JPA repository for {@link PatentList} rows.
 * <p>
 * Created by Administrator on 2017/4/21.
 * <p>
 * NOTE(review): the class-level @Transactional makes every repository call
 * run in its own transaction; confirm transaction demarcation is not meant
 * to live in the service layer instead.
 */
@Transactional
public interface PatentListRepository extends JpaRepository<PatentList, Long> {
/**
 * Look up a single patent by its business key.
 * @param patentId external patent identifier
 * @return the matching row, or null when none exists
 */
@Query(
"Select pl FROM PatentList pl WHERE pl.patentId=:patentId "
)
PatentList findByPatentId(@Param("patentId") String patentId);
/**
 * Look up a single patent by primary key.
 * NOTE(review): this hides CrudRepository#findById(ID), which returns
 * Optional in Spring Data 2.x — verify compatibility with the Spring Data
 * version in use before upgrading.
 */
@Query(
"Select pl FROM PatentList pl WHERE pl.id=:id "
)
PatentList findById(@Param("id") Long id);
/**
 * All patents belonging to one "entire" record.
 * @param entireId parent record id
 */
@Query(
"Select pl FROM PatentList pl WHERE pl.entireId=:entireId "
)
List<PatentList> findListByEntireId(@Param("entireId") Long entireId);
}
|
<reponame>buffbabyfinn/newmovietickets
// Represents a movie ticket order.
// Fix: the original assigned `this.movieYear = movieYear` from an undeclared
// variable, which threw a ReferenceError on every construction. movieYear is
// now an explicit trailing parameter (backward compatible: existing 4-arg
// callers get movieYear === undefined, so the pre-2000 discount check in
// price() simply evaluates false for them).
function Ticket(movieName, time, age, quantity, movieYear) {
  this.movieName = movieName;   // title shown on the ticket
  this.time = time;             // showtime string, e.g. "6:00"
  this.age = age;               // purchaser age (number)
  this.quantity = quantity;     // number of tickets
  this.movieYear = movieYear;   // release year, used for the classic-movie discount
}
// Total price: $10 per ticket, with a 20% discount for early shows,
// seniors (65+), or movies released before 2000.
// NOTE(review): `this.time < "6:00"` is a lexicographic string comparison —
// "10:00" < "6:00" is true, so double-digit hours also get the discount.
// Confirm the expected time format before relying on this check.
Ticket.prototype.price = function() {
var setTicketPrice = 10;
if (this.time < "6:00" || this.age >= 65 || this.movieYear < 2000) {
return (setTicketPrice * 0.8 * this.quantity);
} else {
return setTicketPrice * this.quantity;
}
}
//
// Ticket.prototype.movieYears = function() {
// var starWars = { name: "<NAME>", year: 1979 };
// var superman = { name: "Superman", year: 2001 };
// var spiritedAway = { name: "Spirited Away", year: 1995 };
//
// var movies = { movieList: [starWars, superman, spiritedAway] };
//
// movies.movieList.forEach(function(movie) {
// var movieYear = movie.year;
// return movieYear;
// });
// }
//
// $(function() {
// $("#buyButton").click(function(event) {
// event.preventDefault();
// var age = parseInt($("input#age").val());
// var movies = $('select#movieTitles').val();
// var time = $('select#time').val();
// var quantity = parseInt($('input#quantity').val());
// var newTicket = new Ticket (movies,time,age,quantity);
// $('#ticketInfo').empty();
// $('#ticketPrice').empty();
// $("#ticketInfo").append("<li>Your age: " + age + "</li>" + "<li>Movie Title: " + movies + "</li>" + "<li>Time: " + time + "</li>");
//
// $("#ticketPrice").append("<li>$" + newTicket.price() + ".00</li>");
//
// });
// });
|
#!/bin/bash
# Grid search over TCA kernel types and mu values for cancer-type
# classification, run once per gene of interest, followed by a control run
# without TCA for each gene. Command strings and execution order match the
# original fully-unrolled script.
n_components=500
genes="CDKN2A TP53"

# run_tca <kernel_type> <mu> <gene>
# One TCA classification run; stderr goes to tca_errors.txt (inside the
# eval'd command string, as in the original).
run_tca() {
    local kernel_type="$1" mu="$2" gene="$3"
    local cmd="python 02_cancer_type_classification/run_cancer_type_classification.py "
    cmd+="--tca "
    cmd+="--tca_mu $mu "
    cmd+="--tca_kernel_type $kernel_type "
    cmd+="--tca_n_components $n_components "
    cmd+="--custom_genes $gene "
    cmd+="--gene_set custom "
    cmd+="--holdout_cancer_types LGG "
    cmd+="--pancancer_only "
    cmd+="--results_dir results/tca/tca_results_${kernel_type}_${mu} "
    cmd+="--subset_mad_genes 5000 "
    cmd+="2>tca_errors.txt "
    echo "Running: $cmd"
    eval $cmd
}

# run_control <gene>
# Baseline classification run without TCA, written to the control results dir.
run_control() {
    local gene="$1"
    local cmd="python 02_cancer_type_classification/run_cancer_type_classification.py "
    cmd+="--custom_genes $gene "
    cmd+="--gene_set custom "
    cmd+="--holdout_cancer_types LGG "
    cmd+="--pancancer_only "
    cmd+="--results_dir results/tca/tca_control "
    cmd+="--subset_mad_genes 5000 "
    cmd+="2>tca_errors.txt "
    echo "Running: $cmd"
    eval $cmd
}

for kernel_type in linear rbf; do
    for mu in 0.1 1 10 100; do
        for gene in $genes; do
            run_tca "$kernel_type" "$mu" "$gene"
        done
    done
done

for gene in $genes; do
    run_control "$gene"
done
|
#!/usr/bin/env bash
# This script automates sending a comment to all open issues and PRs in
# a milestone.
# Usage: <script> <milestone-title> — requires an authenticated `hub` CLI.
set -euo pipefail
# Remove the temporary comment file on any exit path.
trap "rm -f comment.txt" EXIT
cat > comment.txt <<EOF
Dear all,
This is your release captain speaking. 🚂🚂🚂
Branch cut for the **$1 release is scheduled for tomorrow**.
Is this issue / PR going to make it in time? Please change the milestone accordingly.
When in doubt, reach out!
Thank you
EOF
# List open issues and PRs in milestone "$1", one number per line.
issues=$(hub issue --include-pulls -M "$1" -f "%I%n")
for i in $issues; do
# Post the prepared comment body to each issue/PR via the GitHub API.
hub api --flat -XPOST "/repos/sourcegraph/sourcegraph/issues/$i/comments" -F "body=@comment.txt"
done
|
<reponame>imarsman/tail
package main
import (
"testing"
)
// Tests and benchmarks
// -----------------------------------------------------
// benchmark
// go test -run=XXX -bench=. -benchmem
// Get allocation information and pipe to less
// go build -gcflags '-m -m' ./*.go 2>&1 |less
// Run all tests
// go test -v
// Run one test and do allocation profiling
// go test -run=XXX -bench=IterativeISOTimestampLong -gcflags '-m' 2>&1 |less
// Run a specific test by function name pattern
// go test -run=TestParsISOTimestamp
//
// go test -run=XXX -bench=.
// go test -bench=. -benchmem -memprofile memprofile.out -cpuprofile cpuprofile.out
// go tool pprof -http=:8080 memprofile.out
// go tool pprof -http=:8080 cpuprofile.out
/*
Interestingly, this implementation is faster than the official one. It tends
to use more CPU (0.1 vs 0.0) and is much larger though.
$: time tail -30 sample/*.txt >/dev/null
real 0m0.031s
user 0m0.010s
sys 0m0.014s
$: time ./tail -n 30 sample/*.txt >/dev/null
real 0m0.006s
user 0m0.002s
sys 0m0.003s
Native tail does slightly better than this tail with stdin
$: time cat sample/1.txt|tail -n 10 >/dev/null
real 0m0.003s
$: time cat sample/1.txt|gotail -n 10 >/dev/null
real 0m0.006s
*/
const (
// bechmarkBytesPerOp is presumably the bytes-per-operation figure for
// benchmark reporting (e.g. via b.SetBytes) — its usage is not visible in
// this chunk; confirm against the benchmark functions.
// NOTE(review): name looks like a typo of "benchmarkBytesPerOp"; renaming is
// a package-visible change, so it is only flagged here.
bechmarkBytesPerOp int64 = 10
)
// init is intentionally empty; kept as a placeholder for package test setup.
func init() {
}
// TestRLimit calls setrlimit (defined elsewhere in this package) with a limit
// of 1000 and logs the result. It makes no assertions, so it acts only as a
// smoke test that the call does not panic.
func TestRLimit(t *testing.T) {
t.Logf("Limit %+v", setrlimit(1000))
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.