text stringlengths 1 1.05M |
|---|
###############################################################################
#
# Create conda virtual environment for bindz-rbp (dev)
#
# AUTHOR: Maciej_Bak
# AFFILIATION: University_of_Basel
# AFFILIATION: Swiss_Institute_of_Bioinformatics
# CONTACT: maciej.bak@unibas.ch
# CREATED: 04-07-2020
# LICENSE: Apache_2.0
# USAGE: bash create-conda-environment-dev.sh
#
###############################################################################
# Resolve the directory containing this script so the relative path to the
# environment spec works no matter where the script is invoked from.
CWD="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
# Build the dev environment from the YAML spec shipped next to this repo.
conda env create --file "$CWD"/../envs/dev.yml
|
####
#### Taxa assignment
#### 2022.5.12
####
#----------- Taxa assignment using claident -----------#
#cd DADA2_OUTPUT_FOLDER
# Input OTU sequences. NOTE(review): this relative path is resolved AFTER the
# cd below, i.e. against a sibling of ${OUTPUT_FOLDER} — confirm layout.
FASTA_FILE="../03_OTUClusteringOut/OTU_seqs.fa"
OUTPUT_FOLDER="04_TaxaAssignmentOut"
# -p tolerates re-runs; abort if we cannot enter the folder, otherwise every
# claident command below would run in the wrong directory.
mkdir -p "${OUTPUT_FOLDER}"
cd "${OUTPUT_FOLDER}" || exit 1
# Overall_genus
#clmakecachedb 0.9.2021.10.22
clmakecachedb --blastdb=overall_genus --numthreads=72 "${FASTA_FILE}" overall_genus_cache
clidentseq --blastdb=overall_genus_cache --numthreads=72 "${FASTA_FILE}" overall_genus_clidentseq
classigntax --taxdb=overall_genus --maxpopposer=0.10 --minsoratio=9 overall_genus_clidentseq overall_genus_classigntax
# Check overall_class
clmakecachedb --blastdb=overall_class --numthreads=72 "${FASTA_FILE}" overall_class_cache
clidentseq --blastdb=overall_class_cache --numthreads=72 "${FASTA_FILE}" overall_class_clidentseq
classigntax --taxdb=overall_class --maxpopposer=0.10 --minsoratio=9 overall_class_clidentseq overall_class_classigntax
## Merge identification results (overall_class + overall_genus)
clmergeassign --priority=descend overall_genus_classigntax overall_class_classigntax merge_classigntax
# Delete large files
rm -r overall_class_cache
rm -r overall_genus_cache
|
#!/bin/sh
# cd2iso.sh — dump the optical disc in /dev/sr0 to an image file.
# Usage: cd2iso.sh <output>
if [ -z "$1" ]; then
    # Misuse: print the usage line on stderr (it is a diagnostic, not output)
    # and exit non-zero so callers can detect the failure.
    echo "cd2iso.sh <output>" >&2
    exit 1
fi
dd if="/dev/sr0" of="$1"
|
#!/bin/bash -x
# Copy the contents of file $1 into file $2 using cat.
if (($# == 2));
then
    # Quoting protects paths containing spaces or glob characters.
    cat "$1" > "$2"
else
    # Report misuse on stderr and exit non-zero (previously the script
    # printed to stdout and still exited 0 on bad arguments).
    echo "Wrong parameters format!" >&2
    exit 1
fi
|
<filename>SampleBackend/src/main/java/test/backend/www/model/RelativeDistance.java
package test.backend.www.model;
import lombok.Data;
@Data
/**
 * Immutable pairing of an airport with its distance from a base point,
 * ordered for sorting (see compareTo).
 */
public class RelativeDistance implements Comparable<RelativeDistance>
{
// Reference point the distance is measured from.
private final GeoPoint base;
// Distance from base to the airport's position, computed in the constructor.
private final Distance distance;
private final Airport airport;
public RelativeDistance(final GeoPoint base, final Airport airport)
{
this.base = base;
this.airport = airport;
this.distance = base.distanceTo(airport.getPosition());
}
// NOTE(review): both comparisons are written as other-vs-this, which yields
// DESCENDING order by distance (farthest first), with descending airport id
// as the tie-breaker. Confirm this ordering is intentional; "nearest first"
// would compare this-vs-other instead.
@Override
public int compareTo(RelativeDistance other)
{
int partial = other.getDistance().compareTo(getDistance());
if (partial != 0)
{
return partial;
}
else
{
return other.getAirport().getId().compareTo(getAirport().getId());
}
}
}
|
#!/bin/bash
# SLURM batch job: train Double-DDPG (hard target copies, action-noise
# exploration, seed/run 4) on RoboschoolHalfCheetah-v1 inside a CPU TF venv.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHalfCheetah-v1_doule_ddpg_hardcopy_action_noise_seed4_run4_%N-%j.out # %N for node name, %j for jobID
# Load the toolchain the virtualenv was built against, then activate it.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env RoboschoolHalfCheetah-v1 --random-seed 4 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHalfCheetah-v1/doule_ddpg_hardcopy_action_noise_seed4_run4 --continuous-act-space-flag --target-hard-copy-flag
|
from xml.dom.minidom import parseString
from reader.importer.Perseus import PerseusTextImporter
class LexiconImporter:
    """Helpers for pulling Perseus lexicon entries out of a verse."""

    @staticmethod
    def find_perseus_entries(verse):
        """Return the text of every <orth> element in the verse's XML.

        Parses ``verse.original_content`` as XML and extracts the text of
        each ``<orth>`` tag via PerseusTextImporter.getText.
        """
        document = parseString(verse.original_content)
        return [
            PerseusTextImporter.getText(tag.childNodes)
            for tag in document.getElementsByTagName("orth")
        ]
|
<reponame>zarina494/fisrt_git_lesson
# Print the sign of an integer read from stdin: 1, -1, or 0.
number = int(input())
if number > 0:  # positive
    print(1)
elif number < 0:  # negative
    print(-1)
else:
    print(0)  # number == 0
#!/bin/sh
# Launch the MapReduce task-scheduling simulator (MRTest) with its runtime
# classpath (compiled classes, mr/ resources, commons-math, NCL and log4j
# jars), passing mr.properties as the configuration file.
java -cp ./classes:mr/:lib/commons-math-2.0.jar:lib/ncl-sfc.jar:lib/ncl-taskschedsim.jar:lib/log4j-api-2.11.1.jar:lib/log4j-core-2.11.1.jar net.gripps.cloud.mapreduce.main.MRTest mr.properties
|
<filename>StageServer/Room/RoomManager.cpp<gh_stars>0
#include <Core/Utils/ConfigReader.h>
#include <StageServer/Room/RoomManager.h>
#include <StageServer/Room/Room.h>
#include <StageServer/Room/RoomUpdateManager.h>
#include <Logic/Stage/ScriptLoadThread.h>
#include <StageServer/User/User.h>
namespace Lunia {
namespace XRated {
namespace StageServer {
// Builds the room pool ("RoomSize" config entries, default 150), derives the
// room kind from the configured server kind, and creates the update manager.
RoomManager::RoomManager()
: totalCnt(0)
, maxCnt(0)
, poolSize(0)
{
LoggerInstance().Info("RoomManager::RoomManager()");
// Touches the script-loader singleton — presumably to force its
// construction before rooms are created; confirm against ScriptLoadThread.
Logic::ScriptLoaderInstance();
poolSize = ConfigInstance().Get("RoomSize", uint16(150));
rooms.reserve(poolSize);
for (uint16 i = 0; i < poolSize; i++)
rooms.push_back(std::make_shared<Room>(i));
// Map this server's kind onto the kind of rooms it hosts.
switch (ConfigInstance().GetKind()) {
case ServerKind::SquareKind:
roomKind = Common::ROOMKIND::SQUARE;
break;
case ServerKind::PVPKind:
roomKind = Common::ROOMKIND::PVP;
break;
case ServerKind::StageKind:
roomKind = Common::ROOMKIND::STAGE;
break;
default:
LoggerInstance().Exception("RoomKind not valid");
break;
}
roomUpdateMgr = std::make_shared<RoomUpdateManager>();
}
// Stops the script loader thread, releases the whole pool, and logs the
// lifetime counters (peak and total activated rooms).
RoomManager::~RoomManager()
{
Logic::ScriptLoaderInstance().StopThread();
ReleaseAllRooms();
LoggerInstance().Warn(L"RoomManager::~RoomManager() - maxCnt({}), totalCnt({})", maxCnt, totalCnt);
}
void RoomManager::ReleaseAllRooms()
{
AutoLock lock(cs);
for (size_t index = 0; index < poolSize; ++index)
rooms[index].reset();
rooms.clear();
}
// Attach `user` to the room at `roomIndex`, first activating the room on an
// updater thread if it is not running yet. Returns false for an out-of-range
// index; otherwise forwards the join (and password check) to the update
// manager.
bool RoomManager::RoomJoin(uint16 roomIndex, UserSharedPtr user, const std::string& roomPass)
{
    AutoLock lock(cs);
    if (roomIndex >= poolSize) {
        LoggerInstance().Error(L"RoomJoin Failed :room index over roomindex[{}]", roomIndex);
        return false;
    }
    auto& room = rooms.at(roomIndex);
    int threadIndex = room->GetThreadIndex();
    if (threadIndex < 0)
    {
        // BUG FIX: the argument list was wrapped in an extra pair of
        // parentheses, so the comma operator collapsed it and Info() received
        // only the user name instead of the format string and its arguments.
        // (threadIndex is still -1 here; AddRoom assigns it just below.)
        LoggerInstance().Info(L"Add room to updater thread. (roomIndex:{}, threadIndex:{}, {})", roomIndex, threadIndex, user->GetName().c_str());
        threadIndex = roomUpdateMgr->AddRoom(room);
        size_t activeRoomCnt = roomUpdateMgr->GetActiveRoomCnt();
        if (activeRoomCnt > maxCnt)
            maxCnt = activeRoomCnt;
        ++totalCnt;
        //OutputInstance().Info_RoomCreated(roomIndex, room->GetRoomKind(), activeRoomCnt, maxCnt);
        LoggerInstance().Warn(L"a roomCs(roomindex:{}, threadindex:{}, activeRoomCnt:{}) is working by {}", roomIndex, threadIndex, activeRoomCnt, user->GetName().c_str());
    }
    else
    {
        LoggerInstance().Info(L"Already activated room. (roomIndex:{}, threadIndex:{}, Members:{} )", roomIndex, threadIndex, room->UserCount());
    }
    return roomUpdateMgr->JoinUser(threadIndex, room, user, roomPass);
}
/* this method called by UserManager::DBTerminated and UserManager::DBStageMoved */
// Detach `user` from the room at `roomIndex`. When the room becomes empty it
// is removed from its updater thread and cleared. Returns false when the
// index is out of range, the room has no thread, or PartUser fails.
bool RoomManager::RoomOut(uint16 roomIndex, UserSharedPtr user)
{
    AutoLock lock(cs);
    if (roomIndex >= poolSize)
        return false;
    auto& room = rooms.at(roomIndex);
    int threadIndex = room->GetThreadIndex();
    if (threadIndex < 0)
    {
        LoggerInstance().Error(L"the roomCs({}) is already out of thread", room->GetRoomIndex());
        return false;
    }
    if (!roomUpdateMgr->PartUser(threadIndex, room, user))
    {
        // BUG FIX: the message used a printf-style "0x%p" placeholder that
        // this {}-based logger never substitutes, so the room index argument
        // was silently dropped; use "{}" like every other log in this file.
        LoggerInstance().Error(L"a user({}) is failed to part away from the roomCs({}) on threadindex {}", user->GetName().c_str(), room->GetRoomIndex(), threadIndex);
        return false;
    }
    if (room->UserCount() == 0)
    {
        LoggerInstance().Warn(L"a roomCs(roomindex:{}, threadindex:{}) went empty by {}", roomIndex, threadIndex, user->GetName().c_str());
        roomUpdateMgr->DelRoom(room);
        //OutputInstance().Info_RoomReleased(roomIndex, room->GetRoomKind(), NowCnt(), maxCnt);
        room->Clear();
    }
    return true;
}
// Release the room at `roomIndex` if nobody is inside: detach it from its
// updater thread (when attached) and reset its state. Always returns true
// for an in-range index; the `user` argument is not consulted here.
bool RoomManager::ClearRoom(uint16 roomIndex, UserSharedPtr user)
{
    AutoLock lock(cs);
    if (roomIndex >= poolSize)
        return false;
    auto& room = rooms.at(roomIndex);
    if (room->UserCount() == 0)
    {
        const int threadIndex = room->GetThreadIndex();
        if (threadIndex >= 0)
            roomUpdateMgr->DelRoom(room);
        //OutputInstance().Info_RoomReleased(roomIndex, room->GetRoomKind(), NowCnt(), maxCnt);
        room->Clear();
    }
    return true;
}
// Forwards the experience-factor refresh to the room update manager.
void RoomManager::UpdateExpFactor()
{
roomUpdateMgr->UpdateExpFactor();
}
// Forwards a holiday-event start/stop notification to the update manager.
void RoomManager::NoticeHolidayEvent(uint32 eventId, bool start)
{
roomUpdateMgr->NoticeHolidayEvent(eventId, start);
}
bool RoomManager::IsRoomMissionEnded(const uint32 index) const
{
auto& room = rooms.at(index);
return room->IsNowCampfire();
}
bool RoomManager::IsRoomAvailable(const uint32 index, const uint16 id) const
{
auto& room = rooms.at(index);
if (room->GetRoomID() == id)
return true;
else
return false;
}
bool RoomManager::IsUserExist(const std::wstring& userName, const uint32 roomIndex) const
{
auto& room = rooms.at(roomIndex);
return room->IsUserExist(userName);
}
// Number of users in the room at `roomIndex`; out-of-range indices count as
// an empty room (0) rather than an error.
int RoomManager::GetUserCnt(uint16 roomIndex)
{
    return (roomIndex < poolSize) ? rooms.at(roomIndex)->UserCount() : 0;
}
// Number of rooms currently attached to an updater thread.
int RoomManager::NowCnt()
{
return roomUpdateMgr->GetActiveRoomCnt();
}
// Console dump of every active room: "index stageHash/level users",
// four entries per line.
void RoomManager::RoomsInfo()
{
    // [0]2384701928374:1 - 1/4
    printf("\n ---- Room List ----\n");
    RoomSharedPtr room;
    for (size_t i = 0; i < poolSize; ++i)
    {
        room = rooms.at(i);
        if (room->GetThreadIndex() < 0)
            continue;
        // BUG FIX: the format string ended in "{}" (fmt-style) inside printf,
        // so UserCount() was passed but never printed; use %d instead.
        printf("%2d %8d/%2d %d", room->GetIndex(), room->GetCurrentStage().StageGroupHash, room->GetCurrentStage().Level, room->UserCount());
        if (((i + 1) % 4) == 0)
            printf("\n");
        else
            printf(" ");
    }
    printf("\n");
}
// Stops the room updater threads via the update manager.
void RoomManager::Stop()
{
roomUpdateMgr->Stop();
}
void RoomManager::ShowCashItemViewInfo()
{
printf("\n ---- CashItemViewInfo ----\n");
RoomSharedPtr room;
for (size_t i = 0; i < poolSize; ++i)
{
room = rooms.at(i);
if (room->GetThreadIndex() < 0)
continue;
printf("%2d : %8d", room->GetIndex(), room->GetCashItemViewCnt());
if (((i + 1) % 4) == 0)
printf("\n");
else
printf(" ");
}
printf("\n");
}
// Clears the cash-item view list of the room at `roomIndex` (ignored when
// the index is out of range or the slot is empty).
void RoomManager::ClearCashItemViewInfo(int roomIndex)
{
    if (roomIndex >= 0 && roomIndex < poolSize) {
        RoomSharedPtr room = rooms.at(roomIndex);
        if (room) {
            room->ClearCashItemViewList();
            // BUG FIX: "{}" is not a printf placeholder, so the room index was
            // never printed; use %d.
            printf("\n room:%d CashItemViewInfo Cleared.\n", room->GetIndex());
        }
    }
}
// Free-function accessor for the process-wide RoomManager singleton.
RoomManager& RoomManagerInstance()
{
return RoomManager::GetInstance();
}
}
}
} |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package lista9Correcao;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
* @author PauloCésar
*/
/**
 * Cooperative wall-clock: three threads are expected to run contaSeg,
 * contaMin and contaHora respectively. contaSeg ticks once per second and
 * hands control to the others through the m/h flags plus wait()/notifyAll()
 * on this object's monitor.
 */
public class Tempo {
// Current time components.
private int hora=0, min=0, seg=0;
// Hand-off flags: h = "hour must advance", m = "minute must advance".
// NOTE(review): `s` is written (initialisation and the 24h rollover) but
// never read in this class — confirm whether an external driver uses it.
private boolean h=false, m=false, s=true;
// Hour thread body: waits until `h` is raised by contaMin, advances the
// hour (wrapping at 24), then parks again.
public synchronized void contaHora(){
while(true){
// System.out.println("Hora...");
if(h){
hora++;
if(hora == 24){
hora=0;
s=true;
}
notifyAll();
}
h=false;
try {
wait();
} catch (InterruptedException ex) {
Logger.getLogger(Tempo.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
// Minute thread body: waits until `m` is raised by contaSeg, advances the
// minute (wrapping at 60 and raising `h`), then parks again.
public synchronized void contaMin(){
while(true){
//System.out.println("Minutos...");
if(m){
min++;
if(min==60){
min = 0;
h = true;
}
notifyAll();
}
m=false;
try {
wait();
} catch (InterruptedException ex) {
Logger.getLogger(Tempo.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
// Seconds thread body: drives the clock. Increments once per second; on a
// minute rollover it raises `m`, wakes the other threads and waits for them
// to finish before resuming. Prints the time after every tick.
public synchronized void contaSeg(){
while(true){
// System.out.println("Segundos...");
seg++;
if(seg==60){
seg=0;
m=true;
notifyAll();
try {
wait();
} catch (InterruptedException ex) {
Logger.getLogger(Tempo.class.getName()).log(Level.SEVERE, null, ex);
}
}
try {
Thread.sleep(1000);
} catch (InterruptedException ex) {
Logger.getLogger(Tempo.class.getName()).log(Level.SEVERE, null, ex);
}
System.out.println(hora+":"+min+":"+seg);
}
}
}
|
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR
# Parse the command line parameters
# that runCust will give out
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE
# Parsing command line arguments:
# BUG FIX: `[[ $# > 0 ]]` performs a lexical string comparison; use the
# arithmetic operator -gt to compare the argument count numerically.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
    -h|--help)
    echo "Usage: run_dispnet_training_philly.sh [run_options]"
    echo "Options:"
    echo "  -d|--data-dir <path> - directory path to input data (default NONE)"
    echo "  -l|--log-dir <path> - directory path to save the log files (default NONE)"
    echo "  -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
    echo "  -m|--model-dir <path> - directory path to output model file (default NONE)"
    exit 1
    ;;
    -d|--data-dir)
    DATA_DIR="$2"
    shift # pass argument
    ;;
    -p|--config-file-dir)
    CONFIG_DIR="$2"
    shift # pass argument
    ;;
    -m|--model-dir)
    MODEL_DIR="$2"
    shift # pass argument
    ;;
    -l|--log-dir)
    LOG_DIR="$2"
    shift
    ;;
    *)
    # BUG FIX: "Unkown" typo corrected; quote the message so $key is not
    # word-split or glob-expanded.
    echo "Unknown option $key"
    ;;
esac
shift # past argument or value
done
# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"
# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR
# Run the actual job
python $CONFIG_DIR/examples/AnytimeNetwork/densenet-ann.py \
--data_dir=$DATA_DIR \
--log_dir=$LOG_DIR \
--model_dir=$MODEL_DIR \
--load=${MODEL_DIR}/checkpoint \
--densenet_version=atv2 -n=72 -g=32 -s=36 --ds_name=cifar100 --batch_size=32 --nr_gpu=2 -f=10 --opt_at=-1 --samloss=6 --log_dense_base=2.0 --transition_batch_size=1 --growth_rate_multiplier=1 --bottleneck_width=4.0 --reduction_ratio=0.5 --b_type=bottleneck
|
<reponame>DimaDK24/orders-manager-frontend
import axios from 'axios'
import { getFullApiUrl } from '../../../utils'
/**
 * Prepare an order for the API: replace the two timestamp fields (objects
 * exposing a .unix() method) with plain unix-epoch numbers.
 * The input object is not mutated.
 */
const serializeOrder = (order) => {
  const orderTimestamp = order.orderTimestamp.unix()
  const deliveryDateTimestamp = order.deliveryDateTimestamp.unix()
  return { ...order, orderTimestamp, deliveryDateTimestamp }
}
// Serialize the order and POST it to the create-order endpoint.
// Resolves with the axios response.
const saveOrder = async (order) => {
const serializedOrder = serializeOrder(order)
const saveOrderUrl = getFullApiUrl('create-order')
return axios.post(saveOrderUrl, serializedOrder)
}
// Fetch the total number of orders; resolves with the `count` field of the
// endpoint's JSON response.
const getOrdersCount = async () => {
const url = getFullApiUrl('get-orders-count')
const response = await axios.get(url)
return response.data.count
}
export { saveOrder, serializeOrder, getOrdersCount }
|
<filename>src/modules/dashboard/view/pages/Achievements/components/AchievementEditor/index.tsx
import React, { useCallback, useEffect } from 'react';
import { FormikHelpers, FormikProvider, useFormik } from 'formik';
import useCreateAchievementController from 'modules/dashboard/infra/controllers/useCreateAchievementController';
import useEditAchievementController from 'modules/dashboard/infra/controllers/useEditAchievementController';
import AchievementSchema from 'modules/dashboard/view/validation/AchievementSchema';
import { Button, ImageInput, Input, Textarea } from 'shared/view/components';
import { TitleInput } from '..';
import { ButtonContainer, Container, Form } from './styles';
// Values managed by the achievement form.
interface IFormValues {
name: string;
description: string;
title: string;
// Either an already-stored image reference (string) or a freshly picked File.
image?: string | File;
}
// Form values plus identity fields — supplied when editing an existing
// achievement.
interface IAchievementInitialValues extends IFormValues {
id: string;
// Display name for the associated title (seeds TitleInput's initial value).
titleName: string;
}
// Blank slate used in "create" mode and after a successful submit.
const defaultInitialValues: IFormValues = {
name: '',
description: '',
title: '',
image: '',
};
// Props accepted by the editor panel.
interface AchievementEditorProps {
visible: boolean;
// When present the editor works in "edit" mode; otherwise it creates.
initialValues?: IAchievementInitialValues;
closeEditor: () => void;
updateAchievements: () => void;
}
// Achievement editor panel. Creates a new achievement or edits an existing
// one depending on whether `initialValues` is provided.
const AchievementEditor: React.FC<AchievementEditorProps> = ({
visible,
initialValues,
closeEditor,
updateAchievements,
}) => {
const {
loading: loadingCreate,
createAchievement,
} = useCreateAchievementController();
const {
loading: loadingEdit,
editAchievement,
} = useEditAchievementController();
// Either in-flight request marks the submit button as loading.
const loading = loadingCreate || loadingEdit;
const handleSubmit = useCallback(
async (values: IFormValues, helpers: FormikHelpers<IFormValues>) => {
// Edit when initialValues exist, otherwise create.
const success = initialValues
? await editAchievement({ ...values, id: initialValues.id })
: await createAchievement(values);
if (success) {
// Close the panel, refresh the list, and reset to the blank form.
closeEditor();
updateAchievements();
helpers.resetForm({
values: defaultInitialValues,
});
}
},
[
closeEditor,
createAchievement,
editAchievement,
initialValues,
updateAchievements,
],
);
const formik = useFormik({
initialValues: defaultInitialValues,
validationSchema: AchievementSchema,
onSubmit: handleSubmit,
});
// Re-seed the form whenever the edited achievement changes (or is cleared).
useEffect(() => {
if (!initialValues)
formik.setValues({
name: defaultInitialValues.name,
description: defaultInitialValues.description,
title: defaultInitialValues.title,
image: defaultInitialValues.image,
});
else
formik.setValues({
name: initialValues.name,
description: initialValues.description,
title: initialValues.title,
image: initialValues.image,
});
}, [initialValues]); // eslint-disable-line react-hooks/exhaustive-deps
return (
<Container $visible={visible}>
<FormikProvider value={formik}>
<Form>
<ImageInput name="image" fullWidth />
<Input name="name" placeholder="Nome da conquista" fullWidth />
<Textarea
name="description"
placeholder="Descreva como obter esta conquista"
fullWidth
/>
<TitleInput
name="title"
placeholder="Completar a conquista garante um título?"
initialValue={initialValues?.titleName}
fullWidth
/>
<ButtonContainer>
<Button type="submit" loading={loading}>
Enviar
</Button>
</ButtonContainer>
</Form>
</FormikProvider>
</Container>
);
};
export default AchievementEditor;
|
<gh_stars>0
package com.pickth.comepennyrenewal.util;
import android.app.Activity;
import java.util.ArrayList;
/**
* Tracks activities that have not been closed yet.
* Created by Kim on 2016-11-11.
*/
public class ActivityManagement {
// Activities registered from onCreate are stored here.
public static ArrayList<Activity> activityList = new ArrayList<Activity>();
// Finish every stored activity.
public void closeActivity(){
for(int i = 0; i< activityList.size(); i++){
activityList.get(i).finish();
}
}
}
|
<filename>speedviewlib/src/main/java/com/github/anastr/speedviewlib/AwesomeSpeedometer.java
package com.github.anastr.speedviewlib;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.LinearGradient;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.RadialGradient;
import android.graphics.RectF;
import android.graphics.Shader;
import android.graphics.Typeface;
import android.util.AttributeSet;
import java.util.Locale;
/**
* this Library build By <NAME>
* see it on <a href="https://github.com/anastr/SpeedView">GitHub</a>
*/
/**
 * Speedometer skin with a radial-gradient ring, triangular major ticks with
 * labels every 40 degrees, and a gradient needle. Colors and the needle
 * width are configurable via XML attributes or the setters below.
 */
public class AwesomeSpeedometer extends Speedometer {
// Reusable geometry: needle, minor tick mark and major-tick triangle.
private Path indicatorPath = new Path(),
markPath = new Path(),
trianglesPath = new Path();
private Paint indicatorPaint = new Paint(Paint.ANTI_ALIAS_FLAG),
markPaint = new Paint(Paint.ANTI_ALIAS_FLAG),
ringPaint = new Paint(Paint.ANTI_ALIAS_FLAG),
trianglesPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
// Bounds of the ring arc, inset by half the speedometer width.
private RectF speedometerRect = new RectF();
private int speedometerColor = Color.parseColor("#00e6e6")
, trianglesColor = Color.parseColor("#3949ab");
private float indicatorWidth = dpTOpx(25f);
public AwesomeSpeedometer(Context context) {
super(context);
init();
}
public AwesomeSpeedometer(Context context, AttributeSet attrs) {
super(context, attrs);
init();
initAttributeSet(context, attrs);
}
public AwesomeSpeedometer(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
initAttributeSet(context, attrs);
}
// Defaults for this skin: a 320-degree sweep starting at 135 degrees and
// the dark/cyan color scheme.
@Override
protected void defaultValues() {
MIN_DEGREE = 135;
MAX_DEGREE = 135+320;
super.setSpeedometerWidth(dpTOpx(60));
setBackgroundCircleColor(Color.parseColor("#212121"));
setIndicatorColor(Color.parseColor("#00e6e6"));
setTextColor(Color.parseColor("#ffc260"));
setSpeedTextColor(Color.WHITE);
}
// One-time paint configuration shared by all constructors.
private void init() {
markPaint.setStyle(Paint.Style.STROKE);
textPaint.setTextAlign(Paint.Align.CENTER);
ringPaint.setStyle(Paint.Style.STROKE);
textPaint.setTextSize(dpTOpx(10));
textPaint.setTypeface(Typeface.create(Typeface.DEFAULT, Typeface.BOLD));
}
// Reads this skin's XML attributes (colors, needle width) if present.
private void initAttributeSet(Context context, AttributeSet attrs) {
TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.AwesomeSpeedometer, 0, 0);
speedometerColor = a.getColor(R.styleable.AwesomeSpeedometer_speedometerColor, speedometerColor);
trianglesColor = a.getColor(R.styleable.AwesomeSpeedometer_trianglesColor, trianglesColor);
indicatorWidth = a.getDimension(R.styleable.AwesomeSpeedometer_indicatorWidth, indicatorWidth);
a.recycle();
initAttributeValue();
}
private void initAttributeValue() {
trianglesPaint.setColor(trianglesColor);
}
// Rebuilds size-dependent geometry (ring bounds, tick mark, triangle) and
// the gradients whenever the view is resized.
@Override
protected void onSizeChanged(int w, int h, int oldW, int oldH) {
super.onSizeChanged(w, h, oldW, oldH);
float risk = getSpeedometerWidth()/2f;
speedometerRect.set(risk, risk, w -risk, h -risk);
float markH = h/22f;
markPath.reset();
markPath.moveTo(w/2f, 0f);
markPath.lineTo(w/2f, markH);
markPath.moveTo(0f, 0f);
markPaint.setStrokeWidth(markH/5f);
trianglesPath.reset();
trianglesPath.moveTo(w/2f, h/20f);
trianglesPath.lineTo(w/2f -(w/40f), 0f);
trianglesPath.lineTo(w/2f +(w/40f), 0f);
trianglesPath.moveTo(0f, 0f);
updateGradient();
}
// Rebuilds the ring's radial gradient, the needle path and the needle's
// linear gradient from the current size, width and colors.
private void updateGradient() {
float w = getWidth();
// Fraction of the radius where the ring starts; the following stops fade
// background -> speedometer color -> background across the ring width.
float stop = (getWidth()/2f - getSpeedometerWidth()) / (getWidth()/2f);
float stop2 = stop+((1f-stop)*.1f);
float stop3 = stop+((1f-stop)*.36f);
float stop4 = stop+((1f-stop)*.64f);
float stop5 = stop+((1f-stop)*.9f);
int []colors = new int[]{getBackgroundCircleColor(), speedometerColor, getBackgroundCircleColor()
, getBackgroundCircleColor(), speedometerColor, speedometerColor};
Shader radialGradient = new RadialGradient(getWidth() / 2f, getHeight() / 2f, getWidth() / 2f
, colors, new float[]{stop, stop2, stop3, stop4, stop5, 1f}, Shader.TileMode.CLAMP);
ringPaint.setShader(radialGradient);
indicatorPath = new Path();
indicatorPath.moveTo(w/2f, getSpeedometerWidth() + dpTOpx(5));
indicatorPath.lineTo(w/2f -indicatorWidth, getSpeedometerWidth() +indicatorWidth);
indicatorPath.lineTo(w/2f +indicatorWidth, getSpeedometerWidth() +indicatorWidth);
indicatorPath.moveTo(0f, 0f);
Shader linearGradient = new LinearGradient(w/2f, getSpeedometerWidth() + dpTOpx(5), w/2f, getSpeedometerWidth() +indicatorWidth
, getIndicatorColor(), getBackgroundCircleColor(), Shader.TileMode.CLAMP);
indicatorPaint.setShader(linearGradient);
}
// Applies colors/sizes that may have changed via setters since last draw.
private void initDraw() {
ringPaint.setStrokeWidth(getSpeedometerWidth());
markPaint.setColor(getMarkColor());
speedTextPaint.setColor(getSpeedTextColor());
speedTextPaint.setTextSize(getSpeedTextSize());
textPaint.setColor(getTextColor());
textPaint.setTextSize(getTextSize());
}
// Draws the ring, then rotates the canvas in 4-degree steps drawing a
// triangle + label every 40 degrees and a minor mark otherwise, and finally
// draws the needle and the centered speed/unit text.
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
initDraw();
canvas.drawArc(speedometerRect, 0f, 360f, false, ringPaint);
canvas.save();
canvas.rotate(135f+90f-5f, getWidth()/2f, getHeight()/2f);
for (int i=0; i <= MAX_DEGREE - MIN_DEGREE; i+=4) {
canvas.rotate(4f, getWidth()/2f, getHeight()/2f);
if (i % 40 == 0) {
canvas.drawPath(trianglesPath, trianglesPaint);
canvas.drawText(i*getMaxSpeed()/(MAX_DEGREE -MIN_DEGREE) +""
, getWidth()/2f, getHeight()/20f +textPaint.getTextSize(), textPaint);
}
else {
// Medium marks every 20 degrees, thin marks otherwise.
if (i % 20 == 0)
markPaint.setStrokeWidth(getHeight()/22f/5);
else
markPaint.setStrokeWidth(getHeight()/22f/9);
canvas.drawPath(markPath, markPaint);
}
}
canvas.restore();
canvas.save();
canvas.rotate(90f +getDegree(), getWidth()/2f, getHeight()/2f);
canvas.drawPath(indicatorPath, indicatorPaint);
canvas.restore();
canvas.drawText(String.format(Locale.getDefault(), "%.1f", getCorrectSpeed())
, getWidth()/2f, getHeight()/2f, speedTextPaint);
canvas.drawText(getUnit()
, getWidth()/2f, getHeight()/2f +speedTextPaint.getTextSize(), speedTextPaint);
}
@Override
public void setSpeedometerWidth(float speedometerWidth) {
super.setSpeedometerWidth(speedometerWidth);
float risk = speedometerWidth/2f;
speedometerRect.set(risk, risk, getWidth() -risk, getHeight() -risk);
updateGradient();
invalidate();
}
public int getSpeedometerColor() {
return speedometerColor;
}
public void setSpeedometerColor(int speedometerColor) {
this.speedometerColor = speedometerColor;
updateGradient();
invalidate();
}
public int getTrianglesColor() {
return trianglesColor;
}
public void setTrianglesColor(int trianglesColor) {
this.trianglesColor = trianglesColor;
trianglesPaint.setColor(trianglesColor);
invalidate();
}
public float getIndicatorWidth() {
return indicatorWidth;
}
public void setIndicatorWidth(float indicatorWidth) {
this.indicatorWidth = indicatorWidth;
updateGradient();
invalidate();
}
// The low/medium/high speed-section colors are not used by this skin; the
// setters are intentionally no-ops.
@Deprecated
@Override
public int getLowSpeedColor() {
return super.getLowSpeedColor();
}
@Deprecated
@Override
public void setLowSpeedColor(int lowSpeedColor) {
}
@Deprecated
@Override
public int getMediumSpeedColor() {
return super.getMediumSpeedColor();
}
@Deprecated
@Override
public void setMediumSpeedColor(int mediumSpeedColor) {
}
@Deprecated
@Override
public int getHighSpeedColor() {
return super.getHighSpeedColor();
}
@Deprecated
@Override
public void setHighSpeedColor(int highSpeedColor) {
}
}
|
#### COLOUR
# Theme variables reused throughout the status-line configuration below.
tm_icon="♟"
tm_color_active=colour213
tm_color_inactive=colour241
tm_color_feature=colour4
tm_color_music=colour203
# separators
tm_separator_left_bold="◀"
tm_separator_left_thin="❮"
tm_separator_right_bold="▶"
tm_separator_right_thin="❯"
set -g status-left-length 32
set -g status-right-length 150
set -g status-interval 5
# default statusbar colors
# set-option -g status-bg colour0
set-option -g status-fg $tm_color_active
set-option -g status-bg default
set-option -g status-attr default
# default window title colors
set-window-option -g window-status-fg $tm_color_inactive
set-window-option -g window-status-bg default
set -g window-status-format "#I #W"
# active window title colors
set-window-option -g window-status-current-fg $tm_color_active
set-window-option -g window-status-current-bg default
set-window-option -g window-status-current-format "#[bold]#I #W"
# pane border
set-option -g pane-border-fg $tm_color_inactive
set-option -g pane-active-border-fg $tm_color_active
# message text
set-option -g message-bg default
set-option -g message-fg $tm_color_active
# pane number display
set-option -g display-panes-active-colour $tm_color_active
set-option -g display-panes-colour $tm_color_inactive
# clock
set-window-option -g clock-mode-colour $tm_color_active
# Status-line segments: now-playing scripts, battery, date and host.
tm_spotify="#[fg=$tm_color_music]#(osascript ~/.dotfiles/applescripts/spotify.scpt)"
tm_itunes="#[fg=$tm_color_music]#(osascript ~/.dotfiles/applescripts/itunes.scpt)"
tm_battery="#(~/.dotfiles/bin/battery_indicator.sh)"
tm_date="#[fg=$tm_color_inactive] %R %d %b"
tm_host="#[fg=$tm_color_feature,bold]#h"
tm_session_name="#[fg=$tm_color_feature,bold]$tm_icon #S"
set -g status-left $tm_session_name' '
# NOTE(review): $tm_rdio is never defined in this file, so it expands to
# nothing — confirm whether an rdio segment was removed intentionally.
set -g status-right $tm_itunes' '$tm_rdio' '$tm_spotify' '$tm_date' '$tm_host
|
// Minimal TCP client demo: connect to localhost:8080, send a greeting,
// print whatever the server sends back, and hang up after five seconds.
let net = require('net');
let socket = new net.Socket();
socket.connect(8080, 'localhost', function () {
socket.write('hello');
});
socket.setEncoding('utf8');
socket.on('data', function (data) {
console.log(data);
});
setTimeout(function () {
// Ask to close the connection to the server.
socket.end();
}, 5000);
class CustomerAPI:
    """In-memory customer store keyed by each customer's ``id`` attribute."""

    def __init__(self):
        # Maps customer id -> customer object.
        self.customers = {}

    def add_customer(self, customer):
        """Register *customer* under its ``id``."""
        self.customers[customer.id] = customer

    def get_customer(self, customer_id):
        """Return the customer stored under *customer_id* (KeyError if absent)."""
        return self.customers[customer_id]

    def delete_customer(self, customer_id):
        """Remove the customer stored under *customer_id* (KeyError if absent)."""
        del self.customers[customer_id]

    def update_customer(self, customer):
        """Replace the stored record for ``customer.id`` with *customer*."""
        self.customers[customer.id] = customer
#!/bin/bash
# Provisioning script: install an SSH public key, create a passwordless-sudo
# service user, install Ruby and MongoDB, then deploy and start the Reddit
# sample application under that user.
echo "======Put public key to authorized_keys======"
PUBKEY="ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDbUcZEwI15yaNgPPGdzewWuVhxHu+PaI2gjvtrbv0ptr30vsVlI7x5UZ0iCKQMm4MWJl2iOpnuo7nBcTPfABnPAecqsnjmD9I5FvgQEMtD3KSH6VJpPGYjhRVGm+RaESC8JHVqXSWEMrsoVHfypn8CMOJ1dXAvXj5TTgLyoaURWxTxMdtqcJV13ybMZ5P7CfFb5LWLfk97/5HXkyog5Jg++fixqUvW1MPONsNnLYQNIjXXXaAt0SVHiKmb48LsgLHQu+rly4uUuHCBWah0teaca3eEPkZHs5t0RrWmzZOQldjbp7zIro250Rj4zXSxq5RqSZPxUtsEjT5ytTWU3tfeDT+xFFb+ZpxaUNsTQ9UmvJLwXqXNVaP9DH8PrNIyMUfZjwO4OntXHiiQdssAxbHOiwPBdKoxJAPMq0VWPVFJ62Eijs5UTpjNMt7LteZctIoW+TbHPZ+q8yhJGvKNg5GPo+T/ZZvY6yr4PmdMf4T/ZMqp+ScrRmzDyhCj8wPyDvc= a18565272@CAB-WSM-0006725"
# BUG FIX: the redirection of "sudo echo ... > file" runs with THIS shell's
# privileges, not sudo's, so the sudo was a no-op and has been dropped.
# Quoting $PUBKEY keeps the key byte-for-byte intact.
echo "$PUBKEY" > ~/.ssh/authorized_keys
echo "======Create user======"
USERNAME="yc-user"
adduser --disabled-password --gecos "" "$USERNAME"
echo "$USERNAME ALL=(ALL:ALL) NOPASSWD: ALL" | sudo tee /etc/sudoers.d/$USERNAME
sudo -i -u $USERNAME bash << EOF
echo "Run installation as user:"
whoami
# -p tolerates a pre-existing .ssh directory on re-provisioning.
mkdir -p /home/$USERNAME/.ssh
touch /home/$USERNAME/.ssh/authorized_keys
echo "$PUBKEY" > /home/$USERNAME/.ssh/authorized_keys
chown -R $USERNAME:$USERNAME /home/$USERNAME/.ssh
chmod 0700 /home/$USERNAME/.ssh
chmod 0600 /home/$USERNAME/.ssh/authorized_keys
echo "======Install Ruby======"
sudo apt update
sudo apt install -y ruby-full ruby-bundler build-essential
ruby -v
bundler -v
echo "======Install and run MongoDB======"
wget -qO - https://www.mongodb.org/static/pgp/server-4.2.asc | sudo apt-key add -
echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/4.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.2.list
sudo apt-get update
sudo apt-get install -y mongodb-org
sudo systemctl start mongod
sudo systemctl enable mongod
sudo systemctl status mongod
echo "======Deploy Reddit======"
sudo apt-get update
sudo apt-get install -y git
cd ~ && git clone -b monolith https://github.com/express42/reddit.git
cd reddit && bundle install
puma -d
echo "======Project processes======"
ps aux | grep puma
echo "Finish installation as user \$(whoami)"
EOF
|
<filename>javatests/dagger/internal/codegen/SubcomponentBuilderValidationTest.java
/*
* Copyright (C) 2015 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.internal.codegen;
import static com.google.testing.compile.CompilationSubject.assertThat;
import static dagger.internal.codegen.Compilers.daggerCompiler;
import static dagger.internal.codegen.binding.ComponentCreatorAnnotation.SUBCOMPONENT_BUILDER;
import static dagger.internal.codegen.binding.ErrorMessages.creatorMessagesFor;
import com.google.testing.compile.Compilation;
import com.google.testing.compile.JavaFileObjects;
import dagger.internal.codegen.binding.ErrorMessages;
import javax.tools.JavaFileObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link dagger.Subcomponent.Builder} validation. */
@RunWith(JUnit4.class)
public class SubcomponentBuilderValidationTest {
private static final ErrorMessages.ComponentCreatorMessages MSGS =
creatorMessagesFor(SUBCOMPONENT_BUILDER);
// Verifies that a @Subcomponent.Builder setter declaring more than one
// parameter is rejected, with the error reported on each offending method.
@Test
public void testMoreThanOneArgFails() {
JavaFileObject childComponentFile = JavaFileObjects.forSourceLines("test.ChildComponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
"abstract class ChildComponent {",
" @Subcomponent.Builder",
" interface Builder {",
" ChildComponent build();",
" Builder set(String s, Integer i);",
" Builder set(Number n, Double d);",
" }",
"}");
Compilation compilation = daggerCompiler().compile(childComponentFile);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContaining(MSGS.setterMethodsMustTakeOneArg())
.inFile(childComponentFile)
.onLine(10);
assertThat(compilation)
.hadErrorContaining(MSGS.setterMethodsMustTakeOneArg())
.inFile(childComponentFile)
.onLine(11);
}
// Verifies that a multi-parameter setter inherited from a super-interface of
// the builder is also rejected, with the error pointing at the builder.
@Test
public void testInheritedMoreThanOneArgFails() {
JavaFileObject childComponentFile = JavaFileObjects.forSourceLines("test.ChildComponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
"abstract class ChildComponent {",
" interface Parent {",
" ChildComponent build();",
" Builder set1(String s, Integer i);",
" }",
"",
" @Subcomponent.Builder",
" interface Builder extends Parent {}",
"}");
Compilation compilation = daggerCompiler().compile(childComponentFile);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContaining(
String.format(
MSGS.inheritedSetterMethodsMustTakeOneArg(),
"set1(java.lang.String,java.lang.Integer)"))
.inFile(childComponentFile)
.onLine(13);
}
@Test
public void testSetterReturningNonVoidOrBuilderFails() {
JavaFileObject childComponentFile = JavaFileObjects.forSourceLines("test.ChildComponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
"abstract class ChildComponent {",
" @Subcomponent.Builder",
" interface Builder {",
" ChildComponent build();",
" String set(Integer i);",
" }",
"}");
Compilation compilation = daggerCompiler().compile(childComponentFile);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContaining(MSGS.setterMethodsMustReturnVoidOrBuilder())
.inFile(childComponentFile)
.onLine(10);
}
@Test
public void testInheritedSetterReturningNonVoidOrBuilderFails() {
JavaFileObject childComponentFile = JavaFileObjects.forSourceLines("test.ChildComponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
"abstract class ChildComponent {",
" interface Parent {",
" ChildComponent build();",
" String set(Integer i);",
" }",
"",
" @Subcomponent.Builder",
" interface Builder extends Parent {}",
"}");
Compilation compilation = daggerCompiler().compile(childComponentFile);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContaining(
String.format(
MSGS.inheritedSetterMethodsMustReturnVoidOrBuilder(), "set(java.lang.Integer)"))
.inFile(childComponentFile)
.onLine(13);
}
@Test
public void testGenericsOnSetterMethodFails() {
JavaFileObject childComponentFile = JavaFileObjects.forSourceLines("test.ChildComponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
"abstract class ChildComponent {",
" @Subcomponent.Builder",
" interface Builder {",
" ChildComponent build();",
" <T> Builder set(T t);",
" }",
"}");
Compilation compilation = daggerCompiler().compile(childComponentFile);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContaining(MSGS.methodsMayNotHaveTypeParameters())
.inFile(childComponentFile)
.onLine(10);
}
@Test
public void testGenericsOnInheritedSetterMethodFails() {
JavaFileObject childComponentFile = JavaFileObjects.forSourceLines("test.ChildComponent",
"package test;",
"",
"import dagger.Subcomponent;",
"",
"@Subcomponent",
"abstract class ChildComponent {",
" interface Parent {",
" ChildComponent build();",
" <T> Builder set(T t);",
" }",
"",
" @Subcomponent.Builder",
" interface Builder extends Parent {}",
"}");
Compilation compilation = daggerCompiler().compile(childComponentFile);
assertThat(compilation).failed();
assertThat(compilation)
.hadErrorContaining(
String.format(MSGS.inheritedMethodsMayNotHaveTypeParameters(), "<T>set(T)"))
.inFile(childComponentFile)
.onLine(13);
}
}
|
import numpy as np
import random
from sklearn.metrics import mean_squared_error
# Fitness function that calculates the mean-squared-error (MSE)
def mse_fitness(coefficients, xs=None, ys=None):
    """Mean squared error of the quadratic model ``c0 + c1*x + c2*x**2``.

    Parameters
    ----------
    coefficients : sequence of 3 floats
        Model coefficients ``[c0, c1, c2]``.
    xs, ys : array-like, optional
        Data to score against. Default to the module-level ``x`` and ``y``
        so existing single-argument call sites keep working.

    Returns
    -------
    float
        Mean of the squared residuals.
    """
    if xs is None:
        xs = x
    if ys is None:
        ys = y
    xs = np.asarray(xs, dtype=float)
    ys = np.asarray(ys, dtype=float)
    # Vectorized prediction replaces the original per-element Python loop.
    pred = coefficients[0] + coefficients[1] * xs + coefficients[2] * xs ** 2
    return float(np.mean((pred - ys) ** 2))
# Genetic function that takes a number of individuals in population, epochs, mutation probability
def genetic(pop_size, iterations, mut_prob, fitness=None):
    """Minimize ``fitness`` over 3 coefficients with a simple steady-state GA.

    Parameters
    ----------
    pop_size : int
        Size of the initial random population.
    iterations : int
        Number of children generated (one per iteration).
    mut_prob : float
        Probability that one gene of each child is replaced by a new random value.
    fitness : callable, optional
        Maps a length-3 coefficient array to a scalar error.
        Defaults to the module-level ``mse_fitness`` (backward compatible).

    Returns
    -------
    (numpy.ndarray, float)
        Best coefficients found and their fitness value.
    """
    if fitness is None:
        fitness = mse_fitness
    # Create the initial population of uniform random coefficient triples.
    population = np.random.uniform(-1, 1, (pop_size, 3))
    fits = np.array([fitness(individual) for individual in population])
    # Track the best individual seen so far.
    best_fit_overall = float(np.min(fits))
    best_ind = population[np.argmin(fits)].copy()
    for _ in range(iterations):
        # Select two distinct parents from the current (growing) population.
        i1, i2 = np.random.choice(np.arange(population.shape[0]), size=2, replace=False)
        # Uniform crossover: pick each gene from one of the two parents.
        # BUGFIX: the original np.random.choice([population[i1], population[i2]])
        # raised ValueError — np.random.choice only accepts 1-D candidate arrays.
        mask = np.random.random(3) < 0.5
        child = np.where(mask, population[i1], population[i2])
        # Perform mutation according to the probability.
        if np.random.random() < mut_prob:
            j = np.random.randint(3)
            child[j] = np.random.uniform(-1, 1)
        # BUGFIX: append along axis 0 so the population stays an (k, 3) matrix;
        # the original axis-less np.append flattened it to 1-D.
        population = np.append(population, np.expand_dims(child, axis=0), axis=0)
        fits = np.append(fits, fitness(child))
        # Check if the new fit is the best fit.
        if fits[-1] < best_fit_overall:
            best_fit_overall = float(fits[-1])
            best_ind = child
    # Return the best coefficients and best fit.
    return best_ind, best_fit_overall
# Define data: y = 2*x exactly, so a perfect fit is c0=0, c1=2, c2=0 (MSE 0).
x = [1, 2, 3]
y = [2, 4, 6]
# Set parameters for the genetic algorithm
pop_size = 10
iterations = 100
mut_prob = 0.3
# Call the genetic algorithm
coef_final , MSE_final = genetic(pop_size, iterations, mut_prob)
# Print the results: best coefficients and their mean squared error
print(coef_final, MSE_final)
#!/bin/bash
#
# Simple script to create a rootfs for aarch64 platforms including support
# for Kernel modules created by the rest of the scripting found in this
# module.
#
# Use this script to populate the second partition of disk images created with
# the simpleimage script of this project.
#
set -e

BUILD="../build"
DEST="$1"
DISTRO="$2"
VARIANT="$3"
BUILD_ARCH="$4"
MODEL="$5"
shift 5

export LC_ALL=C

# All five positional arguments are mandatory; MODEL is the last one.
if [ -z "$MODEL" ]; then
	echo "Usage: $0 <destination-folder> <distro> <variant: mate, i3 or minimal> <arch> <model> <packages...>"
	exit 1
fi

# chroot / mount / mknod below require root.
if [ "$(id -u)" -ne "0" ]; then
	echo "This script requires root."
	exit 1
fi

DEST=$(readlink -f "$DEST")
# Use separate tests instead of the deprecated `[ ... -a ... ]` form.
if [ -n "$LINUX" ] && [ "$LINUX" != "-" ]; then
	LINUX=$(readlink -f "$LINUX")
fi

if [ ! -d "$DEST" ]; then
	echo "Destination $DEST not found or not a directory."
	exit 1
fi

# Refuse to install over an existing rootfs; lost+found and boot are tolerated.
# "$DEST" is quoted (the original left it unquoted, breaking on paths with spaces).
if [ "$(ls -A -Ilost+found -Iboot "$DEST")" ]; then
	echo "Destination $DEST is not empty. Aborting."
	exit 1
fi

if [ -z "$DISTRO" ]; then
	DISTRO="xenial"
fi

if [ -n "$BOOT" ]; then
	BOOT=$(readlink -f "$BOOT")
fi

TEMP=$(mktemp -d)
# Exit handler (installed via `trap cleanup EXIT`): optionally drops into a
# debug shell, unwinds every mount made inside the target rootfs, and removes
# the scratch directory. Each umount is guarded by a probe for the mount's
# presence so a partial failure earlier in the script does not abort cleanup.
cleanup() {
	# DEBUG=shell gives an interactive shell inside $DEST before teardown.
	if [[ "$DEBUG" == "shell" ]]; then
		pushd "$DEST"
		bash
		popd
	fi
	# /proc/mdstat is bind-masked by do_chroot; undo it first (best effort).
	if [ -e "$DEST/proc/mdstat" ]; then
		umount "$DEST/proc/mdstat" || true
	fi
	# /proc/cmdline existing means proc is still mounted in the chroot.
	if [ -e "$DEST/proc/cmdline" ]; then
		umount "$DEST/proc"
	fi
	if [ -d "$DEST/sys/kernel" ]; then
		umount "$DEST/sys"
	fi
	umount "$DEST/tmp" || true
	if [ -d "$TEMP" ]; then
		rm -rf "$TEMP"
	fi
}
trap cleanup EXIT

ROOTFS=""
TAR_OPTIONS=""
DISTRIB=""
# Map the requested distro onto a rootfs tarball URL, the tar flags needed to
# unpack it, and the distribution family used later for provisioning.
case $DISTRO in
	arch)
		ROOTFS="http://archlinuxarm.org/os/ArchLinuxARM-aarch64-latest.tar.gz"
		TAR_OPTIONS="-z"
		DISTRIB="arch"
		;;
	xenial|zesty|artful|bionic)
		version=$(curl -s https://api.github.com/repos/ayufan-rock64/linux-rootfs/releases/latest | jq -r ".tag_name")
		ROOTFS="https://github.com/ayufan-rock64/linux-rootfs/releases/download/${version}/ubuntu-${DISTRO}-${VARIANT}-${version}-${BUILD_ARCH}.tar.xz"
		# If the variant-specific tarball is missing, fall back to "minimal".
		FALLBACK_ROOTFS="https://github.com/ayufan-rock64/linux-rootfs/releases/download/${version}/ubuntu-${DISTRO}-minimal-${version}-${BUILD_ARCH}.tar.xz"
		TAR_OPTIONS="-J --strip-components=1 binary"
		DISTRIB="ubuntu"
		;;
	sid|jessie|stretch)
		version=$(curl -s https://api.github.com/repos/ayufan-rock64/linux-rootfs/releases/latest | jq -r ".tag_name")
		ROOTFS="https://github.com/ayufan-rock64/linux-rootfs/releases/download/${version}/debian-${DISTRO}-${VARIANT}-${version}-${BUILD_ARCH}.tar.xz"
		FALLBACK_ROOTFS="https://github.com/ayufan-rock64/linux-rootfs/releases/download/${version}/debian-${DISTRO}-minimal-${version}-${BUILD_ARCH}.tar.xz"
		TAR_OPTIONS="-J --strip-components=1 binary"
		DISTRIB="debian"
		;;
	*)
		echo "Unknown distribution: $DISTRO"
		exit 1
		;;
esac

# Create the scratch/cache directories once. The original ran an unquoted
# `mkdir -p $BUILD` and then a second, redundant quoted one; keep only the
# quoted form.
mkdir -p "$BUILD"
mkdir -p tmp

TARBALL="tmp/$(basename "$ROOTFS")"
if [ ! -e "$TARBALL" ]; then
	echo "Downloading $DISTRO rootfs tarball ..."
	pushd tmp
	# flock serializes concurrent builds downloading the same tarball.
	if ! flock "$(basename "$ROOTFS").lock" wget -c "$ROOTFS"; then
		TARBALL="tmp/$(basename "$FALLBACK_ROOTFS")"
		echo "Downloading fallback $DISTRO rootfs tarball ..."
		flock "$(basename "$FALLBACK_ROOTFS").lock" wget -c "$FALLBACK_ROOTFS"
	fi
	popd
fi
# Extract with BSD tar
echo -n "Extracting ... "
set -x
# TAR_OPTIONS stays unquoted on purpose: it carries several words
# (compression flag, --strip-components, and the "binary" top directory).
tar -xf "$TARBALL" -C "$DEST" $TAR_OPTIONS
echo "OK"
# Add qemu emulation.
# Static qemu binaries let us chroot into the aarch64/arm rootfs from a
# foreign-architecture host (via binfmt_misc); removed again at the end.
cp /usr/bin/qemu-aarch64-static "$DEST/usr/bin"
cp /usr/bin/qemu-arm-static "$DEST/usr/bin"
# Prevent services from starting
# Exit code 101 from policy-rc.d tells invoke-rc.d not to start any
# service inside the chroot while packages are being installed.
cat > "$DEST/usr/sbin/policy-rc.d" <<EOF
#!/bin/sh
exit 101
EOF
chmod a+x "$DEST/usr/sbin/policy-rc.d"
# Runs a single command inside the target rootfs. Bind-mounts the host /tmp,
# mounts proc and sysfs, and bind-masks /proc/mdstat (so tools in the chroot
# cannot probe the host's RAID state), then unwinds all of it afterwards.
do_chroot() {
	cmd="$@"
	mount -o bind /tmp "$DEST/tmp"
	chroot "$DEST" mount -t proc proc /proc
	chroot "$DEST" mount -t sysfs sys /sys
	chroot "$DEST" mount --bind /dev/null /proc/mdstat
	# $cmd intentionally unquoted: word-splitting turns it back into argv.
	chroot "$DEST" $cmd
	chroot "$DEST" umount /sys /proc/mdstat /proc
	umount "$DEST/tmp"
}
# Copies a local .deb into the rootfs, installs it with dpkg inside the
# chroot (`yes |` auto-answers any interactive prompt), then deletes the file.
do_install() {
	FILE=$(basename "$1")
	cp "$1" "$DEST/$(basename "$1")"
	yes | do_chroot dpkg -i "$FILE"
	do_chroot rm "$FILE"
}
# Run stuff in new system.
# Provision the freshly extracted rootfs per distribution family.
case $DISTRIB in
	arch)
		echo "No longer supported"
		exit 1
		;;
	debian|ubuntu)
		# Borrow the host's DNS config so apt works inside the chroot.
		rm "$DEST/etc/resolv.conf"
		cp /etc/resolv.conf "$DEST/etc/resolv.conf"
		# Default credentials per variant.
		case "$VARIANT" in
			openmediavault)
				DEBUSER=root
				DEBUSERPW=openmediavault
				;;
			*)
				DEBUSER=rock64
				DEBUSERPW=rock64
				;;
		esac
		EXTRA_ARCHS="arm64"
		# Trust the ayufan package repositories inside the chroot.
		do_chroot apt-key add - < rootfs/ayufan-ppa.gpg
		do_chroot apt-key add - < rootfs/ayufan-deb-ayufan-eu.gpg
		# The heredoc is unquoted on purpose: $DISTRO/$DISTRIB/$DEBUSER/...
		# are expanded NOW, so the generated script carries literal values.
		cat <<EOF > "$DEST/install_script.bash"
#!/bin/sh
set -ex
export DEBIAN_FRONTEND=noninteractive
locale-gen en_US.UTF-8
# add non-free
sed -i 's/main contrib$/main contrib non-free/g' /etc/apt/sources.list
if [[ "$DISTRO" == "stretch" ]]; then
add-apt-repository "deb http://ppa.launchpad.net/ayufan/rock64-ppa/ubuntu bionic main"
elif [[ "$DISTRIB" == "debian" ]]; then
add-apt-repository "deb http://ppa.launchpad.net/ayufan/rock64-ppa/ubuntu xenial main"
else
add-apt-repository "deb http://ppa.launchpad.net/ayufan/rock64-ppa/ubuntu $DISTRO main"
fi
apt-get -y update
apt-get -y install dosfstools curl xz-utils iw rfkill wpasupplicant openssh-server alsa-utils \
	nano git build-essential vim jq wget ca-certificates software-properties-common dirmngr \
	gdisk parted figlet htop fake-hwclock usbutils sysstat fping iperf3 iozone3 ntp \
	network-manager psmisc flash-kernel u-boot-tools ifupdown resolvconf \
	net-tools mtd-utils rsync
if [[ "$DISTRIB" == "debian" ]]; then
apt-get -y install firmware-realtek
elif [[ "$DISTRIB" == "ubuntu" ]]; then
apt-get -y install landscape-common linux-firmware
fi
#apt-get dist-upgrade -y
fake-hwclock save
if [[ "$DEBUSER" != "root" ]]; then
adduser --gecos $DEBUSER --disabled-login $DEBUSER --uid 1000
chown -R 1000:1000 /home/$DEBUSER
usermod -a -G sudo,audio,adm,input,video,plugdev,ssh $DEBUSER
chage -d 0 "$DEBUSER"
fi
echo "$DEBUSER:$DEBUSERPW" | chpasswd
apt-get clean
EOF
		do_chroot bash "/install_script.bash"
		rm -f "$DEST/install_script.bash"
		echo -n UTC > "$DEST/etc/timezone"
		# flash-kernel needs the exact machine string to pick the right DTB.
		case $MODEL in
			rock64)
				echo "Pine64 Rock64" > "$DEST/etc/flash-kernel/machine"
				;;
			rockpro64)
				echo "Pine64 RockPro64" > "$DEST/etc/flash-kernel/machine"
				;;
			*)
				echo "Unsupported model: $MODEL"
				;;
		esac
		cat > "$DEST/etc/apt/sources.list.d/ayufan-rock64.list" <<EOF
deb http://deb.ayufan.eu/orgs/ayufan-rock64/releases /

# uncomment to use pre-release kernels and compatibility packages
# deb http://deb.ayufan.eu/orgs/ayufan-rock64/pre-releases /
EOF
		cat > "$DEST/etc/hostname" <<EOF
$MODEL
EOF
		cat > "$DEST/etc/fstab" <<EOF
LABEL=boot /boot/efi vfat defaults,sync 0 0
EOF
		cat > "$DEST/etc/hosts" <<EOF
127.0.0.1 localhost
127.0.1.1 $MODEL

# The following lines are desirable for IPv6 capable hosts
::1     localhost ip6-localhost ip6-loopback
fe00::0 ip6-localnet
ff00::0 ip6-mcastprefix
ff02::1 ip6-allnodes
ff02::2 ip6-allrouters
EOF
		# Allow root login on the Rockchip FIQ debug serial console.
		cat >> "$DEST/etc/securetty" <<EOF
# Rock 64
ttyFIQ0
EOF
		# Add multiarch runtime libraries for any extra architectures.
		for arch in $EXTRA_ARCHS; do
			if [[ "$arch" != "$BUILD_ARCH" ]]; then
				do_chroot dpkg --add-architecture "$arch"
				do_chroot apt-get update -y
				do_chroot apt-get install -y "libc6:$arch" "libstdc++6:$arch"
			fi
		done
		# Install any extra .deb packages passed on the command line.
		for package in "$@"; do
			do_install "$package"
		done
		# Variant-specific provisioning; scripts come from the rootfs tarball.
		case "$VARIANT" in
			mate)
				do_chroot /usr/local/sbin/install_desktop.sh mate
				do_chroot systemctl set-default graphical.target
				;;
			i3)
				do_chroot /usr/local/sbin/install_desktop.sh i3
				do_chroot systemctl set-default graphical.target
				;;
			lxde)
				do_chroot /usr/local/sbin/install_desktop.sh lxde
				do_chroot systemctl set-default graphical.target
				;;
			openmediavault)
				do_chroot /usr/local/sbin/install_openmediavault.sh
				;;
			containers)
				do_chroot /usr/local/sbin/install_container_linux.sh
				;;
		esac
		do_chroot systemctl enable ssh-keygen
		# Comment out the rc.local ordering dependency on the serial getty.
		sed -i 's|After=rc.local.service|#\0|;' "$DEST/lib/systemd/system/serial-getty@.service"
		# Drop host DNS config and host keys; both are regenerated on first boot.
		rm -f "$DEST/etc/resolv.conf"
		rm -f "$DEST"/etc/ssh/ssh_host_*
		do_chroot ln -s /run/resolvconf/resolv.conf /etc/resolv.conf
		do_chroot apt-get clean
		;;
	*)
		echo "Unsupported distrib:$DISTRIB and distro:$DISTRO..."
		exit 1
		;;
esac

# Bring back folders
mkdir -p "$DEST/lib"
mkdir -p "$DEST/usr"

# Clean up
rm -f "$DEST/usr/bin/qemu-arm-static"
rm -f "$DEST/usr/bin/qemu-aarch64-static"
rm -f "$DEST/usr/sbin/policy-rc.d"
rm -f "$DEST/usr/local/bin/mdadm"
rm -f "$DEST/var/lib/dbus/machine-id"
rm -f "$DEST/SHA256SUMS"

echo "Done - installed rootfs to $DEST"
package com.github.mozvip.theaudiodb;
import java.io.IOException;
import java.util.Optional;
import com.github.mozvip.theaudiodb.model.AudioDbAlbum;
import com.github.mozvip.theaudiodb.model.AudioDbResponse;
import retrofit2.Retrofit;
import retrofit2.converter.jackson.JacksonConverterFactory;
/**
 * Thin client for the theaudiodb.com REST API, backed by Retrofit with
 * Jackson JSON conversion. Obtain instances via {@link #Builder()}.
 */
public class TheAudioDbClient {

    /** Fluent builder; defaults to the public test API key {@code "1"}. */
    public static final class Builder {

        private String apiKey = "1";

        private Builder() {
        }

        public Builder apiKey(String apiKey) {
            this.apiKey = apiKey;
            return this;
        }

        public TheAudioDbClient build() {
            return new TheAudioDbClient(apiKey);
        }
    }

    public static Builder Builder() {
        return new Builder();
    }

    private final String apiKey;
    private final TheAudioDbService service;

    public TheAudioDbClient(String apiKey) {
        this.apiKey = apiKey;
        // Build the Retrofit proxy once; all calls below are synchronous.
        this.service = new Retrofit.Builder()
                .baseUrl("http://www.theaudiodb.com")
                .addConverterFactory(JacksonConverterFactory.create())
                .build()
                .create(TheAudioDbService.class);
    }

    /** Looks up an artist by name; empty when the API returns no body. */
    public Optional<AudioDbResponse> searchArtist(String artistName) throws IOException {
        return Optional.ofNullable(service.searchArtist(apiKey, artistName).execute().body());
    }

    /** Looks up a specific album of an artist; empty when the API returns no body. */
    public Optional<AudioDbResponse> searchAlbum(String artistName, String albumName) throws IOException {
        return Optional.ofNullable(service.searchAlbum(apiKey, artistName, albumName).execute().body());
    }

    /** Lists all albums of an artist; may be null when the API returns no body. */
    public AudioDbResponse searchAlbums(String artistName) throws IOException {
        return service.searchAlbums(apiKey, artistName).execute().body();
    }

    /** Fetches one album by its TADB id; may be null when the API returns no body. */
    public AudioDbResponse getAlbum(long tadbAlbumId) throws IOException {
        return service.getAlbum(apiKey, tadbAlbumId).execute().body();
    }
}
|
#!/bin/bash
#
# Cleans generated AWS/Vivado build artifacts from the given project directories.
#
# BUGFIX: the original tested `$# -eq 2`, so the usage text was printed when
# exactly two directories were passed (and the script then cleaned them
# anyway), while running with no arguments silently did nothing. The guard
# now fires when no argument is given, and exits.
if [[ $# -eq 0 ]]
then
	echo "This script cleans the given aws build directories."
	echo "Usage: $0 directory [directories ...]"
	echo "Example: clean.sh regexp"
	exit 1
fi

for project_dir in "$@"
do
	# -f: ignore artifacts that do not exist so a partially clean tree
	# does not produce a stream of "No such file" errors.
	rm -rf "$project_dir/verif/sim/"*
	rm -f "$project_dir/vivado"*.jou
	rm -f "$project_dir/vivado"*.log
	rm -f "$project_dir/build/src_post_encryption/"*
	rm -f "$project_dir/build/checkpoints/"*.dcp
	rm -f "$project_dir/build/checkpoints/to_aws/"*.dcp
	rm -f "$project_dir/build/scripts/"*.log
	rm -f "$project_dir/build/scripts/"*.out
	rm -f "$project_dir/build/scripts/fsm_encoding.os"
	rm -f "$project_dir/build/scripts/hd_visual/"*.tcl
	rm -f "$project_dir/build/reports/"*.rpt
	rm -f "$project_dir/design/ip/axi_interconnect_top/hdl/"*
	rm -f "$project_dir/design/ip/axi_interconnect_top/"*.dcp
	rm -f "$project_dir/design/ip/axi_interconnect_top/axi_interconnect_top_"*
	rm -f "$project_dir/design/ip/axi_interconnect_top/axi_interconnect_top."{veo,vho,xml}
	rm -rf "$project_dir/design/ip/axi_interconnect_top/"{doc,hdl,sim,simulation,synth}
done
|
#!/bin/bash
# Container entrypoint: "redis" runs the benchmark against a daemonized
# server, "redis-alone" runs a foreground server, anything else drops to bash.
set -e

if [ "$1" = 'redis' ]; then
	redis-server --daemonize yes --protected-mode no
	if [ -z "$2" ]; then
		# BUGFIX: the assignment must be quoted. The original unquoted
		# `OPTIONS=--requests 2000 ...` only set OPTIONS for the command
		# "2000", which the shell then tried to execute (and set -e aborted).
		OPTIONS="--requests 2000 --clients 1 -t get,set,incr"
	else
		case "$2" in
			# 2vCPU, 8GB: around 2000ms latency
			1) OPTIONS="--requests 20000 --clients 1 -t get,set,incr" ;;
			2) OPTIONS="--requests 20000 --clients 1 -t lpush,lpop,incr" ;;
			3) OPTIONS="--requests 20000 --clients 10 -t get,set,incr,lpush,lpop,incr" ;;
		esac
	fi
	echo "OPTIONS: $OPTIONS"
	# OPTIONS intentionally unquoted: it must word-split into separate flags.
	python3 aiohttp-redis-benchmark.py $OPTIONS
elif [ "$1" = 'redis-alone' ]; then
	redis-server --protected-mode no
else
	echo "redis <1,2> OR redis-alone"
	/bin/bash
fi
|
import random
def rock_paper_scissors(user_choice):
    """Play one round of rock-paper-scissors against a random computer pick.

    Input is normalized (case and surrounding whitespace), so "Rock" and
    " PAPER " are accepted; non-strings and unknown words get the original
    invalid-input message instead of raising.

    Returns one of: the invalid-input message, "It's a tie!", "You win!",
    or "You lose!".
    """
    choices = ["rock", "paper", "scissors"]
    # Validate before drawing the computer's choice (the original drew first,
    # wasting a random draw on invalid input).
    if not isinstance(user_choice, str):
        return "Invalid input. Please choose rock, paper, or scissors."
    user_choice = user_choice.strip().lower()
    if user_choice not in choices:
        return "Invalid input. Please choose rock, paper, or scissors."
    computer_choice = random.choice(choices)
    if user_choice == computer_choice:
        return "It's a tie!"
    # Each key beats its value.
    beats = {"rock": "scissors", "paper": "rock", "scissors": "paper"}
    if beats[user_choice] == computer_choice:
        return "You win!"
    return "You lose!"
import React, {
StyleSheet,
PropTypes,
View,
Text,
} from 'react-native';
export default class MyComponent extends React.Component {
render() {
return (
<View>
<Text>I wonder if there will be any problems...</Text>
</View>
);
}
}
const styles = StyleSheet.create({
container: {
},
});
|
const { newMappingTransform } = require("./helpers/factories");
const promoteFunction = require("../lib/request-reply-promoter");
const { PassThrough, Readable } = require("stream");
const { Message } = require("@projectriff/message");
// Verifies that promoteFunction wraps a plain request-reply function into the
// streaming ($order-based) calling convention, and leaves functions that are
// already streaming untouched.
describe("function promoter =>", () => {
  const data = [1, 2, 4];
  // The request-reply function under test: a pure mapper.
  const userFunction = (x) => x ** 2;
  // Hand-written streaming equivalent: pipes input 0 through the mapper to output 0.
  const streamingUserFunction = (inputs, outputs) => {
    const inputStream = inputs.$order[0];
    const outputStream = outputs.$order[0];
    inputStream.pipe(newMappingTransform(userFunction)).pipe(outputStream);
  };
  // Marker the promoter inspects to recognize an already-streaming function.
  streamingUserFunction.$interactionModel = "node-streams";
  let streamingOutput;
  const expectedResults = data.map(userFunction);
  let source;
  beforeEach(() => {
    // Fresh input stream of riff Messages and a fresh output sink per test.
    source = Readable.from(
      data.map((payload) => Message.builder().payload(payload).build())
    );
    streamingOutput = new PassThrough({ objectMode: true });
  });
  afterEach(() => {
    source.destroy();
    streamingOutput.destroy();
  });
  it("promotes request-reply functions to streaming", (done) => {
    let index = 0;
    streamingOutput.on("data", (chunk) => {
      // Guard against the promoted function emitting extra elements.
      expect(index).toBeLessThan(
        expectedResults.length,
        `expected only ${expectedResults.length} element(s)`
      );
      expect(chunk).toEqual(expectedResults[index++]);
    });
    streamingOutput.on("end", () => {
      done();
    });
    const result = promoteFunction(userFunction);
    result({ $order: [source] }, { $order: [streamingOutput] });
  });
  it("returns streaming functions as-is", (done) => {
    let index = 0;
    streamingOutput.on("data", (chunk) => {
      expect(index).toBeLessThan(
        expectedResults.length,
        `expected only ${expectedResults.length} element(s)`
      );
      expect(chunk).toEqual(expectedResults[index++]);
    });
    streamingOutput.on("end", () => {
      done();
    });
    // Already streaming: the promoter must hand back the same behavior.
    const result = promoteFunction(streamingUserFunction);
    result({ $order: [source] }, { $order: [streamingOutput] });
  });
});
|
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
# flake8: noqa
from . import class_balanced_losses
from . import cross_entropy_loss
from . import triplet_loss
from . import recall_loss
from . import cross_focal_loss
from . import cross_entropy_loss_with_ignore
from . import am_softmax_loss_with_ignore
|
#!/usr/bin/env bash
#
# Onix Probare - Copyright (c) 2018-2020 by www.gatblau.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
#
# Contributors to this project, hereby assign copyright in this code to the project,
# to be licensed under the same terms as the rest of the code.
#
VERSION=$1
if [ $# -eq 0 ]; then
    echo "An image version is required for Onix Probare. Provide it as a parameter."
    echo "Usage is: sh version.sh [APP VERSION] - e.g. sh version.sh v1.0.0"
    exit 1
fi
# -f: do not error when the version file does not exist yet
# (the original plain `rm version` printed an error on a fresh checkout).
rm -f version
# creates a TAG for the newly built docker images
# $(...) replaces the legacy backtick command substitution.
DATE=$(date '+%d%m%y%H%M%S')
HASH=$(git rev-parse --short HEAD)
TAG="${VERSION}-${HASH}-${DATE}"
echo "${TAG}" > version
echo "TAG is: ${TAG}"
sleep 2
package frc.robot.subsystems.shooter;
import com.ctre.phoenix.motorcontrol.NeutralMode;
import com.ctre.phoenix.motorcontrol.StatusFrameEnhanced;
import com.ctre.phoenix.motorcontrol.SupplyCurrentLimitConfiguration;
import com.ctre.phoenix.motorcontrol.TalonFXSimCollection;
import com.ctre.phoenix.motorcontrol.can.WPI_TalonFX;
import com.ctre.phoenix.sensors.SensorVelocityMeasPeriod;
import edu.wpi.first.math.controller.PIDController;
import edu.wpi.first.math.controller.SimpleMotorFeedforward;
import edu.wpi.first.math.filter.LinearFilter;
import edu.wpi.first.math.numbers.N1;
import edu.wpi.first.math.system.LinearSystem;
import edu.wpi.first.math.system.plant.DCMotor;
import edu.wpi.first.math.system.plant.LinearSystemId;
import edu.wpi.first.wpilibj.RobotController;
import edu.wpi.first.wpilibj.simulation.FlywheelSim;
import edu.wpi.first.wpilibj2.command.SubsystemBase;
import frc.robot.Constants;
import frc.robot.custom.ArborMath;
import frc.robot.subsystems.VisionSubsystem.VisionSupplier;
import io.github.oblarg.oblog.Loggable;
import io.github.oblarg.oblog.annotations.Config;
import io.github.oblarg.oblog.annotations.Log;
/**
 * Controls the two-Falcon shooter flywheel: a feedforward + P controller runs
 * every loop in {@link #periodic()}, and a {@link FlywheelSim} backs the
 * simulation path.
 *
 * NOTE(review): mSlave is fully configured here but never told to follow
 * mMaster (no follow()/set() call in this class) — confirm the follower
 * linkage is established elsewhere.
 */
public class FlywheelSubsystem extends SubsystemBase implements Loggable {
  private final WPI_TalonFX mMaster = new WPI_TalonFX(Constants.CAN.kFlywheelMaster);
  private final WPI_TalonFX mSlave = new WPI_TalonFX(Constants.CAN.kFlywheelSlave);
  private final TalonFXSimCollection mMasterSim = mMaster.getSimCollection();
  // P gain expressed as volts per RPM of error (20 V at 9570 RPM of error).
  private PIDController mPID = new PIDController(20d/9570d, 0, 0);
  private final SimpleMotorFeedforward mFeedForward = Constants.Flywheel.kFeedForward;
  public double mTargetRPM, mCurrentRPM, mPIDEffort, mFFEffort;
  // Velocity plant identified with kV = 12 V / 6380 RPM, kA = 0.12.
  public static final LinearSystem<N1, N1, N1> kPlant = LinearSystemId.identifyVelocitySystem(12d/6380d, 0.12);
  // Single-pole IIR smoothing of the measured RPM (0.1 s time constant, 20 ms period).
  LinearFilter mNoiseFilter = LinearFilter.singlePoleIIR(0.1, 0.02);
  VisionSupplier vision;
  public boolean useVision = false;
  private FlywheelSim mSim = new FlywheelSim(
    kPlant,
    DCMotor.getFalcon500(1),
    1/1.5
  );
  /**
   * @param vision supplier of targeting data (currently only stored; the
   *               vision-based RPM lookup in runFlywheel is commented out)
   */
  public FlywheelSubsystem(VisionSupplier vision) {
    configureMotor();
    mPID.setTolerance(100);
    mTargetRPM = 0;
    this.vision = vision;
  }
  /**
   * Applies factory-default then shooter-specific configuration to both
   * Falcons: coast mode, velocity measurement windows, inversion, slowed
   * status frames (250 ms) to cut CAN traffic, and a 40 A supply limit.
   */
  public void configureMotor(){
    mMaster.configFactoryDefault();
    mMaster.setNeutralMode(NeutralMode.Coast);
    mMaster.configVelocityMeasurementPeriod(SensorVelocityMeasPeriod.Period_1Ms);
    mMaster.configVelocityMeasurementWindow(1);
    mMaster.setInverted(true);
    mMaster.setStatusFramePeriod(StatusFrameEnhanced.Status_10_Targets, 250);
    mMaster.setStatusFramePeriod(StatusFrameEnhanced.Status_11_UartGadgeteer, 250);
    mMaster.setStatusFramePeriod(StatusFrameEnhanced.Status_13_Base_PIDF0, 250);
    mMaster.setStatusFramePeriod(StatusFrameEnhanced.Status_14_Turn_PIDF1, 250);
    mMaster.setStatusFramePeriod(StatusFrameEnhanced.Status_9_MotProfBuffer, 250);
    mMaster.configSupplyCurrentLimit(new SupplyCurrentLimitConfiguration(true, 40, 40, 0));
    mSlave.configFactoryDefault();
    mSlave.setNeutralMode(NeutralMode.Coast);
    mSlave.configVelocityMeasurementPeriod(SensorVelocityMeasPeriod.Period_100Ms);
    mSlave.configVelocityMeasurementWindow(32);
    mSlave.setInverted(false);
    mSlave.setStatusFramePeriod(StatusFrameEnhanced.Status_10_Targets, 250);
    mSlave.setStatusFramePeriod(StatusFrameEnhanced.Status_11_UartGadgeteer, 250);
    mSlave.setStatusFramePeriod(StatusFrameEnhanced.Status_13_Base_PIDF0, 250);
    mSlave.setStatusFramePeriod(StatusFrameEnhanced.Status_14_Turn_PIDF1, 250);
    mSlave.setStatusFramePeriod(StatusFrameEnhanced.Status_9_MotProfBuffer, 250);
    mSlave.configSupplyCurrentLimit(new SupplyCurrentLimitConfiguration(true, 40, 40, 0));
  }
  /**
   * Closed-loop step, called from periodic(): computes feedforward + P effort
   * toward the target and applies it as a voltage on the master.
   * NOTE(review): both efforts target mTargetRPM + 200 — presumably an
   * intentional overshoot bias; confirm (ready() uses the same +200 offset).
   */
  public void runFlywheel(){
    mCurrentRPM = getRPM();
    // if(vision.hasTarget() && useVision){
    //   setTargetRPM(InterpolatingTable.get(vision.getDistance()).rpm);
    // }
    if(mTargetRPM != 0){
      mFFEffort = mFeedForward.calculate(mTargetRPM+200);
      mPIDEffort = mPID.calculate(mCurrentRPM, mTargetRPM+200);
    }else{
      // Zero target means "off": apply no voltage rather than regulating to 0 RPM.
      mFFEffort = 0;
      mPIDEffort = 0;
    }
    mMaster.setVoltage(mPIDEffort + mFFEffort);
  }
  /** Sets the flywheel speed setpoint in RPM (0 disables output). */
  @Config(tabName = "Shooter", name = "Set RPM")
  public void setTargetRPM(double newTarget){
    mTargetRPM = newTarget;
  }
  /**
   * Filtered flywheel RPM. Conversion: ticks/100ms / 2048 ticks-per-rev
   * * 600 (100ms -> min) * 1.5 (sensor-to-flywheel gearing).
   */
  @Log(tabName = "Shooter", name = "Filtered RPM")
  public double getRPM(){
    return mNoiseFilter.calculate((mMaster.getSelectedSensorVelocity()/2048d)*600*1.5);
  }
  /** Unfiltered flywheel RPM (same unit conversion as getRPM, no IIR filter). */
  @Log(tabName = "Shooter", name ="Raw RPM")
  private double getRawRPM(){
    return (mMaster.getSelectedSensorVelocity()/2048d)*600*1.5;
  }
  /** True when spinning (target non-zero) and within 100 RPM of the biased target. */
  @Log(tabName = "Shooter", name = "Flywheel Ready")
  public boolean ready(){
    return ArborMath.inTolerance(Math.abs(mTargetRPM+200-mCurrentRPM), 100) && mTargetRPM != 0;
  }
  /** Zeroes the setpoint and immediately cuts motor output. */
  public void stop(){
    setTargetRPM(0);
    mMaster.setVoltage(0);
  }
  @Override
  public void periodic() {
    runFlywheel();
  }
  /** Feeds applied voltage into the flywheel model and writes simulated sensor state back. */
  @Override
  public void simulationPeriodic() {
    mSim.setInput(mMaster.get() * RobotController.getInputVoltage());
    mSim.update(0.02);
    // RPM -> ticks per 100 ms, including the 1/1.5 sensor gearing.
    double flywheelNativeVelocity = mSim.getAngularVelocityRPM() * 2048d / (60d * 10d) * 1d/1.5d;
    double flywheelNativePositionDelta = flywheelNativeVelocity*10*0.02;
    mMasterSim.setIntegratedSensorVelocity((int)flywheelNativeVelocity);
    mMasterSim.addIntegratedSensorPosition((int)flywheelNativePositionDelta);
    mMasterSim.setBusVoltage(RobotController.getBatteryVoltage());
  }
  /** Current RPM setpoint, exposed for logging. */
  @Log(tabName = "Shooter", name = "RPM Target")
  private double getRPMTarget(){
    return mTargetRPM;
  }
  public void enableVision(){
    useVision = true;
  }
  public void disableVision(){
    useVision = false;
  }
}
|
<reponame>jeremybise/thosegeeks-gatsby
import React from "react"
import { css } from "@emotion/core"
import { breakpoints } from "./design-tokens"
// Emotion styles for the page heading: uppercase, centered, themed color,
// and a larger font size from the md breakpoint up.
const wrapper = css`
  font-size: 2rem;
  text-transform: uppercase;
  margin-bottom: 2rem;
  color: var(--text);
  text-align: center;
  @media (min-width: ${breakpoints.md}) {
    font-size: 3.25rem;
  }
`
// Renders its children as the page's <h1> title using the styles above.
const PageTitle = ({ children }) => <h1 css={wrapper}>{children}</h1>
export default PageTitle
|
from typing import List
class KinesisStreamRecord:
    # Assume this class is already implemented
    # Placeholder stub: only the type's identity is needed here; the real
    # record fields (partition key, data payload, etc.) live elsewhere.
    pass
class KinesisStreamEvent:
    """Container for the records delivered in one Kinesis stream event.

    Attributes
    ----------
    Records: List[KinesisStreamRecord]
        A list of Kinesis stream records within the event.
    """

    def __init__(self):
        # Backing storage for the Records property; starts empty.
        self._records = []

    @property
    def Records(self) -> List[KinesisStreamRecord]:
        """Records currently attached to this event."""
        return self._records

    @Records.setter
    def Records(self, records: List[KinesisStreamRecord]):
        # Guard clause: anything that is not a plain list is rejected,
        # exactly as in the original implementation.
        if not isinstance(records, list):
            raise ValueError("Records must be a list of KinesisStreamRecord objects")
        self._records = records
# Usage example: attach two records via the validated Records setter.
event = KinesisStreamEvent()
record1 = KinesisStreamRecord()
record2 = KinesisStreamRecord()
# Assigning a list satisfies the setter's isinstance check.
event.Records = [record1, record2]
print(event.Records)  # Output: [record1, record2]
<reponame>coder-blog/satellite
package com.kinstalk.satellite.common.utils;
/**
* Created by digitZhang on 16/4/18.
*/
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.text.DateFormat;
import java.text.FieldPosition;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class ExecSystemCommand {
    // Markers telling ReadThread which result slot it fills.
    private static String INPUT_STREAM = "INPUT_STREAM";
    private static String ERROR_STREAM = "ERROR_STREAM";
    private static Logger logger = LoggerFactory.getLogger(ExecSystemCommand.class);
    /**
     * Executes a system command and returns its output.
     *
     * @param command the command to execute
     * @return a two-element array: index 0 holds the standard output and
     *         index 1 the error output; the slot that does not apply
     *         (based on the process exit code) is set to null
     */
    public static String[] exec(String command) {
        // Result holder: [0] = standard output, [1] = error output.
        String result[] = new String[2];
        try {
            Process process;
            Runtime runtime = Runtime.getRuntime();
            String osName = System.getProperty("os.name").toLowerCase();
            if (osName.contains("windows 9")) {
                process = runtime.exec("command.com /c " + command);
            } else if ((osName.contains("nt"))
                    || (osName.contains("windows 20"))
                    || (osName.contains("windows xp") || (osName.contains("windows vista")))) {
                /*
                 * The /C switch tells cmd.exe that the following string is a
                 * command and closes the DOS window when it finishes
                 * (see `cmd /?` for details).
                 */
                process = runtime.exec("cmd.exe /c " + command);
            } else {
                // Linux, Unix
                process = runtime.exec(command);
            }
            Object mutexInstream = new Object();
            Object mutexErrorstream = new Object();
            new ReadThread(process.getInputStream(), INPUT_STREAM, result, mutexInstream)
                    .start();
            new ReadThread(process.getErrorStream(), ERROR_STREAM, result, mutexErrorstream)
                    .start();
            // Give the reader threads a moment to start (so they acquire
            // their mutexes before the main thread tries to).
            Thread.sleep(20);
            /*
             * Both streams must be fully drained before continuing, or the
             * caller may see null/empty results. Reading both streams from a
             * single thread can also deadlock: if the command fails, reading
             * stdout first blocks; if it succeeds, reading stderr first can
             * block — so each stream gets its own thread. Each reader holds
             * its own mutex while reading; acquiring both here blocks the
             * main thread until both streams have been consumed, regardless
             * of which reader finishes first.
             */
            synchronized (mutexInstream) {
                synchronized (mutexErrorstream) {
                    /*
                     * waitFor() blocks until the child process terminates
                     * (returning immediately if it already has). By this
                     * point both reader threads are done, so the child has
                     * effectively finished; the call mainly provides the
                     * exit status used below to null out the unused slot.
                     *
                     * Important: waitFor() must NOT be called before the
                     * streams are drained — a command with a lot of output
                     * can fill the OS pipe buffer and hang the child, which
                     * is exactly why the reads happen on separate threads.
                     */
                    if (process.waitFor() != 0) {
                        result[0] = null;
                    } else {
                        result[1] = null;
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return result;
    }
    /*
     * Reader thread for the process's standard-output or error stream.
     */
    private static class ReadThread extends Thread {
        private InputStream is;
        private String[] resultArr;
        private String type;
        private final Object mutex;
        public ReadThread(InputStream is, String type, String[] resultArr, Object mutex) {
            this.is = is;
            this.type = type;
            this.resultArr = resultArr;
            this.mutex = mutex;
        }
        public void run() {
            // Hold the mutex for the whole read so exec() blocks until this
            // stream is fully consumed.
            synchronized (mutex) {
                try {
                    // int readInt = is.read();
                    StringBuilder sbu = new StringBuilder();
                    int idx;
                    byte[] byteArr = new byte[1024 * 10];
                    while ((idx = is.read(byteArr)) != -1) {
                        byte[] byteData = new byte[idx];
                        System.arraycopy(byteArr, 0, byteData, 0, idx);
                        if (byteData.length > 1) {
                            sbu.append(new String(byteData));
                            logger.info(new String(byteData).replaceAll("\n", ""));
                        }
                    }
//
//                    /*
//                     * Raw stream reads are used on purpose: character and
//                     * buffered readers were observed to block on some
//                     * commands (root cause unknown).
//                     */
//                    while (readInt != -1) {
//                        result.add(Byte.valueOf(String.valueOf((byte) readInt)));
//                        readInt = is.read();
//                    }
//                    byte[] byteArr = new byte[result.size()];
//                    for (int i = 0; i < result.size(); i++) {
//                        byteArr[i] = ((Byte) result.get(i)).byteValue();
//                    }
                    // Write into the slot matching this thread's stream type.
                    if (ERROR_STREAM.equals(this.type)) {
                        resultArr[1] = sbu.toString();
                    } else {
                        resultArr[0] = sbu.toString();
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    // Manual test hook (intentionally empty).
    public static void main(String args[]) throws Throwable {
    }
}
|
function findCommonElements (arr1: string[], arr2: string[]) {
let result = [];
// loop through each array
arr1.forEach((item1) => {
arr2.forEach((item2) => {
// if item1 matches item2, push to result array
if (item1 === item2) {
result.push(item1);
}
});
});
// print out the result array
console.log(result);
}
// example: "dog" and "bear" appear in both arrays.
let arr1 = ["cat", "dog", "bear"];
let arr2 = ["dog", "bear", "tiger"];
findCommonElements(arr1, arr2);
// Output: ["dog", "bear"]
def createTodoList(name):
    """Create a fresh todo list.

    Returns a list holding a single entry: a dict with the given ``name``
    and an empty ``tasks`` list.
    """
    return [{'name': name, 'tasks': []}]
import gql from "graphql-tag";
// Fetches id and name of every instructor.
export const GET_INSTRUCTORS = gql`
  {
    allInstructors {
      id
      name
    }
  }
`;
// Fetches a single instructor by id.
export const GET_INSTRUCTOR = gql`
  query ($id: ID) {
    instructor(id: $id) {
      id
      name
    }
  }
`;
// Updates an instructor; returns the updated id and name.
export const UPDATE_INSTRUCTOR = gql`
  mutation ($instructorInput: InstructorInput) {
    updateInstructor(instructorInput: $instructorInput) {
      id
      name
    }
  }
`;
// Deletes an instructor by id; returns only the deleted id.
export const DELETE_INSTRUCTOR = gql`
  mutation ($id: ID) {
    deleteInstructor(id: $id) {
      id
    }
  }
`;
// Creates an instructor; returns the new id and name.
export const CREATE_INSTRUCTOR = gql`
  mutation ($instructorInput: InstructorInput) {
    createInstructor(instructorInput: $instructorInput) {
      id
      name
    }
  }
`;
import { Schema } from 'mongoose';
// Allowed inventory costing methods for a storage sheet.
const types = [
'PEPS',
'COSTO PROMEDIO',
];
// Mongoose schema for a storage sheet: a named document using one costing
// method, holding references to its storage movement records.
export const SheetStorageSchema: Schema = new Schema({
// Human-readable sheet name.
name: {
type: String,
required: true,
},
// Costing method; restricted to the values in `types`.
type: {
type: String,
enum: types,
required: true,
},
// References to the individual StorageInfo movement documents.
info: [
{
type: Schema.Types.ObjectId,
ref: 'StorageInfo',
},
],
},
{
// Automatically maintain createdAt / updatedAt fields.
timestamps: true,
},
);
|
package com.dd.sample;
import android.app.ListActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.ListView;
/**
 * Launcher screen: a plain list of sample names; tapping a row opens the
 * corresponding demo activity.
 */
public class MainActivity extends ListActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Back the list with the sample titles declared in resources.
        final String[] sampleItems = getResources().getStringArray(R.array.sample_list);
        final ArrayAdapter<String> listAdapter =
                new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, sampleItems);
        setListAdapter(listAdapter);
    }

    @Override
    protected void onListItemClick(ListView l, View v, int position, long id) {
        // Each row index maps to exactly one demo screen; other rows do nothing.
        if (position == 0) {
            openSignIn(false);
        } else if (position == 1) {
            openSignIn(true);
        } else if (position == 2) {
            open(MessageActivity.class);
        } else if (position == 3) {
            open(UploadActivity.class);
        } else if (position == 4) {
            open(StateSampleActivity.class);
        }
    }

    /** Starts the sign-in demo, optionally in endless-progress mode. */
    private void openSignIn(boolean isEndlessMode) {
        Intent intent = new Intent(this, SignInActivity.class);
        intent.putExtra(SignInActivity.EXTRAS_ENDLESS_MODE, isEndlessMode);
        startActivity(intent);
    }

    /** Launches the given demo activity with no extras. */
    private void open(Class<?> target) {
        startActivity(new Intent(this, target));
    }
}
|
'use strict';
const electron = require('electron');
const { app, shell, Menu, autoUpdater, dialog, BrowserWindow } = electron;
const path = require('path');
const defaultMenu = require('electron-default-menu');
const env = require('./config/env.js');
const constants = require('./helpers/constants');
const fs = require('fs');
let preferencesWindow = null;
// Builds and installs the complete application menu (app, File, View,
// Conversation, Window, Help), wiring most items to IPC messages sent to the
// renderer via `webContents`.
// NOTE(review): several places index the menu by position (menu[0], menu[1],
// menu[1].submenu[1]) — these depend on the exact splice order above them and
// are fragile if items are added or moved.
function setupMenu(webContents) {
const menu = defaultMenu(app, shell);
// Main menu
let mainMenu = menu[0];
mainMenu.submenu.splice(
1,
0,
{
type: 'separator',
},
{
label: 'Messenger Preferences...',
click() {
webContents.send(constants.SHOW_MESSENGER_SETTINGS);
},
},
{
label: 'Preferences...',
accelerator: 'CmdOrCtrl+,',
click() {
// Only one preferences window at a time.
if (preferencesWindow != null) {
return;
}
preferencesWindow = new BrowserWindow({
width: 600,
height: 700,
titleBarStyle: 'default',
title: 'Preferences',
webPreferences: {
nodeIntegration: true,
},
});
preferencesWindow.loadURL(`file://${path.join(__dirname, '/preferences/index.html')}`);
preferencesWindow.once('ready-to-show', () => {
preferencesWindow.show();
});
// Allow a new window to be opened after this one closes.
preferencesWindow.on('close', () => {
preferencesWindow = null;
});
},
}
);
// "Check for Update" only ships in production builds.
if (env.name === 'production') {
menu[0].submenu.splice(1, 0, {
label: 'Check for Update',
click() {
autoUpdater.on('update-not-available', () => {
autoUpdater.removeAllListeners('update-not-available');
dialog.showMessageBox({
message: 'No update available',
detail: `${env.appName} ${app.getVersion()} is the latest version available.`,
buttons: [ 'OK' ],
});
});
autoUpdater.checkForUpdates();
},
});
}
// Fix incorrect accelerator for "Hide Others" (imported from defaultMenu())
mainMenu.submenu[mainMenu.submenu.findIndex(item => item.label === 'Hide Others')].accelerator = 'Command+Option+H';
// File menu
menu.splice(menu.findIndex(item => item.label === 'Edit'), 0, {
label: 'File',
submenu: [
{
label: 'New Conversation',
accelerator: 'CmdOrCtrl+N',
click() {
webContents.send(constants.NEW_CONVERSATION);
},
},
{
label: 'Dark Mode',
accelerator: 'CmdOrCtrl+D',
type: 'checkbox',
checked: false,
click() {
// NOTE(review): menu[1].submenu[1] assumes the File menu sits at index 1
// and Dark Mode at index 1 within it — confirm against the splices above.
console.log(menu[1].submenu[1].checked);
if (!(menu[1].submenu[1].checked)) {
// Enable dark mode by injecting the night-mode stylesheet.
webContents.insertCSS(fs.readFileSync(path.join(__dirname, '/assets/nightmode.css'), 'utf8'));
menu[1].submenu[1].checked = true;
}
else {
//webContents.executeJavaScript('document.styleSheets[6].disabled = !document.styleSheets[6].disabled;');
// Disable dark mode by reloading (drops the injected CSS).
webContents.reload()
menu[1].submenu[1].checked = false;
}
},
},
],
});
menu[1].submenu.push(
{
type: 'separator',
},
{
label: 'Logout',
click() {
logout(webContents);
},
}
);
// View menu
let viewMenu = menu[menu.findIndex(item => item.label === 'View')];
viewMenu.submenu.splice(
0,
0,
{
type: 'separator',
},
{
label: 'Inbox',
accelerator: 'CmdOrCtrl+1',
click() {
webContents.send(constants.SHOW_MESSAGE_LIST_INBOX);
},
},
{
label: 'Active contacts',
accelerator: 'CmdOrCtrl+2',
click() {
webContents.send(constants.SHOW_MESSAGE_LIST_ACTIVE_CONTACTS);
},
},
{
label: 'Message requests',
accelerator: 'CmdOrCtrl+3',
click() {
webContents.send(constants.SHOW_MESSAGE_LIST_MESSAGE_REQUESTS);
},
},
{
label: 'Archived threads',
accelerator: 'CmdOrCtrl+4',
click() {
webContents.send(constants.SHOW_MESSAGE_LIST_ARCHIVED_THREADS);
},
},
{
type: 'separator',
}
);
// Conversation menu
menu.splice(menu.findIndex(item => item.label === 'Window'), 0, {
label: 'Conversation',
submenu: [
{
label: 'Mute',
accelerator: 'CmdOrCtrl+shift+M',
click() {
webContents.send(constants.MUTE_CONVERSATION);
},
},
{
type: 'separator',
},
{
label: 'Archive',
accelerator: 'CmdOrCtrl+shift+A',
click() {
webContents.send(constants.ARCHIVE_CONVERSATION);
},
},
{
label: 'Delete',
accelerator: 'CmdOrCtrl+shift+D',
click() {
webContents.send(constants.DELETE_CONVERSATION);
},
},
{
type: 'separator',
},
{
label: 'Mark as Unread/Read',
accelerator: 'CmdOrCtrl+shift+R',
click() {
webContents.send(constants.MARK_CONVERSATION_UNREAD);
},
},
{
label: 'Mark as Spam',
click() {
webContents.send(constants.MARK_CONVERSATION_SPAM);
},
},
{
label: 'Report Spam or Abuse',
click() {
webContents.send(constants.REPORT_CONVERSATION_SPAM_OR_ABUSE);
},
},
],
});
// Window Menu
let windowMenu = menu[menu.findIndex(item => item.label === 'Window')];
windowMenu.submenu.push(
{
label: 'Select Next Conversation',
accelerator: 'CmdOrCtrl+]',
click() {
webContents.send(constants.NEXT_CONVERSATION);
},
},
{
label: 'Select Previous Conversation',
accelerator: 'CmdOrCtrl+[',
click() {
webContents.send(constants.PREV_CONVERSATION);
},
},
{
type: 'separator',
}
);
// Help Menu
let helpMenu = menu[menu.findIndex(item => item.label === 'Help')];
helpMenu.submenu[helpMenu.submenu.findIndex(item => item.label === 'Learn More')].click = () => {
// Load goofy website
shell.openExternal('https://www.goofyapp.com');
};
Menu.setApplicationMenu(Menu.buildFromTemplate(menu));
}
// Logs the user out by deleting every cookie in the session, then relaunching
// the app so it starts from a clean (signed-out) state.
function logout(webContents) {
const c = webContents.session.cookies;
c.get({}, (error, cookies) => {
// Rebuild each cookie's URL from its attributes, as cookies.remove() requires.
for (let i = cookies.length - 1; i >= 0; i--) {
const { name, domain, path, secure } = cookies[i];
const url = 'http' + (secure ? 's' : '') + '://' + domain + path;
c.remove(url, name, () => {});
}
});
// this waits for all cookies to be removed, it would be nicer to wait for all callbacks to be called
setTimeout(
() => {
app.relaunch();
app.exit(0);
},
500
);
}
module.exports = setupMenu;
|
#!/bin/sh
################################################################################
##
## Licensed to the Apache Software Foundation (ASF) under one or more
## contributor license agreements. See the NOTICE file distributed with
## this work for additional information regarding copyright ownership.
## The ASF licenses this file to You under the Apache License, Version 2.0
## (the "License"); you may not use this file except in compliance with
## the License. You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
################################################################################
set -e
# Download the collection of files associated with an Apache PLC4X
# Release or Release Candidate from the Apache Distribution area:
# https://dist.apache.org/repos/dist/release/plc4x
# or https://dist.apache.org/repos/dist/dev/plc4x
# respectively.
#
# Prompts before taking actions unless "--nquery"
# Prompts to perform signature validation (using buildTools/check_sigs.sh)
# unless --nvalidate or --validate is specified.
# common.sh supplies the helpers used below: setUsage, handleHelp, requireArg,
# checkVerNum, checkRcNum, noExtraArgs, confirm, die, and the PLC4X_ASF_SVN_*
# URL variables.
. `dirname $0`/common.sh
setUsage "`basename $0` [--nquery] [--validate|--nvalidate] <version> [<rc-num>]"
handleHelp "$@"
BUILDTOOLS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# --nquery suppresses all confirmation prompts.
NQUERY=
if [ "$1" == "--nquery" ]; then
NQUERY="--nquery"; shift
fi
# VALIDATE: 1 = validate, 0 = skip, -1 = ask the user later.
VALIDATE=-1 # query
if [ "$1" == "--validate" ]; then
VALIDATE=1; shift
elif [ "$1" == "--nvalidate" ]; then
VALIDATE=0; shift
fi
requireArg "$@"
VER=$1; shift
checkVerNum $VER || usage "Not a X.Y.Z version number \"$VER\""
# Optional trailing argument selects release-candidate mode.
RC_NUM=
if [ $# -gt 0 ]; then
RC_NUM=$1; shift
checkRcNum ${RC_NUM} || usage "Not a release candidate number \"${RC_NUM}\""
fi
noExtraArgs "$@"
# Release or Release Candidate mode
IS_RC=
if [ ${RC_NUM} ]; then
IS_RC=1
fi
# RCs live under the dev URL; releases under the release URL.
BASE_URL=${PLC4X_ASF_SVN_RELEASE_URL}
if [ ${IS_RC} ]; then
BASE_URL=${PLC4X_ASF_SVN_RC_URL}
fi
RC_SFX=
if [ ${IS_RC} ]; then
RC_SFX=rc${RC_NUM}
fi
DST_BASE_DIR=downloaded-plc4x-${VER}${RC_SFX}
# Refuse to clobber a previous download.
[ -d ${DST_BASE_DIR} ] && die "${DST_BASE_DIR} already exists"
[ ${NQUERY} ] || confirm "Proceed to download to ${DST_BASE_DIR} from ${BASE_URL}?" || exit
echo Downloading to ${DST_BASE_DIR} ...
# Download a single URL into the current directory.
# -f: fail (non-zero exit) on HTTP errors so `set -e` aborts on bad downloads.
# -O: save under the remote file name.
function mywget() {
    # OSX lacks wget by default, so use curl.
    # Quote the URL to prevent word splitting / glob expansion (bug fix).
    (set -x; curl -f -O "$1")
}
# Download a release artifact together with its detached signature (.asc)
# and checksum (.sha512) files.
function getSignedBundle() {
    local url=$1
    local sfx
    for sfx in "" .asc .sha512; do
        mywget ${url}${sfx}
    done
}
# Fetch the top-level KEYS file, then the per-version artifacts.
mkdir -p ${DST_BASE_DIR}
cd ${DST_BASE_DIR}
ABS_BASE_DIR=`pwd`
URL=${BASE_URL}
mywget ${URL}/KEYS
DST_VER_DIR=${VER}
URL=${BASE_URL}/${VER}
# Release candidates live one directory deeper (e.g. 0.7.0/rc1).
if [ ${IS_RC} ]; then
DST_VER_DIR=${DST_VER_DIR}/${RC_SFX}
URL=${URL}/${RC_SFX}
fi
mkdir -p ${DST_VER_DIR}
cd ${DST_VER_DIR}
mywget ${URL}/README
mywget ${URL}/RELEASE_NOTES
getSignedBundle ${URL}/apache-plc4x-${VER}-source-release.zip
echo
echo Done Downloading to ${DST_BASE_DIR}
# Optionally verify signatures: skip on --nvalidate, run on --validate,
# otherwise ask the user.
[ ${VALIDATE} == 0 ] && exit
[ ${VALIDATE} == 1 ] || [ ${NQUERY} ] || confirm "Do you want to check the bundle signatures and compare source bundles?" || exit
cd ${ABS_BASE_DIR}
echo
echo "If the following bundle gpg signature checks fail, you may need to"
echo "import the project's list of signing keys to your keyring"
echo " $ gpg ${DST_BASE_DIR}/KEYS # show the included keys"
echo " $ gpg --import ${DST_BASE_DIR}/KEYS"
echo
echo "Verifying the source bundle signatures..."
(set -x; $BUILDTOOLS_DIR/check_sigs.sh ${DST_VER_DIR})
|
def merge_arrays(A, B):
    """Return a new sorted list containing every element of A and B."""
    return sorted(A + B)
<gh_stars>1-10
/*
MANGO Multimedia Development Platform
Copyright (C) 2012-2020 Twilight Finland 3D Oy Ltd. All rights reserved.
*/
#pragma once
#include <mango/image/exif.hpp>
#include <mango/image/fourcc.hpp>
#include <mango/image/color.hpp>
#include <mango/image/format.hpp>
#include <mango/image/compression.hpp>
#include <mango/image/decoder.hpp>
#include <mango/image/encoder.hpp>
#include <mango/image/blitter.hpp>
#include <mango/image/surface.hpp>
#include <mango/image/quantize.hpp>
|
python3 /workspace/mnt/group/general-reg/denglei/code/EAST-master/test.py |
#!/bin/bash
# Abort on the first failing command.
set -e
# Run from the repository root (the parent of this script's directory).
cd "$(dirname "$0")/.."
# Rebuild on any change under src/: -r restarts the running build (SIGTERM),
# and any extra arguments are forwarded to build-exec.sh.
watchexec -r -w src --signal SIGTERM -- ./dev/build-exec.sh "$@"
|
<gh_stars>0
import {zjson, zng} from "zealot"
import {
addHeadProc,
getHeadCount,
hasHeadOrTailProc,
joinParts,
parallelizeProcs,
splitParts
} from "../lib/Program"
import brim from "./"
import {createCell} from "./cell"
// Tests: appending include/exclude clauses for a cell to a program string.
describe("excluding and including", () => {
const field = createCell(
new zng.Field("uid", new zng.Primitive("string", "123"))
)
test("excluding a field", () => {
const program = brim
.program("_path=weird")
.exclude(field)
.string()
expect(program).toEqual('_path=weird uid!="123"')
})
test("excluding a field with a pipe", () => {
const data = new zng.Primitive("string", "HTTP")
const program = brim
.program(
'tx_hosts=fc00:db20:35b:7399::5:135e fuid!="F2nyqx46YRDAYe4c73" | sort'
)
.exclude(createCell(new zng.Field("source", data)))
.string()
expect(program).toEqual(
'tx_hosts=fc00:db20:35b:7399::5:135e fuid!="F2nyqx46YRDAYe4c73" source!="HTTP" | sort'
)
})
test("excluding a field with two pipes", () => {
const program = brim
.program("_path=weird | sort | filter 1")
.exclude(field)
.string()
expect(program).toEqual('_path=weird uid!="123" | sort | filter 1')
})
test("including a field with two pipes", () => {
const program = brim
.program("_path=weird | sort | filter 1")
.include(field)
.string()
expect(program).toEqual('_path=weird uid="123" | sort | filter 1')
})
})
// Tests: turning an aggregation result row into a drill-down filter program.
describe("drill down", () => {
const columns = [
{name: "id", type: "record", of: [{name: "orig_h", type: "addr"}]},
{name: "proto", type: "enum"},
{name: "query", type: "string"},
{name: "count", type: "count"}
] as zjson.Column[]
const result = new zng.Record(columns, [
["192.168.0.54"],
"udp",
"WPAD",
"24"
])
test("when there is no leading filter", () => {
const program = brim
.program("count() by id.orig_h")
.drillDown(result)
.string()
expect(program).toBe("id.orig_h=192.168.0.54")
})
test("combines keys in the group by proc", () => {
const program = brim
.program("_path=dns | count() by id.orig_h, proto, query | sort -r")
.drillDown(result)
.string()
expect(program).toBe(
'_path=dns id.orig_h=192.168.0.54 proto=udp query="WPAD"'
)
})
test("removes *", () => {
const program = brim
.program("* | count() by id.orig_h")
.drillDown(result)
.string()
expect(program).toBe("id.orig_h=192.168.0.54")
})
test("easy peasy", () => {
const program = brim
.program("names james | count() by proto")
.drillDown(result)
.string()
expect(program).toBe("names james proto=udp")
})
test("count by and filter the same", () => {
const result = new zng.Record(
[
{type: "string", name: "md5"},
{type: "count", name: "count"}
],
["123", "1"]
)
const program = brim
.program("md5=123 | count() by md5 | sort -r | head 5")
.drillDown(result)
.string()
expect(program).toEqual('md5=123 md5="123"')
})
test("filter query", () => {
const result = new zng.Record(
[
{name: "md5", type: "string"},
{name: "count", type: "count"}
],
["9f51ef98c42df4430a978e4157c43dd5", "21"]
)
const program = brim
.program(
'_path=files filename!="-" | count() by md5,filename | count() by md5 | sort -r | filter count > 1'
)
.drillDown(result)
.string()
expect(program).toEqual(
'_path=files filename!="-" md5="9f51ef98c42df4430a978e4157c43dd5"'
)
})
})
// Tests: appending a "count() by <field>" aggregation to a program.
describe("count by", () => {
test("empty program", () => {
const data = new zng.Primitive("string", "heyo")
const field = createCell(new zng.Field("_path", data))
const program = brim
.program()
.countBy(field)
.string()
expect(program).toBe("| count() by _path")
})
test("append a count to an existing query", () => {
const data = new zng.Primitive("string", "heyo")
const field = createCell(new zng.Field("query", data))
const program = brim
.program("dns")
.countBy(field)
.string()
expect(program).toBe("dns | count() by query")
})
})
// Tests: adding a sort proc, or replacing an existing one.
describe("sort by", () => {
test("sort asc does not yet exist", () => {
const program = brim
.program("* | count() by _path")
.sortBy("count", "asc")
.string()
expect(program).toBe("* | count() by _path | sort count")
})
test("sort desc does not yet exist", () => {
const program = brim
.program("* | count() by _path")
.sortBy("count", "desc")
.string()
expect(program).toBe("* | count() by _path | sort -r count")
})
test("sort asc when one already exists", () => {
const program = brim
.program("* | sort name")
.sortBy("count", "asc")
.string()
expect(program).toBe("* | sort count")
})
test("sort desc when one already exists", () => {
const program = brim
.program("* | sort name")
.sortBy("count", "desc")
.string()
expect(program).toBe("* | sort -r count")
})
})
// Tests: detecting aggregating ("analytics") procs in a program.
describe("#hasAnalytics()", () => {
test("head proc does not have analytics", () => {
expect(brim.program("* | head 2").hasAnalytics()).toBe(false)
})
test("sort proc does not have analytics", () => {
expect(brim.program("* | sort -r id.resp_p").hasAnalytics()).toBe(false)
})
test("every proc does contain analytics", () => {
expect(brim.program("* | every 1hr count()").hasAnalytics()).toBe(true)
})
test("parallel procs when one does have analytics", () => {
expect(
brim
.program("* | split ( => every 1hr count() => count() by id.resp_h)")
.hasAnalytics()
).toBe(true)
})
test("parallel procs when both do not have analytics", () => {
expect(brim.program("* | head 100; head 200").hasAnalytics()).toBe(false)
})
test("when there are no procs", () => {
expect(brim.program("*").hasAnalytics()).toBe(false)
})
test("for a crappy string", () => {
expect(brim.program("-r").hasAnalytics()).toBe(false)
})
test("for sequential proc", () => {
expect(
brim.program("*google* | head 3 | sort -r id.resp_p").hasAnalytics()
).toBe(false)
})
test("for cut proc", () => {
expect(
brim
.program("* | split ( => cut uid, _path => cut uid) | tail 1")
.hasAnalytics()
).toBe(true)
})
test("for filter proc", () => {
expect(brim.program("* | filter _path=conn").hasAnalytics()).toBe(false)
})
})
// Tests: addHeadProc appends "| head N" unless a head proc already exists.
describe("#addHeadProc", () => {
test("when no head exists", () => {
expect(addHeadProc("_path=dns", 300)).toBe("_path=dns | head 300")
})
test("when head exists", () => {
expect(addHeadProc("_path=dns | head 45", 300)).toBe("_path=dns | head 45")
})
test("when sort exists", () => {
expect(addHeadProc("_path=dns | sort ts", 300)).toBe(
"_path=dns | sort ts | head 300"
)
})
test("when sort and head exists", () => {
expect(addHeadProc("_path=dns | head 23 | sort ts", 300)).toBe(
"_path=dns | head 23 | sort ts"
)
})
})
// Tests: getHeadCount extracts the head proc's limit (0 when absent).
describe("#getHeadCount", () => {
test("with one head proc", () => {
expect(getHeadCount("* | head 1000")).toBe(1000)
})
test("with many procs", () => {
expect(getHeadCount("* | split ( => head 1000 => count())")).toBe(1000)
})
test("with no head", () => {
expect(getHeadCount("*")).toBe(0)
})
})
// Tests: hasHeadOrTailProc detects a head or tail proc in a program.
describe("#hasHeadCount", () => {
test("#hasHeadCount when false", () => {
expect(hasHeadOrTailProc("*")).toBe(false)
})
test("#hasHeadCount when true", () => {
expect(hasHeadOrTailProc("* | head 1")).toBe(true)
})
})
// Tests: splitParts separates a program into [filter, procs].
describe("Get Parts of Program", () => {
const program = "md5=123 _path=files | count() by md5 | sort -r | head 1"
test("get filter part", () => {
expect(splitParts(program)[0]).toBe("md5=123 _path=files")
})
test("get filter part when none", () => {
expect(splitParts("* | count()")[0]).toBe("*")
})
test("get proc part", () => {
expect(splitParts(program)[1]).toBe("count() by md5 | sort -r | head 1")
})
test("get proc part when none", () => {
expect(splitParts("_path=files")[1]).toEqual("")
})
})
// Tests: joinParts recombines a filter and procs into one program.
describe("Join Parts of Program", () => {
const filter = "md5=123"
const proc = "count() by _path"
test("#joinParts", () => {
expect(joinParts(filter, proc)).toBe("md5=123 | count() by _path")
})
test("#joinParts when empty filter", () => {
expect(joinParts("", proc)).toBe("* | count() by _path")
})
})
// Tests: merging programs sharing one filter into a single split proc.
describe("Parallelizing multiple programs", () => {
const a = "md5=123 | count()"
const b = "md5=123 | head 5"
const c = "md5=123 | count() by _path"
test("#parallelizeProcs when programs have same filter", () => {
expect(parallelizeProcs([a, b, c])).toEqual(
"md5=123 | split ( => count() => head 5 => count() by _path)"
)
})
test("#parallelizeProcs when programs do no have same filter", () => {
expect(() => {
parallelizeProcs([a, b, c, "_path=conn"])
}).toThrow(
"Filters must be the same in all programs: md5=123, md5=123, md5=123, _path=conn"
)
})
})
// Tests: extracting the leading filter expression of a program.
describe("extracting the first filter", () => {
test("*", () => {
expect(brim.program("*").filter()).toEqual("*")
})
test("_path=conn", () => {
expect(brim.program("_path=conn").filter()).toEqual("_path=conn")
})
test("_path=conn | sum(duration)", () => {
expect(brim.program("_path=conn | sum(duration)").filter()).toEqual(
"_path=conn"
)
})
test("_path=conn | filter a", () => {
// This is questionable. We'd need another way to extract the filter if we
// want the result of this to be _path=conn | filter a
expect(brim.program("_path=conn | filter a").filter()).toEqual("_path=conn")
})
test("count()", () => {
expect(brim.program("count()").filter()).toEqual("*")
})
test("dns | count() | filter num > 1", () => {
expect(brim.program("dns | count() | filter num > 1").filter()).toEqual(
"dns"
)
})
})
// Tests: appending a cut proc with the given field list.
describe("cut", () => {
test("cut some fields", () => {
expect(
brim
.program("my filter")
.cut("ts", "_path")
.string()
).toBe("my filter | cut ts, _path")
})
})
|
<gh_stars>0
package edu.neu.madcourse.thingshub.FrontEnd;
import android.app.ProgressDialog;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import edu.neu.madcourse.thingshub.R;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import de.hdodenhof.circleimageview.CircleImageView;
import com.google.android.gms.tasks.Continuation;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.auth.AuthResult;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import com.google.firebase.storage.FirebaseStorage;
import com.google.firebase.storage.StorageReference;
import com.google.firebase.storage.UploadTask;
import com.squareup.picasso.Picasso;
import com.theartofdev.edmodo.cropper.CropImage;
import com.theartofdev.edmodo.cropper.CropImageView;
import java.util.HashMap;
import java.util.Map;
// Screen showing the signed-in user's account: name, signature text, and
// avatar image. Signature and avatar URL are stored in the Firebase Realtime
// Database under /Users/<userName>; the avatar file itself goes to Firebase
// Storage under "User Images/<userName>.jpg".
public class AccountActivity extends AppCompatActivity {
// UI widgets (bound in initializeFields()).
private Button updateButton;
private Button friendsButton;
private Button profileButton;
private TextView username;
private EditText userSignature;
private CircleImageView userImage;
// Name of the account being shown; passed in via the launch intent.
private String userName;
// Root of the Realtime Database.
private DatabaseReference RootRef;
// Storage folder for avatar images.
private StorageReference userImageRef;
private FirebaseAuth mAuth;
private ProgressDialog loadingBar;
private Uri imageUri;
// Profile listener; attached in onResume, detached in onPause.
ValueEventListener valueEventListener;
// Request code for the image-picker intent.
private static final int PICTURE_NUM = 1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_account);
// NOTE(review): assumes the launch intent always carries extras with a
// "username" key; getExtras() returning null would NPE — confirm callers.
Bundle extras = getIntent().getExtras();
userName = extras.getString("username");
RootRef = FirebaseDatabase.getInstance().getReference();
userImageRef = FirebaseStorage.getInstance().getReference().child("User Images");
// Enable sign in anonymously in case sign in error
mAuth = FirebaseAuth.getInstance();
signInAnonymously();
initializeFields();
// update signature
updateButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
updateSettings();
}
});
//go friends page
friendsButton.setOnClickListener(view -> {
Intent friendIntent = new Intent();
friendIntent.setClass(AccountActivity.this, FriendActivity.class);
startActivity(friendIntent);
});
//go profile page
profileButton.setOnClickListener(view -> {
Intent profileIntent = new Intent();
profileIntent.putExtra("UserName", userName);
profileIntent.setClass(AccountActivity.this, ThingsList_activity.class);
startActivity(profileIntent);
});
// update user image: tapping the avatar opens the system image picker.
userImage.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent pictureIntent = new Intent();
pictureIntent.setAction(Intent.ACTION_GET_CONTENT);
pictureIntent.setType("image/*");
startActivityForResult(pictureIntent, PICTURE_NUM);
}
});
}
// Signs in anonymously so database/storage access works even without a
// regular account session; failure is silently ignored.
private void signInAnonymously() {
mAuth.signInAnonymously().addOnSuccessListener(this, new OnSuccessListener<AuthResult>() {
@Override
public void onSuccess(AuthResult authResult) {
// Continue
}
}).addOnFailureListener(this, new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception exception) {
}
});
}
// Binds all views and creates the upload progress dialog.
private void initializeFields() {
username = (TextView) findViewById(R.id.account_name);
updateButton = (Button) findViewById(R.id.account_update_button);
userSignature = (EditText) findViewById(R.id.account_signature);
userImage = (CircleImageView) findViewById(R.id.account_image);
loadingBar = new ProgressDialog(this);
friendsButton = findViewById(R.id.account_friends_button);
profileButton = findViewById(R.id.account_profile_button);
}
// Handles two results: the image picker (PICTURE_NUM) which launches the
// crop UI, and the crop UI itself, whose output is uploaded to Storage and
// whose download URL is then stored under /Users/<userName>/Image.
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == PICTURE_NUM && resultCode == RESULT_OK && data != null) {
imageUri = data.getData();
userImage.setImageURI(imageUri);
CropImage.activity()
.setGuidelines(CropImageView.Guidelines.ON)
.setAspectRatio(1, 1)
.start(this);
}
if (requestCode == CropImage.CROP_IMAGE_ACTIVITY_REQUEST_CODE) {
CropImage.ActivityResult result = CropImage.getActivityResult(data);
if (resultCode == RESULT_OK) {
loadingBar.setTitle("Set User Image");
loadingBar.setMessage("Image is uploading...");
loadingBar.setCanceledOnTouchOutside(false);
loadingBar.show();
Uri resultUri = result.getUri();
// One avatar per user, keyed by user name.
final StorageReference filePath = userImageRef.child(userName + ".jpg");
filePath.putFile(resultUri).continueWith(new Continuation<UploadTask.TaskSnapshot, Object>() {
@Override
public Object then(@NonNull Task<UploadTask.TaskSnapshot> task) throws Exception {
if (task.isSuccessful()) {
// Upload done: fetch the public download URL and persist it.
filePath.getDownloadUrl().addOnCompleteListener(new OnCompleteListener<Uri>() {
@Override
public void onComplete(@NonNull Task<Uri> task) {
if (task.isSuccessful()) {
final String downloadedUrl = task.getResult().toString();
RootRef.child("Users").child(userName).child("Image")
.setValue(downloadedUrl)
.addOnCompleteListener(new OnCompleteListener<Void>() {
@Override
public void onComplete(@NonNull Task<Void> task) {
if (task.isSuccessful()) {
Toast.makeText(
AccountActivity.this,
"Image save successfully!",
Toast.LENGTH_SHORT)
.show();
} else {
String error = task.getException().toString();
Toast.makeText(
AccountActivity.this,
"Save image error: " + error,
Toast.LENGTH_SHORT)
.show();
}
loadingBar.dismiss();
}
});
} else {
String error = task.getException().toString();
Toast.makeText(
AccountActivity.this,
"Download image error: " + error,
Toast.LENGTH_SHORT)
.show();
loadingBar.dismiss();
}
}
});
} else {
String error = task.getException().toString();
Toast.makeText(
AccountActivity.this,
"Upload image error: " + error,
Toast.LENGTH_SHORT)
.show();
loadingBar.dismiss();
}
return null;
}
});
}
}
}
// Persists the signature text under /Users/<userName>/Signature.
// NOTE(review): profileMap is built but never used — looks like leftover code.
private void updateSettings() {
String setUserSignature = userSignature.getText().toString();
Map<String, String> profileMap = new HashMap<>();
profileMap.put("Signature", setUserSignature);
DatabaseReference dbRef = FirebaseDatabase.getInstance().getReference("/Users/"+ userName + "/Signature");
dbRef.setValue(setUserSignature)
.addOnCompleteListener(new OnCompleteListener<Void>() {
@Override
public void onComplete(@NonNull Task<Void> task) {
if (task.isSuccessful()) {
Toast.makeText(
AccountActivity.this,
"Profile update successfully!",
Toast.LENGTH_SHORT)
.show();
} else {
String error = task.getException().toString();
Toast.makeText(
AccountActivity.this,
"error: " + error,
Toast.LENGTH_SHORT)
.show();
}
}
});
}
// Subscribes to /Users/<userName> and mirrors Signature/Image into the UI.
private void getUserProfile() {
RootRef.child("Users").child(userName)
.addValueEventListener(valueEventListener = new ValueEventListener() {
@Override
public void onDataChange(@NonNull DataSnapshot snapshot) {
if (snapshot.exists()) {
if (snapshot.hasChild("Signature")) {
String retrieveSignature = snapshot.child("Signature").getValue().toString();
userSignature.setText(retrieveSignature);
}
if (snapshot.hasChild("Image") && !snapshot.child("Image").getValue().toString().isEmpty()) {
String retrieveImage = snapshot.child("Image").getValue().toString();
Picasso.get().load(retrieveImage).into(userImage);
}
}
username.setText(userName);
}
@Override
public void onCancelled(@NonNull DatabaseError error) {
}
});
}
@Override
protected void onPause() {
super.onPause();
// Detach the listener attached by getUserProfile() to avoid leaks.
RootRef.child("Users").child(userName).removeEventListener(valueEventListener);
}
@Override
protected void onResume() {
super.onResume();
getUserProfile();
}
}
|
export const setInnerPadding = (layout: { colspan: number }, tile: { styling?: { background?: { color: string } }, isFullWidth: boolean }): string | null => {
const { colspan } = layout;
const { styling, isFullWidth } = tile;
if (colspan !== dimensions.cols) {
return 'pl-10';
}
if (colspan === dimensions.cols && !isFullWidth && styling?.background?.color) {
return 'px-10';
}
return null;
}; |
#!/bin/bash
# Copyright (C) 2019-2020 F5 Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# shellcheck source=src/lib/bash/common.sh
source "$(realpath "$(dirname "${BASH_SOURCE[0]}")")/common.sh"
# shellcheck source=src/lib/bash/util/logger.sh
source "$(realpath "$(dirname "${BASH_SOURCE[0]}")")/util/logger.sh"
####################################################################
# Reads the given isolinux.cfg file to extract the "append" section from it.
# The output is used as the kerne_args for the custom qemu instance to install
# BIG-IP ISOs.
#
# Return value:
# 0 for success, and 1 otherwise.
# Also prints the output in stdout.
#
function get_kernel_args {
local boot_conf="$1"
if [[ -z "$boot_conf" ]]; then
log_error "Usage: ${FUNCNAME[0]} <iso>"
return 1
elif [[ ! -s "$boot_conf" ]]; then
log_error "$boot_conf is empty."
return 1
fi
local found_append=0
local kernel_args first rest
# Read the given file and look for the lines starting with "append".
# `read first rest` splits on the first whitespace, so $rest keeps the
# full remainder of the line as the kernel arguments.
while read -r first rest; do
if [ "$first" == "append" ]; then
kernel_args="$rest"
found_append=$(( found_append + 1 ))
fi
done < "$boot_conf"
# No append section found ?
# Exactly one append directive is required; zero or multiple are errors.
if [[ $found_append == 0 ]]; then
log_error "Missing append directive in $boot_conf."
return 1
elif [[ $found_append -gt 1 ]]; then
log_error "Multiple append directive in $boot_conf."
return 1
fi
# Append VE Platform ID to the kernel args.
kernel_args="$kernel_args mkvm_pid=Z100 mkvm_log_level=0"
echo "$kernel_args"
}
#####################################################################
#####################################################################
# Create two markers to know whether the image is 1 slot or allows all modules
# The markers will be propagated to BIG-IP by post-install.
#
# Return value:
# 0 in case of success, 1 otherwise.
#
function add_one_boot_location_markers {
    # Drop marker files (consumed by post-install on BIG-IP) describing
    # whether the image has a single boot slot and/or allows all modules.
    local json_file="$1"
    local out_dir="$2"
    if [[ $# != 2 ]]; then
        log_error "Usage: ${FUNCNAME[0]} <json> <output dir>"
        return 1
    elif [[ ! -s "$json_file" ]]; then
        log_error "$json_file is empty."
        return 1
    elif [[ ! -d "$out_dir" ]]; then
        log_error "$out_dir is missing."
        return 1
    fi
    local module
    module=$(jq -r '.modules' "$json_file" )
    if [[ -z "$module" ]]; then
        log_error "Failed to read .modules from '$json_file'"
        return 1
    elif ! is_supported_module "$module"; then
        log_error "Unsupported module '$module'."
        return 1
    fi
    local boot_loc
    boot_loc=$(jq -r '.boot_locations' "$json_file" )
    # BUG FIX: this previously re-tested "$module", so an empty
    # .boot_locations value was silently accepted.
    if [[ -z "$boot_loc" ]]; then
        log_error "Failed to read .boot_locations from '$json_file'"
        return 1
    elif ! is_supported_boot_locations "$boot_loc"; then
        log_error "Unsupported boot_locations '$boot_loc'."
        return 1
    fi
    local one_slot_marker=".one_slot_marker"
    if [[ $boot_loc == 1 ]]; then
        log_info "Add one slot marker $one_slot_marker"
        touch "$out_dir"/$one_slot_marker
    fi
    local all_modules_marker=".all_modules_marker"
    if [[ "$module" == "all" ]]; then
        log_info "Add all modules marker $all_modules_marker"
        touch "$out_dir"/$all_modules_marker
    fi
}
#####################################################################
#####################################################################
# Generates vm.install.sh that provides the partition sizes to the installer
# for the given image installation.
#
function generate_vm_install_script {
    # Writes a profile script (vm.install.sh) exporting the TMI_VOLUME_*
    # partition sizes read from the raw_disk JSON, for the ISO installer.
    local vm_install_script="$1"
    local disk_json="$2"
    if [[ $# != 2 ]]; then
        log_error "Usage: ${FUNCNAME[0]} <vm.install.sh path> <raw_disk json path>"
        return 1
    elif [[ ! -s "$disk_json" ]]; then
        log_error "$disk_json is missing or empty."
        return 1
    fi
    # Clean-up previous leftover, if any. The ">>" appends below then start
    # from an empty file.
    rm -f "$vm_install_script"
    # Associative array for vm.install.sh field and corresponding disk_json entries.
    declare -A tmi_volume_attributes=( \
        ["TMI_VOLUME_FIX_CONFIG_MIB"]=".attributes.fix_config_mib"
        ["TMI_VOLUME_FIX_ROOT_MIB"]=".attributes.fix_root_mib"
        ["TMI_VOLUME_FIX_USR_MIB"]=".attributes.fix_usr_mib"
        ["TMI_VOLUME_FIX_VAR_MIB"]=".attributes.fix_var_mib"
        ["TMI_VOLUME_FIX_BOOT_MIB"]=".attributes.fix_boot_mib"
        ["TMI_VOLUME_FIX_SWAP_MIB"]=".attributes.fix_swap_mib"
        ["TMI_VOLUME_FIX_SWAPVOL_MIB"]=".attributes.fix_swapvol_mib"
        ["TMI_VOLUME_FIX_SHARE_MIB"]=".attributes.fix_share_mib"
        ["TMI_VOLUME_FIX_APPDATA_MIB"]=".attributes.fix_appdata_mib"
        ["TMI_VOLUME_FIX_LOG_MIB"]=".attributes.fix_log_mib"
    )
    # Write the partition values to vm_install_script.
    # NOTE(review): jq -r prints the literal string "null" for a missing key,
    # which passes the -z checks below -- confirm inputs always define these.
    echo "#!/bin/bash" >> "$vm_install_script"
    echo "export TMI_VOLUME_SET_COUNT=1" >> "$vm_install_script"
    local entry value
    for entry in "${!tmi_volume_attributes[@]}"; do
        if ! value="$(jq -r "${tmi_volume_attributes[$entry]}" "$disk_json" )" || \
                [[ -z "$value" ]]; then
            log_error "Missing or Failed to read '${tmi_volume_attributes[$entry]}' from '$disk_json'."
            return 1
        fi
        # Write the entry=value pair.
        echo "export $entry=$value" >> "$vm_install_script"
    done
    # Is this a cloud platform? If so, write TMI_VADC_HYPERVISOR value.
    if value="$(jq -r ".is_cloud" "$disk_json")"; then
        if [[ $value == 1 ]]; then
            if ! value="$(jq -r ".platform" "$disk_json")" ||
                    [[ -z "$value" ]]; then
                log_error "Missing or Failed to read '.platform' from '$disk_json'."
                return 1
            fi
            echo "export TMI_VADC_HYPERVISOR=$value" >> "$vm_install_script"
        fi
    else
        log_error "Failed to read '.is_cloud' from '$disk_json'."
        return 1
    fi
    # Make the generated profile script executable.
    chmod 755 "$vm_install_script"
}
#####################################################################
#####################################################################
# Prepares the environment for the BIG-IP and EHF ISO installations as well as
# for the SELinux labeling boot before executing exec_qemu_system() to actually
# perform the aforementioned operations.
# Usage:
# install_iso_on_disk() raw_disk disk_json bigip_iso [hotfix_iso]
# where:
# raw_disk - RAW disk on which the given ISOs needs to be installed.
# disk_json - create_raw_disk.json output from earlier step that contains
# the details of the empty raw_disk.
# bigip_iso - BIG-IP RTM ISO.
# hotfix_iso - [Optional] EHF ISO.
#
# Return value:
# Returns 0 in case of success and 1 otherwise.
#
function install_iso_on_disk {
    local disk="$1"
    local disk_json="$2"
    local bigip_iso="$3"
    # Optional argument.
    local hotfix_iso="$4"
    if [[ $# -lt 3 ]]; then
        log_error "Usage: ${FUNCNAME[0]} <raw_disk> <raw_disk_json> <iso> [hotfix_iso]"
        return 1
    elif [[ ! -f "$disk" ]]; then
        log_error "RAW disk '$disk' doesn't exist."
        return 1
    elif [[ ! -s "$disk_json" ]]; then
        log_error "JSON file '$disk_json' is empty or doesn't exist."
        return 1
    elif [[ ! -s "$bigip_iso" ]]; then
        log_error "Invalid ISO '$bigip_iso' or it doesn't exist."
        return 1
    fi
    # RTM only installation or Hotfix installation as well?
    # When an EHF ISO follows, post-install is deferred to the hotfix pass.
    local skip_post_install=0
    [[ -n "$hotfix_iso" ]] && skip_post_install=1
    # Extract the default initrd and kernel images from the ISO.
    # NOTE: BOOT_DIR is deliberately not "local" -- update_initrd_image() reads it.
    BOOT_DIR="$TEMP_DIR/boot"
    local boot_initrd_base="$BOOT_DIR/initrd.base.img"
    # Ensure the dir is clean from previous runs:
    rm -fr "$BOOT_DIR"
    mkdir "$BOOT_DIR"
    local boot_vmlinuz="$BOOT_DIR/vmlinuz"
    isoinfo -R -x /isolinux/vmlinuz -i "$bigip_iso" > "$boot_vmlinuz"
    # Create a new updated initrd image with custom files.
    update_initrd_image "RTM" "$bigip_iso" "$boot_initrd_base" \
            "$skip_post_install" "$disk_json"
    # Build new kernel arguments to pass.
    local boot_conf="$BOOT_DIR/isolinux.cfg"
    isoinfo -R -x /isolinux/isolinux.cfg -i "$bigip_iso" > "$boot_conf"
    local kernel_args
    if ! kernel_args="$(get_kernel_args "$boot_conf")"; then
        log_error "Kernel arg extraction failed."
        return 1
    fi
    # Set the kernel disk to vda (paravirtual).
    local iso_kernel_args="$kernel_args mkvm_cpu_lm mkvm_device=/dev/vda"
    local qemu_logfile="$TEMP_DIR/qemu.iso.log"
    local qemu_pidfile="$TEMP_DIR/qemu.pid"
    exec_qemu_system "$disk" "$bigip_iso" "$qemu_pidfile" "$boot_vmlinuz" \
            "$boot_initrd_base" "$iso_kernel_args" "$qemu_logfile" \
            "installing RTM Image"
    # qemu-system exits 0 even on install failure; success is detected by
    # grepping the captured log (see exec_qemu_system's contract).
    if ! grep -q "MKVM FINAL STATUS = SUCCESS" "$qemu_logfile"; then
        log_error "RTM ISO installation failed."
        return 1
    fi
    # Apply HF if present - avoid extra mkvm options and use only
    # the common kernel command part.
    if [[ -n "$hotfix_iso" ]]; then
        skip_post_install=0
        update_initrd_image "HOTFIX" "$hotfix_iso" "$boot_initrd_base" \
                "$skip_post_install" "$disk_json"
        qemu_logfile="$TEMP_DIR/qemu.hotfix.log"
        exec_qemu_system "$disk" "$hotfix_iso" "$qemu_pidfile" "$boot_vmlinuz" \
                "$boot_initrd_base" "$kernel_args" "$qemu_logfile" \
                "installing HF Image"
        if ! grep -q "HOTFIXVM FINAL STATUS = SUCCESS" "$qemu_logfile"; then
            log_error "Hotfix ISO installation failed."
            return 1
        fi
    fi
    qemu_logfile="$TEMP_DIR/qemu.selinux_relabeling.log"
    # Boot the instance to execute selinux relabeling...
    # (the 0 placeholders skip cdrom/kernel/initrd/append -- plain disk boot).
    exec_qemu_system "$disk" 0 "$qemu_pidfile" 0 0 0 "$qemu_logfile" \
            "performing selinux relabeling"
    local platform
    platform=$(jq -r '.platform' "$disk_json" )
    if [[ -z "$platform" ]]; then
        log_error "Failed to read .platform from '$disk_json'"
        return 1
    fi
    if is_supported_cloud "$platform"; then
        local artifacts_dir
        artifacts_dir="$(get_config_value "ARTIFACTS_DIR")"
        # Is SELinux labeling done via legacy framework?
        if [[ -f "$artifacts_dir/.legacy_selinux_labeling" ]]; then
            if ! grep -q "SELinux relabeling finished successfully." "$qemu_logfile"; then
                log_error "SELinux labeling failed or skipped." \
                        "Check $qemu_logfile for complete logs"
                return 1
            fi
        else
            # Validate that final-cloud-setup successfully executed.
            if ! grep -q "Cloud setup succeeded." "$qemu_logfile"; then
                log_error "final-cloud-setup failed or skipped." \
                        "Check $qemu_logfile for complete logs"
                return 1
            fi
            # Validate that SELinux labeling successfully executed.
            if ! grep -q "SELinux targeted policy relabel is required." \
                    "$qemu_logfile"; then
                log_error "SELinux labeling failed or skipped." \
                        "Check $qemu_logfile for complete logs"
                return 1
            fi
        fi
    fi
    # Log the final disk geometry/info for the build record.
    print_qemu_disk_info "$disk" "raw"
}
#####################################################################
#####################################################################
# Call python script that copies files/dir to a predefined location.
# This location will be available during post-install to copy these files to the image.
function add_injected_files {
    # Delegate to read_injected_files.py, which stages user-provided files
    # into a predefined location that post-install later copies to the image.
    local top_call_dir="$1"
    if [[ $# -ne 1 ]]; then
        log_error "Usage: ${FUNCNAME[0]} <top call dir>"
        return 1
    fi
    if [[ ! -d "$top_call_dir" ]]; then
        log_error "$top_call_dir is missing or not a directory."
        return 1
    fi
    local script_home
    script_home="$(realpath "$(dirname "${BASH_SOURCE[0]}")")"
    if ! "$script_home"/../../bin/read_injected_files.py "$top_call_dir" "$(realpath .)"; then
        log_error "read_injected_files.py failed"
        return 1
    fi
    log_info "read_injected_files.py passed"
    return 0
}
#####################################################################
#####################################################################
# Adds src/bin/legacy files to the initramfs dest_dir.
#
function add_legacy_selinux_labeling_scripts {
    # Copy the legacy SELinux labeling helpers from src/bin/legacy into the
    # initramfs staging dir, plus a marker file checked later in the build.
    local dest_dir="$1"
    if [[ $# -ne 1 ]]; then
        log_error "Usage: ${FUNCNAME[0]} <dest dir>"
        return 1
    fi
    if [[ ! -d "$dest_dir" ]]; then
        log_error "$dest_dir is missing or not a directory."
        return 1
    fi
    log_debug "Adding legacy selinux labeling scripts."
    touch "$dest_dir/.legacy_selinux_labeling"
    local legacy_dir
    legacy_dir="$(realpath "$(dirname "${BASH_SOURCE[0]}")")"/../../bin/legacy/
    find "$legacy_dir" -type f -exec cp -v {} "$dest_dir" \;
}
#####################################################################
#####################################################################
# Extracts the base initrd image from the given ISO and injects VE specific
# files to it. This updated boot_initrd_base image is then used when booting
# the qemu instance with RTM/EHF ISO.
# Expected Arguments:
# - Arg1: "RTM" or "HOTFIX" (Represents both hotfix and ehf).
# - Arg2: ISO name from which the initrd image is extracted.
# - Arg3: Base boot initrd file path. This is the returned initrd
# image that the caller uses in its qemu run.
# - Arg4: Skip post-install or not. (post-install is skipped for RTM
# if Hotfix ISO is also provided.)
# - Arg5: Option argument that gives the raw_disk.json from the previous
# step in the build pipeline. This json file contains the disk
# size for the partitions.
#
# Returns 0 on success and 1 otherwise.
#
function update_initrd_image {
    # Extracts the base initrd from the given ISO, injects VE-specific files
    # (vm.install.sh, post-install, markers) and writes the result to
    # $boot_initrd_base for the qemu install boot.
    local install_mode=$1
    local iso_file=$2
    local boot_initrd_base="$3"
    local skip_post_install="$4"
    local disk_json="$5"
    if [[ $# != 5 ]]; then
        log_error "Usage: ${FUNCNAME[0]} <install-mode> <ISO> <boot-initrd-img path>" \
                " <skip-post-install> <raw-disk-json>"
        return 1
    elif [[ "$install_mode" != "RTM" ]] && [[ "$install_mode" != "HOTFIX" ]]; then
        log_error "Unknown install_mode = '$install_mode'."
        return 1
    elif [[ ! -s "$iso_file" ]]; then
        log_error "Expected an iso as the 2nd argument"
        return 1
    elif [[ ! -s "$disk_json" ]]; then
        log_error "JSON file '$disk_json' is empty or doesn't exist."
        return 1
    fi
    # Inject VE specific configuration and other scripts into the initrd.
    # This happens in following steps:
    # Step 1) Create a local file-system under $stage_initrd directory that
    #         exactly reflects an extracted initrd in terms of file system
    #         and relative directory paths.
    # Step 2) Once all the files are in the correct place under $stage_initrd
    #         unzip the boot_initrd locally.
    # Step 3) Append all files under $stage_initrd (as prepared in Step 1)
    #         to the unzipped INITRD image named "$unzipped_boot_initrd".
    # Step 4) Zip the "$unzipped_boot_initrd" back to bring it in the same
    #         state that it was in the beginning of Step 2).
    # FIX: declare start_task local so the timer doesn't leak into (or
    # clobber) the caller's scope.
    local start_task
    start_task=$(timer)
    log_info "Inserting VM installation environment for '$install_mode'"
    local boot_initrd="$BOOT_DIR/initrd.img"
    # Extract the initrd.img from the ISO.
    isoinfo -R -x /isolinux/initrd.img -i "$iso_file" > "$boot_initrd"
    # Clean-up the stale base initrd file from the previous run.
    [[ -f $boot_initrd_base ]] && rm -f "$boot_initrd_base"
    local top_call_dir
    top_call_dir=$(pwd)
    local stage_initrd="$TEMP_DIR/stage.initrd"
    # FIX: -p tolerates a leftover staging dir from an earlier failed run
    # (the rm -fr clean-up below is skipped on the early-return paths).
    mkdir -p "$stage_initrd"
    # Step 1) Create a local file-system under $stage_initrd directory.
    # NOTE(review): the early "return 1" paths below leave the cwd changed
    # (no popd) -- confirm callers tolerate this or restructure with a trap.
    pushd "$stage_initrd" > /dev/null || exit
    local etc_dir="etc"
    local artifacts_dir
    artifacts_dir="$(get_config_value "ARTIFACTS_DIR")"
    if [ "$install_mode" == "RTM" ]; then
        local profile_dir="$etc_dir/profile.d"
        local vm_install_script="$profile_dir/vm.install.sh"
        mkdir -p "$profile_dir"
        if ! generate_vm_install_script "$vm_install_script" "$disk_json"; then
            log_error "Failed to generate '$vm_install_script'."
            return 1
        fi
        # Is this an RTM + Hotfix bundle ? If so, delay adding post-install to the
        # initrd for now, in order to skip its execution during RTM ISO install.
        if [[ $skip_post_install == 0 ]]; then
            if ! add_injected_files "$top_call_dir"; then
                return 1
            fi
            log_info "Include post-install in initrd:"
            cp -f "$(realpath "$(dirname "${BASH_SOURCE[0]}")")"/../../bin/post-install \
                    "$etc_dir"
            if [[ -f "$artifacts_dir/.legacy_selinux_labeling" ]]; then
                add_legacy_selinux_labeling_scripts "$etc_dir"
            fi
        else
            log_info "Skipped copying post-install in initrd as this is an RTM + HF install."
        fi
        if ! add_one_boot_location_markers "$disk_json" "$etc_dir"; then
            log_error "add_one_boot_location_markers() failed."
            return 1
        fi
    elif [ "$install_mode" == "HOTFIX" ]; then
        mkdir -p "$etc_dir"
        if ! add_injected_files "$top_call_dir"; then
            return 1
        fi
        log_info "Include post-install in initrd:"
        cp -f "$(realpath "$(dirname "${BASH_SOURCE[0]}")")"/../../bin/post-install \
                "$etc_dir"
        if [[ -f "$artifacts_dir/.legacy_selinux_labeling" ]]; then
            add_legacy_selinux_labeling_scripts "$etc_dir"
        fi
    fi
    # Step 2) Unzip the boot_initrd image.
    gunzip -S .img "$boot_initrd"
    local unzipped_boot_initrd="${boot_initrd%.*}"
    # Step 3) Append new files to the initrd.
    log_info "Append the new files in INITRD image: $unzipped_boot_initrd"
    find . | cpio -o -H newc -A -F "$unzipped_boot_initrd"
    # Step 4) Zip the appended initrd.
    gzip -c --best "$unzipped_boot_initrd" > "$boot_initrd_base"
    popd > /dev/null || exit
    # Clean-up.
    rm -fr "$stage_initrd"
    rm -f "$unzipped_boot_initrd"
    rm -f "$boot_initrd"
    log_info "Inserting VM installation environment for '$install_mode' -- elapsed time:" \
            "$(timer "$start_task")"
}
#####################################################################
#####################################################################
# Executes qemu-system-x86_64 with the given cmdline arguments. Pass 0 as the
# value for all options that should be skipped.
#
# Usage:
# exec_qemu_system() disk cd_disk pidfile kernel initrd append logfile tag
# where:
# disk - RAW disk on which the given qemu operation will be run.
# cd_disk - Bootable ISO (RTM or EHF) used for installation. Pass 0 as
# the value if not an ISO installation step.
# pidfile - Qemu process-id file path.
# kernel - Use the given bzImage as the kernel image. Pass 0 to skip.
# initrd - Use the given file as the initial ram disk. Pass 0 to skip.
# append - Space separated cmdline arguments for the passed-in kernel.
# Pass 0 to skip.
# logfile - Log filepath where the output from qemu-system gets stored.
# tag - Verbose tag describing given operation.
#
# Return value:
# Returns 1 in case of malformed arguments. However, it is worth noting that
# qemu-system-x86_64 returns 0 even in the case of failure to install the ISO
# or boot the instance. For that reason, the caller should always check the
# contents of logfile to check if the function execution actually succeeded
# instead of relying on the return value alone.
#
function exec_qemu_system {
    local disk="$1"
    local cd_disk="$2"
    local pidfile="$3"
    local kernel="$4"
    local initrd="$5"
    local append="$6"
    local logfile="$7"
    local tag="$8"
    if [[ $# -ne 8 ]]; then
        # FIX: the usage string previously omitted the required <tag> argument.
        log_error "Usage: ${FUNCNAME[0]} <disk> <cd_disk> <pidfile> <kernel>" \
                "<initrd> <append> <logfile> <tag>"
        return 1
    elif [[ "$disk" == "0" ]] || [[ ! -f "$disk" ]]; then
        # disk is a required argument.
        log_error "disk = '$disk', invalid or missing disk."
        return 1
    elif [[ "$pidfile" == "0" ]]; then
        # pidfile is a required argument.
        log_error "pidfile = '$pidfile', must specify a pidfile."
        return 1
    elif [[ "$logfile" == "0" ]]; then
        # logfile is a required argument.
        log_error "logfile = '$logfile', must specify a logfile."
        return 1
    elif [[ "$kernel" != "0" ]] && [[ ! -s "$kernel" ]]; then
        # If a kernel is passed, it must be a non-empty file.
        log_error "kernel = '$kernel', is an empty file."
        return 1
    fi
    # Common cmdline arguments.
    # To watch the GUI execution - remove -nographic and run the following
    # command:
    #     vncviewer -ViewOnly localhost:<Port number in the qemu output>
    # -no-reboot option because some qemu runs (like SELinux labeling) trigger
    # a reboot at the end, which must be blocked to avoid booting TMOS.
    local cmd_line_arg="$OPTION_KVM_ENABLED -nographic -m 2048 -machine kernel_irqchip=off -no-reboot"
    # Append the disk to the cmd_line_arg.
    cmd_line_arg="$cmd_line_arg -drive file=$disk,format=raw,if=virtio,cache=writeback"
    if [[ "$cd_disk" != "0" ]]; then
        cmd_line_arg="$cmd_line_arg -cdrom $cd_disk"
        # Make cd-drive the first boot device.
        cmd_line_arg="$cmd_line_arg -boot d"
    else
        # Make the hard-disk first boot device.
        cmd_line_arg="$cmd_line_arg -boot c"
    fi
    # Append the pidfile argument.
    cmd_line_arg="$cmd_line_arg -pidfile $pidfile"
    if [[ "$kernel" != "0" ]]; then
        cmd_line_arg="$cmd_line_arg -kernel $kernel"
    fi
    if [[ "$initrd" != "0" ]]; then
        cmd_line_arg="$cmd_line_arg -initrd $initrd"
    fi
    local start_task
    start_task=$(timer)
    log_info "qemu-system $tag -- start time: $(date +%T)"
    # qemu-system-x86_64 doesn't handle empty string well. Therefore instead of running
    # the execute_cmd() that internally handles this, manage the progress-bar from here
    # directly.
    local marker_file
    local waiter_pid
    marker_file="$(mktemp -p "$(get_config_value "ARTIFACTS_DIR")" "${FUNCNAME[0]}".XXXXXX)"
    waiter "$marker_file" &
    waiter_pid="$!"
    log_trace "Created child waiter process:$waiter_pid"
    local tool_log_file
    if is_msg_level_high "$DEFAULT_LOG_LEVEL" && [[ -n "$LOG_FILE_NAME" ]]; then
        tool_log_file="$LOG_FILE_NAME"
    else
        tool_log_file="/dev/null"
    fi
    # append takes space separated value-pairs that need special handling
    # because qemu-system-x86_64 doesn't handle empty string well and fails
    # complaining that the given drive is empty.
    if [[ "$append" == "0" ]]; then
        log_debug "Executing: qemu-system-x86_64 $cmd_line_arg"
        # shellcheck disable=SC2086
        # Double quoting cmd_line_arg fails with qemu as it interprets entire
        # string as a single argument.
        qemu-system-x86_64 $cmd_line_arg < /dev/null 2>&1 | tee -a "$logfile" "$tool_log_file" > /dev/null
    else
        log_debug "Executing: qemu-system-x86_64 $cmd_line_arg -append \"$append\""
        # shellcheck disable=SC2086
        qemu-system-x86_64 $cmd_line_arg \
                -append "$append" < /dev/null 2>&1 | tee -a "$logfile" "$tool_log_file" > /dev/null
    fi
    # Clean-up the marker file to signal the child process to gracefully exit.
    rm -f "$marker_file"
    # Wait for the child process to exit. It should happen within 5 seconds as the
    # signaling marker file has been already removed.
    wait "$waiter_pid"
    # Add a new-line to pretty up the progress-bar.
    echo ""
    log_info "qemu-system $tag -- elapsed time: $(timer "$start_task")"
}
#####################################################################
|
/*
* Copyright 2018-2020 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pranavpandey.android.dynamic.support.splash;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.ColorInt;
import androidx.annotation.LayoutRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import com.pranavpandey.android.dynamic.support.theme.DynamicTheme;
/**
* A fragment to display splash before launching the main activity.
* <p>It will be used internally by the {@link DynamicSplashActivity}.
*/
public class DynamicSplashFragment extends Fragment {

    /**
     * Fragment argument key to set the splash layout.
     */
    private static final String ADS_ARGS_SPLASH_LAYOUT_RES = "ads_args_splash_layout_res";

    /**
     * Async task to perform any background operation while showing the splash.
     */
    private SplashTask mSplashTask;

    /**
     * Listener to implement the splash screen and to get various callbacks while showing
     * the splash.
     */
    private DynamicSplashListener mDynamicSplashListener;

    /**
     * View used by this fragment.
     */
    protected View mView;

    /**
     * Initialize the new instance of this fragment.
     *
     * @param layoutRes The layout resource for this fragment.
     *
     * @return An instance of {@link DynamicSplashFragment}.
     */
    public static Fragment newInstance(@LayoutRes int layoutRes) {
        DynamicSplashFragment fragment = new DynamicSplashFragment();
        Bundle args = new Bundle();
        args.putInt(ADS_ARGS_SPLASH_LAYOUT_RES, layoutRes);
        fragment.setArguments(args);

        return fragment;
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Retain this fragment instance across configuration changes so a
        // running splash task is not recreated on rotation.
        // NOTE(review): setRetainInstance() is deprecated in recent AndroidX
        // releases -- consider a ViewModel when migrating.
        setRetainInstance(true);
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        // Notify the listener once the view hierarchy is available.
        if (mDynamicSplashListener != null) {
            mDynamicSplashListener.onViewCreated(mView);
        }
    }

    @Override
    public View onCreateView(@NonNull LayoutInflater inflater,
            @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        // Inflate the configured layout; -1 acts as a "no layout" sentinel.
        // NOTE(review): Bundle.getInt() defaults to 0 (not -1) when the key is
        // absent -- confirm callers always construct via newInstance().
        if (getArguments() != null && requireArguments().getInt(ADS_ARGS_SPLASH_LAYOUT_RES) != -1) {
            mView = inflater.inflate(requireArguments().getInt(ADS_ARGS_SPLASH_LAYOUT_RES),
                    container, false);
        }

        return mView;
    }

    /**
     * Returns the background color for this fragment.
     *
     * @return The background color for this fragment.
     */
    public @ColorInt int getBackgroundColor() {
        return DynamicTheme.getInstance().get().getPrimaryColor();
    }

    /**
     * Start the splash background task.
     */
    public void show() {
        // NOTE(review): AsyncTask is deprecated since API 30 -- consider
        // java.util.concurrent when migrating.
        mSplashTask = new SplashTask(mDynamicSplashListener);
        mSplashTask.execute();
    }

    /**
     * Stop the splash background task.
     */
    public void stop() {
        if (mSplashTask != null && !mSplashTask.isCancelled()) {
            mSplashTask.cancel(true);
        }
    }

    /**
     * Returns listener to implement the splash screen and to get various callbacks while
     * showing the splash.
     *
     * @return The listener to implement the splash screen.
     */
    public @Nullable DynamicSplashListener getOnSplashListener() {
        return mDynamicSplashListener;
    }

    /**
     * Set the listener to implement the splash screen and to get various callbacks while
     * showing the splash.
     *
     * @param dynamicSplashListener The listener to be set.
     */
    public void setOnSplashListener(@Nullable DynamicSplashListener dynamicSplashListener) {
        mDynamicSplashListener = dynamicSplashListener;
    }

    /**
     * Async task to perform any background operation while showing the splash.
     * <p>Declared static so it holds no implicit reference to the enclosing
     * fragment; it only sees the listener passed to its constructor.
     */
    static class SplashTask extends AsyncTask<Void, String, Void> {

        /**
         * Start time for this task.
         */
        long taskStartTime;

        /**
         * Time elapsed while executing this task.
         */
        long taskTimeElapsed;

        /**
         * Listener to implement the splash screen and to get various callbacks while
         * showing the splash.
         */
        private DynamicSplashListener dynamicSplashListener;

        /**
         * Constructor to initialize an object of this class.
         *
         * @param dynamicSplashListener The splash listener to get the various callbacks.
         */
        SplashTask(@Nullable DynamicSplashListener dynamicSplashListener) {
            this.dynamicSplashListener = dynamicSplashListener;
        }

        @Override
        protected void onPreExecute() {
            super.onPreExecute();

            taskStartTime = System.currentTimeMillis();
            if (dynamicSplashListener != null) {
                dynamicSplashListener.onPreSplash();
            }
        }

        @Override
        protected Void doInBackground(Void... params) {
            if (dynamicSplashListener != null) {
                dynamicSplashListener.doBehindSplash();

                // Pad the splash duration so it is visible for at least
                // getMinSplashTime() milliseconds.
                taskTimeElapsed = System.currentTimeMillis() - taskStartTime;
                if (taskTimeElapsed < dynamicSplashListener.getMinSplashTime()) {
                    try {
                        Thread.sleep(dynamicSplashListener.getMinSplashTime() - taskTimeElapsed);
                    } catch (InterruptedException ignored) {
                        // Interrupted (e.g. task cancelled) -- just finish early.
                    }
                }
            }

            return null;
        }

        @Override
        protected void onPostExecute(Void param) {
            super.onPostExecute(param);

            if (dynamicSplashListener != null) {
                dynamicSplashListener.onPostSplash();
            }
        }
    }
}
|
import { Component, OnInit, ViewChild, ElementRef, AfterViewInit } from '@angular/core';
import { Router, ActivatedRoute } from '@angular/router';
import { FormGroup, Validators, FormBuilder } from '@angular/forms';
import { WorkerService } from '../../../general/services/worker.service';
import { RejectedResponse } from '../../../general/services/rest.service';
import { ROUTES } from '../../../general/models/constants';
import { WorkerModel } from '../../../general/models/workers/worker.model';
import { LocalStorageService } from '../../../general/services/localstorage.service';
import { JobType } from '../../../general/models/jobtype/job-type.model';
import { ToastService } from '../../../general/services/toast.service';
@Component({
selector: 'app-update-worker',
templateUrl: 'update-worker.component.html'
})
export class UpdateWorkerComponent implements OnInit, AfterViewInit {
  // First form input; focused once the view is initialised.
  @ViewChild('firstControl')
  firstControl: ElementRef;
  // Reactive form backing the worker fields (name, active).
  workerForm: FormGroup;
  // True once the user attempted to save (presumably read by the template
  // for validation display -- template not visible here).
  submitted = false;
  // Worker id from the ':id' route param; 0 until params resolve.
  id = 0;
  // Job type resolved from local storage using the ':type' route param.
  jobType: JobType;
  // Last worker fetched from the server; reused by resetForm().
  existing: WorkerModel;

  constructor(private router: Router,
    private route: ActivatedRoute,
    private element: ElementRef,
    private workerService: WorkerService,
    private toastService: ToastService,
    private localStorageService: LocalStorageService,
    private formBuilder: FormBuilder) {
  }

  // Convenience accessor for the form's controls.
  get form() {
    return this.workerForm.controls;
  }

  // Builds the form, then resolves :type and :id from the route; any
  // missing/invalid param redirects to the not-found route.
  ngOnInit(): void {
    this.workerForm = this.formBuilder.group({
      name: ['', [Validators.required, Validators.maxLength(45)]],
      active: ['']
    });
    this.route.params.subscribe(data => {
      if (data['type']) {
        const jobTypeId = +data['type'];
        this.jobType = this.localStorageService.jobTypes.find(jobType => jobType.id === jobTypeId);
        if (this.jobType) {
          if (data['id']) {
            // NOTE(review): unlike jobTypeId above, 'id' is assigned without
            // the unary '+' conversion, so it stays a string here -- confirm
            // downstream comparisons/requests tolerate this.
            this.id = data['id'];
            if (this.id > 0) {
              this.loadWorker();
            } else {
              this.router.navigate([ROUTES.notfound]);
            }
          } else {
            this.router.navigate([ROUTES.notfound]);
          }
        } else {
          this.router.navigate([ROUTES.notfound]);
        }
      } else {
        this.router.navigate([ROUTES.notfound]);
      }
    });
  }

  // Focus the first input once the view exists.
  ngAfterViewInit() {
    this.firstControl.nativeElement.focus();
  }

  // Validate and submit the update; on failure show a toast, on invalid
  // form focus the first offending control.
  saveWorker() {
    this.submitted = true;
    if (this.workerForm.valid) {
      this.workerService.updateWorker(WorkerModel.createInstance(this.id, this.jobType.id, this.workerForm))
        .then(response => {
          this.toastService.success('Worker is updated successfully');
          this.backToList();
        })
        .catch((rejected: RejectedResponse) => {
          this.toastService.error(rejected.error);
        });
    } else {
      this.focusFirstError();
    }
  }

  // Discard edits and re-populate the form from the cached/loaded worker.
  resetForm() {
    this.submitted = false;
    this.loadWorker();
  }

  // Navigate back to the worker list for the current job type.
  backToList() {
    this.router.navigate([`${ROUTES.workers}/${this.jobType.id}`]);
  }

  // Populate the form, fetching the worker only on the first call and
  // reusing the cached copy afterwards.
  private loadWorker() {
    if (this.existing) {
      this.workerForm.patchValue(this.existing);
    } else {
      this.workerService.get(this.jobType.id, this.id)
        .then((response: WorkerModel) => {
          this.existing = response;
          this.workerForm.patchValue(response);
        })
        .catch((rejected: RejectedResponse) => {
          this.toastService.error(rejected.error);
        });
    }
  }

  // Focus the first control currently failing validation.
  focusFirstError() {
    const invalidControls = this.element.nativeElement.querySelectorAll('.form-control.ng-invalid');
    (<HTMLInputElement>invalidControls[0]).focus();
  }
}
|
"use strict";
exports.__esModule = true;
exports.widgetEditable = exports.widgetEnabled = exports.isInDisabledFieldset = void 0;
var _reactDom = require("react-dom");
var _matches = _interopRequireDefault(require("dom-helpers/query/matches"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var isInDisabledFieldset = function isInDisabledFieldset(inst) {
  // Resolve the component instance to its DOM node. findDOMNode throws for
  // detached/unmounted instances; treat that as "no node".
  var node = void 0;

  try {
    node = (0, _reactDom.findDOMNode)(inst);
  } catch (err) {
    /* ignore: no DOM node available */
  }

  if (!node) {
    return false;
  }

  // True when the node sits inside a disabled <fieldset>.
  return (0, _matches.default)(node, 'fieldset[disabled] *');
};
exports.isInDisabledFieldset = isInDisabledFieldset;
var widgetEnabled = interactionDecorator(true);
exports.widgetEnabled = widgetEnabled;
var widgetEditable = interactionDecorator(false);
exports.widgetEditable = widgetEditable;
// Builds a method decorator that suppresses calls while the widget is
// non-interactive. With disabledOnly === true only the `disabled` prop is
// honored; with false, `readOnly` blocks the call as well.
function interactionDecorator(disabledOnly) {
  function wrap(method) {
    return function decoratedMethod() {
      var _this$props = this.props,
          disabled = _this$props.disabled,
          readOnly = _this$props.readOnly;
      // NOTE(review): loose `== true` preserved from the compiled output --
      // confirm against the upstream source whether `=== true` was intended.
      disabled = isInDisabledFieldset(this) || disabled == true || !disabledOnly && readOnly === true;

      for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
        args[_key] = arguments[_key];
      }

      // Silently drop the call when disabled; otherwise forward all args.
      if (!disabled) return method.apply(this, args);
    };
  }

  // Handles both legacy decorator shapes: initializer-based class fields
  // (wrap the produced function and bind it to the instance) and plain
  // descriptor values (wrap in place).
  return function decorate(target, key, desc) {
    if (desc.initializer) {
      var init = desc.initializer;

      desc.initializer = function () {
        return wrap(init.call(this)).bind(this);
      };
    } else desc.value = wrap(desc.value);

    return desc;
  };
}
#!/bin/bash
# This script is meant to be called by the "install" step defined in
# .travis.yml. See http://docs.travis-ci.com/ for more details.
# The behavior of the script is controlled by environment variables defined
# in the .travis.yml in the top level folder of the project.

set -e

echo 'List files from cached directories'
echo 'pip:'
# FIX: quote expansions throughout so paths survive word splitting/globbing.
ls "$HOME/.cache/pip"

# for caching
export CC=/usr/lib/ccache/gcc
export CXX=/usr/lib/ccache/g++
# Useful for debugging how ccache is used
# export CCACHE_LOGFILE=/tmp/ccache.log
# ~60M is used by .ccache when compiling from scratch at the time of writing
ccache --max-size 100M --show-stats

# Deactivate the travis-provided virtual environment and setup a
# conda-based environment instead.
deactivate || echo "No virtualenv or condaenv to deactivate"

# install conda
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh
MINICONDA_PATH=/home/travis/miniconda
# append the path, update conda
chmod +x miniconda.sh && ./miniconda.sh -b -p "$MINICONDA_PATH"
export PATH="$MINICONDA_PATH/bin:$PATH"
conda update --yes conda

# Create the conda env and install the requirements
conda create -n testenv --yes python="${PYTHON_VERSION}"
source activate testenv
pip install -r requirements.txt
pip install pytest pytest-cov coverage codecov

# set up the package
python setup.py install
/* eslint-disable @typescript-eslint/no-unused-vars */
import React, { Fragment, ReactNode, useEffect, useState } from 'react'
import { PluginManagerComponent, PluginManagerSettings } from '../../types'
import PermisssionsSettings from './permissionsSettings'
import { Profile } from '@remixproject/plugin-utils'
import LocalPluginForm from './LocalPluginForm'
/** Props accepted by the plugin manager root view. */
interface RootViewProps {
  pluginComponent: PluginManagerComponent
  pluginManagerSettings: PluginManagerSettings
  children: ReactNode
}
/** Plugin-deactivation state change: the flag plus the affected plugin's profile. */
export interface pluginDeactivated {
  flag: boolean
  profile: Profile
}
/** Plugin-activation state change: the flag plus the affected plugin's profile. */
export interface pluginActivated {
  flag: boolean
  profile: Profile
}
/**
 * Root view of the plugin manager panel: renders the search box, the
 * "Connect to a Local Plugin" entry point, the plugin list (children),
 * the permissions settings and the local-plugin form.
 */
function RootView ({ pluginComponent, pluginManagerSettings, children }: RootViewProps) {
  // Controls the local-plugin form. NOTE(review): openModal() sets this to
  // false and closeModal() to true, and the value is forwarded unchanged to
  // LocalPluginForm's `visible` prop -- confirm the intended polarity there.
  const [visible, setVisible] = useState<boolean>(true)
  // Current lower-cased search string used to filter the plugin list.
  const [filterPlugins, setFilterPlugin] = useState<string>('')
  const openModal = () => {
    setVisible(false)
  }
  const closeModal = () => setVisible(true)
  // Re-filter the plugin list whenever the search term changes.
  // NOTE(review): pluginComponent is omitted from the deps array -- confirm
  // it is stable across renders.
  useEffect(() => {
    pluginComponent.getAndFilterPlugins(filterPlugins)
  }, [filterPlugins])
  return (
    <Fragment>
      <div id="pluginManager" data-id="pluginManagerComponentPluginManager">
        <header className="form-group remixui_pluginSearch plugins-header py-3 px-4 border-bottom" data-id="pluginManagerComponentPluginManagerHeader">
          <input
            type="text"
            onChange={(event) => {
              setFilterPlugin(event.target.value.toLowerCase())
            }}
            value={filterPlugins}
            className="form-control"
            placeholder="Search"
            data-id="pluginManagerComponentSearchInput"
          />
          <button onClick={openModal} className="remixui_pluginSearchButton btn bg-transparent text-dark border-0 mt-2 text-underline" data-id="pluginManagerComponentPluginSearchButton">
            Connect to a Local Plugin
          </button>
        </header>
        {children}
        <PermisssionsSettings pluginSettings={pluginManagerSettings}/>
      </div>
      <LocalPluginForm
        closeModal={closeModal}
        visible={visible}
        pluginManager={pluginComponent}
      />
    </Fragment>
  )
}
export default RootView
|
#include <memory>
#include <utility>
// Define the Line and FactoryState classes if not already defined
class Line {};
class FactoryState {};
/// One production line on the factory floor.
///
/// Holds an immutable start row, a non-owning reference to the Line it
/// runs on (the referenced Line must outlive this object), and a shared
/// immutable snapshot of the factory state that can be swapped out as
/// the factory evolves.
class ProductionLine {
private:
    const int _startY;                                  // fixed vertical start position
    const Line& _line;                                  // non-owning; caller guarantees lifetime
    std::shared_ptr<const FactoryState> _factoryState;  // shared immutable snapshot
public:
    /// @param startY       vertical start position of the line
    /// @param line         line layout; must outlive this object
    /// @param factoryState initial factory-state snapshot (shared ownership)
    // The pointer parameter is taken by value and moved into the member,
    // avoiding an extra atomic ref-count increment/decrement.
    ProductionLine(int startY, const Line& line, std::shared_ptr<const FactoryState> factoryState)
        : _startY(startY), _line(line), _factoryState(std::move(factoryState)) {}
    /// Replaces the current factory-state snapshot.
    void updateFactoryState(std::shared_ptr<const FactoryState> newFactoryState) {
        _factoryState = std::move(newFactoryState);
    }
    /// Read-only access to the current snapshot (null if a null pointer
    /// was supplied).
    const std::shared_ptr<const FactoryState>& factoryState() const noexcept {
        return _factoryState;
    }
};
#!/usr/bin/env bash
set -Ceu
#---------------------------------------------------------------------------
# Reorders the rows of a TSV class roster (seat shuffling).
# Reads the roster from stdin; -m selects the ordering method.
# CreatedAt: 2020-09-25
#---------------------------------------------------------------------------
Run() {
	# Resolve the script's own location so helpers and docs load relative to it.
	THIS="$(realpath "${BASH_SOURCE:-0}")"; HERE="$(dirname "$THIS")"; PARENT="$(dirname "$HERE")"; THIS_NAME="$(basename "$THIS")"; APP_ROOT="$HERE";
	. "$APP_ROOT/lib/Error.sh"
	Help() { eval "echo \"$(cat "$APP_ROOT/doc/help.txt")\""; }
	# -m METHOD : n(umber) / r(andom) / a(lternate by gender)
	while getopts :m: OPT; do
		case $OPT in
			m) ARG_METHOD="$OPTARG"; continue; ;;
			*) Help; exit 1; ;;
		esac
	done
	NAME_LIST="$(cat -)"
	ARG_METHOD="${ARG_METHOD:-n}"
	CheckArgs() {
		IsMethod() {
			# ${VAR,,} lower-cases the value (bash 4+).
			case "${ARG_METHOD,,}" in
				n|number) ;;
				r|random) ;;
				a|alternate) ;;
				*) Throw '引数 m の値は n r a のいずれかであるべきです。' ;;
			esac
		}
		IsNameList() {
			[ '' = "$NAME_LIST" ] && { Throw '標準入力に名簿データがありません。TSV形式で出席番号、名字読み、名前読み、名字表記、名前表記、性別、の6列を持った形式で渡してください。'; }
			echo -n '';
		}
		IsMethod
		IsNameList
	}
	CheckArgs
	# n: order by attendance number (1st column, numeric sort).
	ChangeForNumber() { echo -e "$NAME_LIST" | sort -n -t$'\t' -k1; }
	# r: random order.
	ChangeForRandom() { echo -e "$NAME_LIST" | shuf; }
	# a: alternate male/female rows (column 6 is "m" or "f").
	ChangeForAlternate() {
		LIST_M="$(echo -e "$NAME_LIST" | awk -F "\t" '$6=="m"')"
		LIST_F="$(echo -e "$NAME_LIST" | awk -F "\t" '$6=="f"')"
		LIST_NUM_M=$(echo -e "$LIST_M" | wc -l)
		LIST_NUM_F=$(echo -e "$LIST_F" | wc -l)
		# Interleave the shuffled lists: MF = male first, FM = female first.
		MF() { paste -d$'\n' <(echo -e "$LIST_M" | shuf) <(echo -e "$LIST_F" | shuf) | sed '/^$/d'; }
		FM() { paste -d$'\n' <(echo -e "$LIST_F" | shuf) <(echo -e "$LIST_M" | shuf) | sed '/^$/d'; }
		# Equal counts: pick the leading gender at random; otherwise the
		# larger group leads so the interleave stays strictly alternating.
		[ $LIST_NUM_F -eq $LIST_NUM_M ] && {
			[ 0 -eq $((RANDOM % 2)) ] && { MF; } || { FM; }
		} || {
			[ $LIST_NUM_F -lt $LIST_NUM_M ] && { MF; } || { FM; }
		}
	}
	case "${ARG_METHOD,,}" in
		n|number) ChangeForNumber;;
		r|random) ChangeForRandom;;
		a|alternate) ChangeForAlternate;;
	esac
}
Run "$@"
|
from typing import Union
class BankingSystem:
    """A minimal in-memory bank mapping account numbers to balances.

    Every operation reports its outcome as a status string so callers can
    display the result directly; ``check_balance`` returns the numeric
    balance on success.
    """

    def __init__(self):
        # account_number -> current balance
        self.accounts = {}

    def create_account(self, account_number: int) -> str:
        """Open a new account with a zero balance.

        Creating an account number that already exists is rejected.
        """
        if account_number in self.accounts:
            return "Account already exists"
        self.accounts[account_number] = 0
        return "Account created successfully"

    def deposit(self, account_number: int, amount: float) -> str:
        """Add ``amount`` to the account's balance.

        Non-positive amounts are rejected: a negative deposit would
        otherwise act as an unchecked withdrawal.
        """
        if account_number not in self.accounts:
            return "Account does not exist"
        if amount <= 0:
            return "Invalid amount"
        self.accounts[account_number] += amount
        return "Deposit successful"

    def withdraw(self, account_number: int, amount: float) -> str:
        """Remove ``amount`` from the account if funds are sufficient.

        Non-positive amounts are rejected.
        """
        if account_number not in self.accounts:
            return "Account does not exist"
        if amount <= 0:
            return "Invalid amount"
        if self.accounts[account_number] < amount:
            return "Insufficient funds"
        self.accounts[account_number] -= amount
        return "Withdrawal successful"

    def check_balance(self, account_number: int) -> Union[str, float]:
        """Return the balance, or an error string for unknown accounts."""
        if account_number in self.accounts:
            return self.accounts[account_number]
        return "Account does not exist"
# Sample usage (runs on import; int deposits yield int balances)
bank = BankingSystem()
print(bank.create_account(123456)) # Output: Account created successfully
print(bank.deposit(123456, 1000)) # Output: Deposit successful
print(bank.check_balance(123456)) # Output: 1000
print(bank.withdraw(123456, 500)) # Output: Withdrawal successful
print(bank.check_balance(123456)) # Output: 500
print(bank.withdraw(123456, 1000)) # Output: Insufficient funds
print(bank.create_account(123456)) # Output: Account already exists
print(bank.create_account(789012)) # Output: Account created successfully
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package kms;
/**
*
* @author Gault
*/
/**
 * Menu entry representing a food item. It adds no behaviour of its own;
 * it exists so food can be distinguished from other {@link MenuItem}
 * subtypes in the kitchen management system.
 */
public class Food extends MenuItem {
    /**
     * Creates a food item.
     *
     * @param name        display name of the item
     * @param price       price of the item
     * @param isAvailable whether the item can currently be ordered
     */
    public Food(String name, double price, boolean isAvailable) {
        super(name, price, isAvailable);
    }
}
|
#!/bin/sh
# Configure and build the project with CMake. Passing "clean" as the
# first argument wipes the build tree instead; any other arguments are
# forwarded to make.
set -e

readonly BUILD_DIR=./build
readonly LOVE_DIR=./love
readonly LUA_BINDINGS=OFF

case "$1" in
clean)
	echo "rm -rf ${BUILD_DIR}"
	rm -rf ${BUILD_DIR}
	exit $?
	;;
esac

mkdir -p "${BUILD_DIR}" "${LOVE_DIR}"
cd "${BUILD_DIR}"

cmake .. \
	-DCMAKE_BUILD_TYPE=Debug \
	-DLUNAPURPURA_BUILD_LUA_BINDINGS=${LUA_BINDINGS} \
	-DLUNAPURPURA_BUILD_MRUBY_BINDINGS=OFF \
	-DLUNAPURPURA_BUILD_TESTS=ON \
	-DLUNAPURPURA_SDL_SUPPORT=OFF \
	-DLUNAPURPURA_PNG_SUPPORT=OFF
make "$@"

# When Lua bindings are enabled, stage them where LOVE expects them.
if [ "${LUA_BINDINGS}" != "OFF" ]; then
	for lib in clu prx xpk; do
		cp -v "./src/lua/liblua${lib}.dylib" "../${LOVE_DIR}/lua${lib}.so"
	done
fi
cd -
|
<gh_stars>1-10
import React from 'react';
/** Props for {@link IconTablet}; extends the standard SVG attributes. */
export interface IconTabletProps extends React.SVGAttributes<SVGElement> {
  /** Stroke color; defaults to `currentColor`. */
  color?: string;
  /** Width and height of the rendered icon; defaults to `1em`. */
  size?: string | number;
  className?: string;
  style?: React.CSSProperties;
}
/**
 * Feather-style "tablet" icon rendered as an inline SVG.
 * Color and size come from props; all remaining SVG attributes are
 * passed through to the root <svg> element.
 *
 * NOTE(review): React.SFC is deprecated in newer @types/react --
 * consider React.FC when the toolchain is upgraded.
 */
export const IconTablet: React.SFC<IconTabletProps> = (
  props: IconTabletProps
): React.ReactElement => {
  const { color, size, style, ...restProps } = props;
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width={size}
      height={size}
      viewBox="0 0 24 24"
      fill="none"
      stroke={color}
      className="feather feather-tablet"
      strokeWidth="2"
      strokeLinecap="round"
      strokeLinejoin="round"
      style={{ verticalAlign: 'middle', ...style }}
      {...restProps}
    >
      <rect
        x="4"
        y="2"
        width="16"
        height="20"
        rx="2"
        ry="2"
        transform="rotate(180 12 12)"
      />
      <line x1="12" y1="18" x2="12" y2="18" />
    </svg>
  );
};
IconTablet.defaultProps = {
  color: 'currentColor',
  size: '1em',
};
export default IconTablet;
|
#! /bin/bash
# Launches the node app: in development it loads .env, preserves
# symlinks, optionally enables the debugger under VS Code, and runs the
# relay watcher alongside; in production it runs the prebuilt bundle.
set -e
# Trace commands in production for easier debugging of container logs.
if [ "$NODE_ENV" == "production" ]; then
  set -x
fi
# Converts a dotted version ("1.22.2") to a zero-padded number
# ("001022002") so versions can be compared with -ge.
function version {
  printf "%03d%03d%03d" $(echo "$1" | tr '.' ' ')
}
# Default the V8 heap cap (MB) unless the caller set one.
: ${MAX_OLD_SPACE_SIZE:=3072}
OPT=(--max_old_space_size=$MAX_OLD_SPACE_SIZE -r dotenv/config)
if [ "$NODE_ENV" != "production" ]; then
  if [ ! -f "./.env" ]; then
    echo -e "\033[1;31m WARNING: Missing .env file, see CONTRIBUTING.md. \033[0m"
  fi
  OPT+=(--preserve-symlinks)
  # Enable the inspector only under a VS Code terminal new enough
  # (>= 1.22.2) to auto-attach to the debug port.
  if [ "$TERM_PROGRAM" == "vscode" ] && \
    [ ! -z "$TERM_PROGRAM_VERSION" ] && \
    [ $(version $TERM_PROGRAM_VERSION) -ge $(version "1.22.2") ]
  then
    # Sometimes we have force and metaphysics running at the same time.
    # in that case we don't want to fail to launch, but simply show a warning
    # that the debugging won't work.
    # (nc probes whether the default inspector port 9229 is already taken.)
    if (nc -z 127.0.0.1 9229) &> /dev/null; then
      echo
      echo "WARNING! You are already debugging another node process!"
      echo
      echo "  force will start without --inspect-brk unless you kill the other process"
    else
      OPT+=(--inspect-brk)
    fi
  fi
  # Run the relay compiler in watch mode alongside the app process.
  yarn relay --watch & exec node "${OPT[@]}" ./src
else
  exec node "${OPT[@]}" ./server.dist.js
fi
|
<filename>packages/staker/src/zksync/utils.js
'use strict';
import {getConfig} from '../utils';
import {jsonRpcFetch} from '../helpers/jsonRpc';
// ==== zkSync JSON RPC ===
// https://zksync.io/api/v0.1.html
let idx = 1;
// Build a JSON-RPC 2.0 request body with an auto-incrementing id.
const rpcBody = (method, params) => JSON.stringify({
  jsonrpc: '2.0',
  id: idx++,
  method,
  params,
});
// Pre-built request body for the `tokens` endpoint (id assigned at module load).
export const rpcTokens = rpcBody('tokens', []);
// Request body querying `account_info` for the given zkSync address.
export const rpcZkSyncBalance = (address) => rpcBody('account_info', [address]);
// https://github.com/matter-labs/zksync/blob/dev/etc/js/env-config.js
let cachedZkSyncNodeUrl = '';
/**
 * Resolve the zkSync node URL, caching the first lookup for later calls.
 *
 * Can be overridden via the SAIHUBOT_ZKSYNC_NODE_URL environment variable.
 */
export const getZksyncNodeURL = () => {
  if (!cachedZkSyncNodeUrl) {
    cachedZkSyncNodeUrl = getConfig('ZKSYNC_NODE_URL', 'https://api.zksync.io/jsrpc');
  }
  return cachedZkSyncNodeUrl;
}
// POST `body` to the configured zkSync JSON-RPC node using the supplied fetch.
export const zksyncFetch = (fetch, body) =>
  jsonRpcFetch(fetch, getZksyncNodeURL(), body);
|
# Keep only entries that do NOT contain `keyword` as a substring.
# (Assumes `word_list` is a list of strings and `keyword` a string;
# both are defined elsewhere -- TODO confirm.)
filtered_list = [x for x in word_list if keyword not in x]
# filtered_list = ['hello', 'this', 'word']
<filename>src/main/scala/sims/dynamics/Collision.scala
/* _____ _ __ ________ ___ *\
** / ___/(_) |/ / ___/ |__ \ Simple Mechanics Simulator 2 **
** \__ \/ / /|_/ /\__ \ __/ / copyright (c) 2011 <NAME> **
** ___/ / / / / /___/ / / __/ **
** /____/_/_/ /_//____/ /____/ **
\* */
package sims.dynamics
import sims.dynamics._
import sims.dynamics.constraints._
import sims.collision.{Collision => CCollision}
/** A class representing a physical collision,
 * implementing constraints to handle collision response. */
class Collision(collision: CCollision[Shape]) extends Constraining {
  // Builds one non-penetration constraint per contact point. Each
  // constraint pushes the bodies apart along the contact normal and,
  // through `bias`, adds restitution (bounce) when the approach speed is
  // high enough.
  private def getNonPenetrationConstraints() = for (point <- collision.points) yield
    new Constraint {
      val body1 = collision.item1.body
      val body2 = collision.item2.body
      // Relative velocity of the contact point (body2 minus body1).
      def v = body2.velocityOfPoint(point) - body1.velocityOfPoint(point)
      // Effective restitution: zero for separating or slowly-approaching
      // contacts, otherwise the smaller of the two shapes' restitutions.
      // NOTE(review): the -1 threshold looks like a hard-coded minimum
      // approach speed below which bounce is suppressed -- confirm units.
      val e = {
        if ((v dot collision.normal.unit) > 0) 0.0
        else if ((v dot collision.normal.unit) > -1) 0.0
        else math.min(collision.item1.restitution, collision.item2.restitution)
      }
      def jacobian = new Jacobian(-collision.normal, -((point - body1.position) cross collision.normal),
        collision.normal, ((point - body2.position) cross collision.normal))
      // Velocity bias implementing restitution along the normal.
      override def bias = (v dot collision.normal.unit) * e
      def value = -collision.overlap
      override def inequality = true
      override val limit = Some((0.0, Double.PositiveInfinity))
      // Allowed penetration before positional correction kicks in.
      val slop = 0.005
      override def error =
        if (collision.overlap > slop)
          -(collision.overlap - slop)
        else 0.0
    }
  val constraints = getNonPenetrationConstraints()
}
object Collision {
  /**Converts a collision to a physical collision
   * (sims.collision.Collision to a sims.dynamics.Collision)*/
  implicit def collision2Physical(c: sims.collision.Collision[Shape]) = new Collision(c)
  implicit def collision2Constructor(c: sims.collision.Collision[Shape]) = new { def toPhysical = new Collision(c) }
}
#ifndef _MENUITEMSCALE_H_
#define _MENUITEMSCALE_H_
/*
MenuItemScale is based on MenuItemNumeric.
https://github.com/lovyan03/M5Stack_TreeView/blob/master/src/MenuItemNumeric.h
*/
#include <MenuItemSpinner.h>
#include <Scale.h>
// Spinner menu item whose selection range is bound to the number of
// available musical scales (the second MenuItemSpinner argument is
// presumably the maximum selectable index -- confirm against
// MenuItemSpinner's constructor).
class MenuItemScale : public MenuItemSpinner {
public:
  // title: label shown in the menu; value: initially selected scale index;
  // tg: tag forwarded to MenuItemSpinner; cb: callback fired on enter.
  MenuItemScale(const String& title, int value, int tg = 0, TCallBackEnter cb = 0)
  : MenuItemSpinner(title,Scale::getAvailableScales().size() - 1, value, tg, cb) {};
  MenuItemScale(const String& title, int value, TCallBackEnter cb)
  : MenuItemSpinner(title,Scale::getAvailableScales().size() - 1, value, cb) {};
  // Renders the scale name for the given index (defined in the .cpp).
  String getStringOfItem(int value) override;
};
#endif |
<reponame>john-aws/covid19-app-system-public<filename>tools/build/lib/nhsx/tasks/tag.rb
# Rake tasks that push timestamped release tags of the current git SHA,
# one task per subsystem and target environment. Environments are grouped
# by AWS account; each task depends on the matching login task where one
# is required.
namespace :tag do
  include NHSx::Git
  # Analytics subsystem.
  namespace :analytics do
    NHSx::TargetEnvironment::ANALYTICS_TARGET_ENVIRONMENTS.each do |account, tgt_envs|
      tgt_envs.each do |tgt_env|
        desc "Push immutable tag of the current git SHA for the Analytics subsystem to #{tgt_env}"
        task :"#{tgt_env}" => [:"login:#{account}"] do
          push_timestamped_tag("analytics", tgt_env, "Release Analytics on #{tgt_env}", $configuration)
        end
      end
    end
  end
  # Core app system.
  namespace :"app-system" do
    NHSx::TargetEnvironment::TARGET_ENVIRONMENTS.each do |account, tgt_envs|
      tgt_envs.each do |tgt_env|
        desc "Push immutable tag of the current git SHA for the App System to #{tgt_env}"
        task :"#{tgt_env}" => [:"login:#{account}"] do
          push_timestamped_tag("system", tgt_env, "Release App System on #{tgt_env}", $configuration)
        end
      end
    end
  end
  # Control panel.
  namespace :conpan do
    NHSx::TargetEnvironment::TARGET_ENVIRONMENTS.each do |account, tgt_envs|
      tgt_envs.each do |tgt_env|
        desc "Push immutable tag of the current git SHA for the Control Panel to #{tgt_env}"
        task :"#{tgt_env}" => [:"login:#{account}"] do
          push_timestamped_tag("conpan", tgt_env, "Release Control Panel on #{tgt_env}", $configuration)
        end
      end
    end
  end
  # Document reporting tool: dev-account environments only, no login task.
  namespace :doreto do
    NHSx::TargetEnvironment::DORETO_TARGET_ENVIRONMENTS["dev"].each do |tgt_env|
      desc "Push immutable tag of the current git SHA for the Document Reporting Tool to #{tgt_env}"
      task :"#{tgt_env}" do
        push_timestamped_tag("doreto", tgt_env, "Release doreto on #{tgt_env}", $configuration)
      end
    end
  end
  # Synthetic canaries.
  namespace :synth do
    NHSx::TargetEnvironment::TARGET_ENVIRONMENTS.each do |account, tgt_envs|
      tgt_envs.each do |tgt_env|
        desc "Push immutable tag of the current git SHA for the Synthetic Canaries to #{tgt_env}"
        task :"#{tgt_env}" => [:"login:#{account}"] do
          push_timestamped_tag("synth", tgt_env, "Release Synthetic Canaries on #{tgt_env}", $configuration)
        end
      end
    end
  end
end
|
#!/bin/sh
# Docker entrypoint for Bitcoin Gold images: rewrites bare flags into a
# bgoldd invocation, prepares the data directory, and drops privileges
# for the known binaries before exec'ing the final command.
set -e

# If the first argument looks like a flag (e.g. "-printtoconsole"),
# assume the caller meant to pass options to bgoldd.
# A `case` pattern is used instead of `[ $(echo "$1" | cut -c1) = "-" ]`,
# which produced a `test` syntax error (and, with set -e, an exit)
# when the container was started with no arguments.
case "$1" in
	-*)
		echo "$0: assuming arguments for bgoldd"
		set -- bgoldd "$@"
		;;
esac

# When running the daemon, prepare the data directory and point bgoldd at
# it. (After the rewrite above, a leading "-" argument has already become
# "bgoldd", so checking for "bgoldd" covers both original conditions.)
if [ "$1" = "bgoldd" ]; then
	mkdir -p "$BITCOIN_GOLD_DATA"
	chmod 700 "$BITCOIN_GOLD_DATA"
	chown -R bitcoingold "$BITCOIN_GOLD_DATA"
	echo "$0: setting data directory to $BITCOIN_GOLD_DATA"
	set -- "$@" -datadir="$BITCOIN_GOLD_DATA"
fi

# Known binaries run as the unprivileged bitcoingold user; anything else
# is exec'd as-is.
if [ "$1" = "bgoldd" ] || [ "$1" = "bgold-cli" ] || [ "$1" = "bitcoin-tx" ]; then
	echo
	exec su-exec bitcoingold "$@"
fi

echo
exec "$@"
|
<filename>script/queries/elected_plans_group_file_xml.rb
p_ids = ["<KEY>1",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"5453a543791e4bcd3300006f",
"<KEY>0",
"5453a544791e4bcd33000098",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>4",
"<KEY>"]
# Look up the hand-picked plans (ids in p_ids above, redacted here) and
# serialize them to plans.xml for the group file feed.
plans = Plan.where("id" => {"$in" => p_ids})
pxml = File.open("plans.xml",'w')
builder = Nokogiri::XML::Builder.new do |xml|
  xml.root('xmlns:ns1' => 'bar'){
    xml['ns1'].plans{
      plans.each do |p|
        xml['ns1'].plan{
          xml['ns1'].qhp_id p.hios_plan_id
          xml['ns1'].plan_exchange_id p.id
          # NOTE(review): carrier id, coverage type and effective date are
          # hard-coded for this one-off export -- confirm before reuse.
          xml['ns1'].carrier_id 116036
          xml['ns1'].plan_name p.name
          xml['ns1'].coverage_type "POS"
          xml['ns1'].original_effective_date "2015-07-01"
        }
      end
    }
  }
end
pxml.print(builder.to_xml)
|
from setuptools import setup

setup(
    name="dlw",
    version="1.0",
    packages=["dlw"],
    install_requires=[
        "numpy",
        "scipy",
        # NOTE(review): `csv` is in the standard library -- confirm the
        # "python-csv" PyPI package is really needed.
        "python-csv",
        "flask",
        "flask-restful",
        "pandas",
        "scikit-learn",
        "tensorflow",
    ],
    # console_scripts entries are conventionally a list of
    # "name = package.module:function" strings; the previous set literal
    # only worked because setuptools merely iterates the value.
    entry_points={"console_scripts": ["start-dlw = dlw.web.app:run_app"]},
)
<filename>backend/payment/api/v1/serializers.py
from rest_framework import serializers
from payment.models import Services
class ServicesSerializer(serializers.ModelSerializer):
    """Serializes every field of the payment ``Services`` model for API v1."""
    class Meta:
        model = Services
        fields = "__all__"
|
<filename>inference-engine/src/vpu/graph_transformer/src/model/data.cpp
// Copyright (C) 2018-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#include <vpu/model/data.hpp>
#include <array>
#include <algorithm>
#include <queue>
#include <memory>
#include <vector>
#include <unordered_map>
#include <map>
#include <string>
#include <set>
#include <utility>
#include <precision_utils.h>
#include <ie_parallel.hpp>
#include <vpu/model/edges.hpp>
#include <vpu/model/stage.hpp>
#include <vpu/backend/backend.hpp>
#include <vpu/utils/ie_helpers.hpp>
#include <vpu/utils/numeric.hpp>
#include <vpu/compile_env.hpp>
namespace vpu {
//
// DataContent
//
DataContent::~DataContent() = default;
// Lazily materializes the calculated buffer: on first access the temp
// buffer is sized and filled from the base contents, which are then
// released to free memory.
const void* CalculatedDataContent::getRaw() const {
    if (_temp.empty()) {
        _temp.resize(getTempBufSize(_baseContents));
        fillTempBuf(_baseContents, _temp.data());
        _baseContents.clear();
    }
    return _temp.data();
}
// Default temp-buffer size: the full tensor described by desc().
size_t CalculatedDataContent::getTempBufSize(const SmallVector<DataContent::Ptr, 2>&) const {
    return checked_cast<size_t>(desc().totalDimSize()) *
           checked_cast<size_t>(desc().elemSize());
}
namespace {
// Adapts an InferenceEngine blob as DataContent, optionally tiling it
// `repeat` times. Conversion and tiling results are cached lazily.
class IeBlobContent final : public DataContent {
public:
    IeBlobContent(const ie::Blob::Ptr& blob, int repeat) : _blob(blob), _repeat(repeat) {}
protected:
    const void* getRaw() const override {
        if (desc().type() == DataType::FP16) {
            // Convert the source blob to FP16 once, then drop the source.
            if (_blobFp16 == nullptr) {
                _blobFp16 = getBlobFP16(_blob);
                _blob.reset();
            }
            if (_repeat == 1) {
                return _blobFp16->cbuffer();
            } else {
                // Tile the converted data `_repeat` times into _tempFp16.
                if (_tempFp16.empty()) {
                    VPU_PROFILE(IeBlobContent);
                    IE_ASSERT(desc().totalDimSize() % _repeat == 0);
                    auto origNumElems = desc().totalDimSize() / _repeat;
                    IE_ASSERT(checked_cast<size_t>(origNumElems) <= _blobFp16->size());
                    auto origPtr = _blobFp16->cbuffer().as<const fp16_t*>();
                    IE_ASSERT(origPtr != nullptr);
                    _tempFp16.resize(checked_cast<size_t>(desc().totalDimSize()));
                    ie::parallel_for(_repeat, [this, origPtr, origNumElems](int i) {
                        std::copy_n(origPtr, origNumElems, _tempFp16.data() + i * origNumElems);
                    });
                }
                return _tempFp16.data();
            }
        } else if (desc().type() == DataType::S32) {
            // S32 needs no conversion; only tiling may be required.
            if (_repeat == 1) {
                return _blob->cbuffer();
            } else {
                if (_tempS32.empty()) {
                    VPU_PROFILE(IeBlobContent);
                    IE_ASSERT(desc().totalDimSize() % _repeat == 0);
                    auto origNumElems = desc().totalDimSize() / _repeat;
                    IE_ASSERT(checked_cast<size_t>(origNumElems) <= _blob->size());
                    auto origPtr = _blob->cbuffer().as<const int32_t*>();
                    IE_ASSERT(origPtr != nullptr);
                    _tempS32.resize(checked_cast<size_t>(desc().totalDimSize()));
                    ie::parallel_for(_repeat, [this, origPtr, origNumElems](int i) {
                        std::copy_n(origPtr, origNumElems, _tempS32.data() + i * origNumElems);
                    });
                }
                return _tempS32.data();
            }
        } else {
            VPU_THROW_EXCEPTION << "Unsupported data type " << desc().type();
        }
    }
private:
    mutable ie::Blob::Ptr _blob;
    int _repeat = 0;
    mutable ie::Blob::Ptr _blobFp16;
    mutable std::vector<fp16_t> _tempFp16;
    mutable std::vector<int32_t> _tempS32;
};
}  // namespace
// Factory: wrap an IE blob (tiled `repeat` times) as DataContent.
DataContent::Ptr ieBlobContent(const ie::Blob::Ptr& blob, int repeat) {
    return std::make_shared<IeBlobContent>(blob, repeat);
}
namespace {
// Content filled by replication. Two modes:
//  - scalar mode: `_count` copies of a single FP16 value (`_factor`);
//  - content mode: the base content repeated `_count` times.
class ReplicatedContent final : public CalculatedDataContent {
public:
    ReplicatedContent(float val, int count) : _factor{val}, _count(count) {}
    ReplicatedContent(DataContent::Ptr origContent, int count) :
            CalculatedDataContent({std::move(origContent)}), _count(count) {
    }
protected:
    size_t getTempBufSize(const SmallVector<DataContent::Ptr, 2>& baseContents) const override {
        // Scalar mode has no base content: buffer is just `_count` values.
        if (baseContents.empty()) {
            return checked_cast<size_t>(_count) * sizeof(fp16_t);
        } else {
            IE_ASSERT(baseContents.size() == 1);
            IE_ASSERT(desc().totalDimSize() % _count == 0);
            return checked_cast<size_t>(desc().totalDimSize()) * sizeof(fp16_t);
        }
    }
    void fillTempBuf(const SmallVector<DataContent::Ptr, 2>& baseContents, void* tempBuf) const override {
        VPU_PROFILE(ReplicatedContent);
        auto dstPtr = static_cast<fp16_t*>(tempBuf);
        if (baseContents.empty()) {
            // Scalar mode: broadcast the FP32 factor as FP16.
            std::fill_n(dstPtr, _count, ie::PrecisionUtils::f32tof16(_factor));
        } else {
            IE_ASSERT(baseContents.size() == 1);
            IE_ASSERT(desc().totalDimSize() % _count == 0);
            auto origCount = desc().totalDimSize() / _count;
            auto origPtr = baseContents[0]->get<fp16_t>();
            IE_ASSERT(origPtr != nullptr);
            // Content mode: copy the base content `_count` times.
            ie::parallel_for(_count, [origPtr, origCount, dstPtr](int i) {
                std::copy_n(origPtr, origCount, dstPtr + i * origCount);
            });
        }
    }
private:
    float _factor = 1.0f;
    int _count = 0;
};
}  // namespace
// Factory: `count` copies of the scalar `val` (stored as FP16).
DataContent::Ptr replicateContent(float val, int count) {
    return std::make_shared<ReplicatedContent>(val, count);
}
// Factory: `origContent` repeated `count` times.
DataContent::Ptr replicateContent(const DataContent::Ptr& origContent, int count) {
    return std::make_shared<ReplicatedContent>(origContent, count);
}
namespace {
// FP16 content equal to the base content multiplied element-wise by a
// scalar FP32 factor. Values are converted fp16->fp32, scaled, and
// converted back.
class ScaledContent final : public CalculatedDataContent {
public:
    ScaledContent(const DataContent::Ptr& origContent, float scale) :
            CalculatedDataContent({origContent}), _factor(scale) {
    }
protected:
    void fillTempBuf(const SmallVector<DataContent::Ptr, 2>& baseContents, void* tempBuf) const override {
        VPU_PROFILE(ScaledContent);
        IE_ASSERT(baseContents.size() == 1);
        auto totalSize = desc().totalDimSize();
        auto origDesc = baseContents[0]->desc();
        IE_ASSERT(origDesc.type() == DataType::FP16);
        IE_ASSERT(origDesc.totalDimSize() == totalSize);
        auto srcPtr = baseContents[0]->get<fp16_t>();
        IE_ASSERT(srcPtr != nullptr);
        auto dstPtr = static_cast<fp16_t*>(tempBuf);
        ie::parallel_for(totalSize, [this, srcPtr, dstPtr](int i) {
            dstPtr[i] = ie::PrecisionUtils::f32tof16(ie::PrecisionUtils::f16tof32(srcPtr[i]) * _factor);
        });
    }
private:
    float _factor = 1.0f;
};
}  // namespace
// Factory: `origContent` scaled by the scalar `scale`.
DataContent::Ptr scaleContent(const DataContent::Ptr& origContent, float scale) {
    return std::make_shared<ScaledContent>(origContent, scale);
}
namespace {
class ScaledChannelContent final : public CalculatedDataContent {
public:
ScaledChannelContent(
const DataContent::Ptr& origContent,
const DataContent::Ptr& scaleContent) :
CalculatedDataContent({origContent, scaleContent}) {
}
protected:
void fillTempBuf(const SmallVector<DataContent::Ptr, 2>& baseContents, void* tempBuf) const override {
VPU_PROFILE(ScaledChannelContent);
IE_ASSERT(baseContents.size() == 2);
auto totalSize = desc().totalDimSize();
IE_ASSERT(desc().numDims() == 4 && desc().dimsOrder() == DimsOrder::NCHW);
auto numN = desc().dim(Dim::N);
auto numC = desc().dim(Dim::C);
auto numH = desc().dim(Dim::H);
auto numW = desc().dim(Dim::W);
auto origDesc = baseContents[0]->desc();
IE_ASSERT(origDesc.type() == DataType::FP16);
IE_ASSERT(origDesc.totalDimSize() == totalSize);
IE_ASSERT(baseContents[1]->desc().totalDimSize() == numN);
auto srcPtr = baseContents[0]->get<fp16_t>();
IE_ASSERT(srcPtr != nullptr);
auto scale = baseContents[1]->get<fp16_t>();
IE_ASSERT(scale != nullptr);
auto dstPtr = static_cast<fp16_t*>(tempBuf);
for (int n = 0; n < numN; n++) {
for (int c = 0; c < numC; c++) {
for (int h = 0; h < numH; h++) {
for (int w = 0; w < numW; w++) {
dstPtr[n * numC * numH * numW + c * numH * numW + h * numW + w] =
srcPtr[n * numC * numH * numW + c * numH * numW + h * numW + w] * scale[n];
}
}
}
}
}
};
} // namespace
DataContent::Ptr scaledChannelContent(
const DataContent::Ptr& origContent,
const DataContent::Ptr& scaleContent) {
return std::make_shared<ScaledChannelContent>(origContent, scaleContent);
}
//
// DataNode
//
// Walks parent edges up to the root of a data-sharing hierarchy.
Data DataNode::getTopParentData() const {
    Data topParent = this;
    while (auto nextParent = topParent->parentData()) {
        topParent = nextParent;
    }
    return topParent;
}
// Strides are inherited from the parent when this node is a ROI view of
// it; otherwise they are derived from the descriptor and requirements.
DimValues DataNode::strides() const {
    if (_parentDataEdge != nullptr) {
        if (_parentDataEdge->mode() == SharedDataMode::ROI) {
            return _parentDataEdge->parent()->strides();
        }
    }
    return calcStrides(_desc, _requiredStrides);
}
int DataNode::totalByteSize() const {
    // It doesn't make sense for child Data.
    IE_ASSERT(_parentDataEdge == nullptr);
    return calcTotalByteSize(_desc, strides());
}
// Byte offset of the element at `coord` from the start of the buffer.
int DataNode::elemOffset(const DimValues& coord) const {
    auto strides = this->strides();
    int res = 0;
    for (const auto& p : coord) {
        IE_ASSERT(_desc.dimsOrder().hasDim(p.first));
        IE_ASSERT(p.second < _desc.dim(p.first));
        res += p.second * strides[p.first];
    }
    return res;
}
// Offset of the last element (all coordinates at their maximum).
int DataNode::lastElemOffset() const {
    DimValues lastElem;
    for (const auto& p : _desc.dims()) {
        lastElem.set(p.first, p.second - 1);
    }
    return elemOffset(lastElem);
}
// Only intermediate data without an existing parent can be adopted.
bool DataNode::canHaveAParent() const {
    return parentData() == nullptr && usage() == DataUsage::Intermediate;
}
bool DataNode::checkStrides(const StridesRequirement& reqs) const {
    return vpu::checkStrides(_desc, strides(), reqs);
}
// Merges `newReqs` into the accumulated stride requirements, then checks
// that the merged result still satisfies both the old and new sets.
void DataNode::updateRequiredStrides(const StridesRequirement& newReqs) {
    // There shouldn't be any Data<->Data edges.
    IE_ASSERT(_parentDataEdge == nullptr);
    IE_ASSERT(_childDataEdges.empty());
    auto prevReqs = _requiredStrides;
    StridesRequirement mergedReqs;
    // Fixed strides are all-or-nothing: whichever side defines them wins.
    const auto& fixedRequirements = prevReqs.fixedStrides().empty() ? newReqs : prevReqs;
    if (!fixedRequirements.fixedStrides().empty()) {
        mergedReqs = fixedRequirements;
    } else {
        // Merge per-dimension requirements.
        for (int i = 0; i < _desc.numDims(); ++i) {
            auto prevReq = prevReqs.get(i);
            auto newReq = newReqs.get(i);
            if (prevReq == DimStride::Any &&
                newReq == DimStride::Any) {
                continue;
            }
            // In case if both requirements are defined, use `prevReq`.
            // We'll check that both requirements are satisfied at the end.
            if (prevReq != DimStride::Any) {
                mergedReqs.add(i, prevReq);
            } else {
                mergedReqs.add(i, newReq);
            }
        }
    }
    _requiredStrides = mergedReqs;
    IE_ASSERT(checkStrides(prevReqs));
    IE_ASSERT(checkStrides(newReqs));
}
// Resets any previously assigned location/offset.
void DataNode::clearAllocation() {
    _location = DataLocation::None;
    _memoryOffset = 0;
    attrs().erase("ioBufferOffset");
}
// Only intermediate data may be placed in non-DDR memory.
void DataNode::setMemReqs(MemoryType mem) {
    if (mem != MemoryType::DDR) {
        IE_ASSERT(_usage == DataUsage::Intermediate);
    }
    _memReqs = mem;
}
// Assigns an I/O location; the location must match the data's usage.
void DataNode::setIOInfo(DataLocation location, int ioBufferOffset) {
    IE_ASSERT(_usage == DataUsage::Input || _usage == DataUsage::Output);
    if (_usage == DataUsage::Input) {
        IE_ASSERT(location == DataLocation::Input);
    } else if (_usage == DataUsage::Output) {
        IE_ASSERT(location == DataLocation::Output);
    }
    _location = location;
    _memoryOffset = 0;
    attrs().set<int>("ioBufferOffset", ioBufferOffset);
}
// Assigns an allocation for const/intermediate/temp data; const data
// lives in the blob, temp data in BSS.
void DataNode::setAllocationInfo(DataLocation location, int memoryOffset) {
    IE_ASSERT(_usage == DataUsage::Const || _usage == DataUsage::Intermediate || _usage == DataUsage::Temp);
    if (_usage == DataUsage::Const) {
        IE_ASSERT(location == DataLocation::Blob);
    } else if (_usage == DataUsage::Temp) {
        IE_ASSERT(location == DataLocation::BSS);
    }
    _location = location;
    _memoryOffset = memoryOffset;
}
// Serializes the buffer description, optionally re-expressed in
// `newOrder`: dimensions missing from the original order are inserted
// with size 1 and strides consistent with their neighbours.
void DataNode::serializeBuffer(
        BlobSerializer& serializer,
        DimsOrder newOrder) {
    if (newOrder.numDims() == 0) {
        serializeBufferImpl(serializer, _desc, this->strides());
    } else {
        IE_ASSERT(newOrder.numDims() >= _desc.dimsOrder().numDims());
        auto newDims = _desc.dims();
        auto newStrides = this->strides();
        auto newPerm = newOrder.toPermutation();
        auto origOrder = _desc.dimsOrder();
        auto origPerm = origOrder.toPermutation();
        size_t origPermInd = 0;
        for (size_t i = 0; i < newPerm.size(); i++) {
            auto d = newPerm[i];
            // Dimensions present in the original order keep their values.
            if (origPermInd < origPerm.size() && origPerm[origPermInd] == d) {
                ++origPermInd;
                continue;
            }
            // Inserted dimension: size 1, stride following the previous
            // dimension (or the element size for the innermost one).
            newDims.set(d, 1);
            if (i == 0) {
                newStrides.set(d, _desc.elemSize());
            } else {
                newStrides.set(d, newStrides[newPerm[i - 1]] * newDims[newPerm[i - 1]]);
            }
        }
        // The original order must be a subsequence of the new order.
        IE_ASSERT(origPermInd == origPerm.size());
        DataDesc newDesc(_desc.type(), newOrder, newDims);
        serializeBufferImpl(serializer, newDesc, newStrides);
    }
}
// Serializes I/O metadata: index, buffer offset, 16-byte-aligned
// zero-padded name, and the descriptor.
void DataNode::serializeIOInfo(BlobSerializer& serializer) const {
    auto ioIdx = attrs().get<int>("ioIdx");
    serializer.append(checked_cast<uint32_t>(ioIdx));
    auto ioBufferOffset = attrs().get<int>("ioBufferOffset");
    serializer.append(checked_cast<uint32_t>(ioBufferOffset));
    auto nameLength = checked_cast<uint32_t>(_name.length());
    auto nameLengthAligned = alignVal(nameLength, 16u);
    serializer.append(nameLengthAligned);
    for (auto c : _name) {
        serializer.append(c);
    }
    // Zero-pad the name to the aligned length.
    for (uint32_t i = 0; i < nameLengthAligned - nameLength; ++i) {
        serializer.append(uint8_t(0));
    }
    serializeDescImpl(serializer, _desc, strides());
}
// Serializes a descriptor: type, order code, then per-dimension sizes
// and strides in permutation order.
void DataNode::serializeDescImpl(
        BlobSerializer& serializer,
        const DataDesc& storedDesc,
        const DimValues& storedStrides) const {
    IE_ASSERT(storedDesc.numDims() <= MAX_DIMS_32);
    const auto& storedDims = storedDesc.dims();
    auto storedDimsOrder = storedDesc.dimsOrder();
    auto storedPerm = storedDimsOrder.toPermutation();
    IE_ASSERT(!storedPerm.empty());
    serializer.append(checked_cast<uint32_t>(storedDesc.type()));
    serializer.append(checked_cast<uint32_t>(storedDimsOrder.code()));
    serializer.append(checked_cast<uint32_t>(storedPerm.size()));
    for (auto d : storedPerm) {
        serializer.append(checked_cast<uint32_t>(storedDims[d]));
    }
    for (auto d : storedPerm) {
        serializer.append(checked_cast<uint32_t>(storedStrides[d]));
    }
}
// Serializes descriptor + location; for I/O buffers also the top
// parent's I/O index and byte size, then the memory offset.
void DataNode::serializeBufferImpl(
        BlobSerializer& serializer,
        const DataDesc& storedDesc,
        const DimValues& storedStrides) const {
    serializeDescImpl(serializer, storedDesc, storedStrides);
    serializer.append(checked_cast<uint32_t>(_location));
    if (_location == DataLocation::Input || _location == DataLocation::Output) {
        auto topParent = getTopParentData();
        auto ioIdx = topParent->attrs().get<int>("ioIdx");
        serializer.append(checked_cast<uint32_t>(ioIdx));
        auto parentByteSize = topParent->totalByteSize();
        serializer.append(checked_cast<uint32_t>(parentByteSize));
    }
    serializer.append(checked_cast<uint32_t>(_memoryOffset));
}
// Streams the node's name, or a placeholder for an empty handle.
void printTo(std::ostream& os, const Data& data) {
    if (data == nullptr) {
        os << "<null>";
    } else {
        os << data->name();
    }
}
//
// loopOverData
//
namespace {
// Sentinel exception used to unwind out of a recursive traversal when
// the visitor requests a stop.
struct StopSignal final {};
// Depth-first traversal of `data`'s children; `op` decides per node
// whether to descend, skip, or stop the whole walk.
void loopOverDataImpl(
        const Data& data,
        const FuncRef<DataLoopStatus(const Data&)>& op) {
    for (const auto& childData : data->childDatas()) {
        auto status = op(childData);
        if (status == DataLoopStatus::NextChild) {
            loopOverDataImpl(childData, op);
        } else if (status == DataLoopStatus::Stop) {
            throw StopSignal();
        }
    }
}
}  // namespace
// Visits `data` and (unless `op` says otherwise) all of its descendants.
void loopOverData(
        const Data& data,
        const FuncRef<DataLoopStatus(const Data&)>& op) {
    auto status = op(data);
    if (status != DataLoopStatus::NextChild)
        return;
    try {
        loopOverDataImpl(data, op);
    } catch (const StopSignal&) {
        return;
    }
}
} // namespace vpu
|
<reponame>smagill/opensphere-desktop
package io.opensphere.core.util.time;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.units.duration.Duration;
import io.opensphere.core.units.duration.Months;
/**
* This is a simple DateDurationKey object.
*
* Do NOT add layer metadata to this object Instead, use TiledDateDurationKey
*/
/**
* The Class DateDurationKey.
*/
//TODO What is this class for? Is this a key to look up something else?
public class DateDurationKey implements Comparable<DateDurationKey>
{
/** The start date. */
private Date myStartDate;
/** The end date. */
private Date myEndDate;
/** The duration. */
private Duration myDuration;
/** The children. */
private List<DateDurationKey> myChildren;
/** The has data. */
private boolean myHasData;
/** The has children. */
private boolean myHasChildren;
/** The my name. */
private String myName;
// Do lazy caching of this object's hash value, since a large
// number of these objects appear in hash sets.
/** The hash calculated. */
private boolean myHashCalculated;
/** The my hash. */
private int myHash;
/** The my sort index. */
private int mySortIndex;
/** The Is expanded. */
private boolean myExpanded;
    /**
     * Instantiates an empty date duration key with no dates and a
     * default duration of one month.
     */
    public DateDurationKey()
    {
        super();
        myChildren = new ArrayList<>();
        myStartDate = null;
        myEndDate = null;
        // Default granularity when none is supplied.
        myDuration = Months.ONE;
    }
    /**
     * Instantiates a new date duration key.
     *
     * @param startDate the start date (defensively copied)
     * @param endDate the end date (defensively copied)
     * @param duration the duration
     */
    public DateDurationKey(Date startDate, Date endDate, Duration duration)
    {
        myChildren = new ArrayList<>();
        // Don't want to store or pass back mutable references to these Dates
        // clone() is the Java SDK's recommended way to get date copies.
        // Enums are pass by value anyway.
        myStartDate = (Date)startDate.clone();
        myEndDate = (Date)endDate.clone();
        myDuration = duration;
    }
    /**
     * Instantiates a new date duration key with child keys (the children
     * are copied into an internal list and sorted).
     *
     * @param startDate the start date (defensively copied)
     * @param endDate the end date (defensively copied)
     * @param duration the duration
     * @param children the children
     */
    public DateDurationKey(Date startDate, Date endDate, Duration duration, Collection<? extends DateDurationKey> children)
    {
        myChildren = new ArrayList<>();
        myChildren.addAll(children);
        Collections.sort(myChildren);
        // Don't want to store or pass back mutable references to these Dates
        // clone() is the Java SDK's recommended way to get date copies.
        // Enums are pass by value anyway.
        myStartDate = (Date)startDate.clone();
        myEndDate = (Date)endDate.clone();
        myDuration = duration;
    }
    // Orders keys by start date, then end date, then duration.
    // NOTE(review): this is broader than equals(), which ignores the end
    // date -- see the note on equals().
    @Override
    public int compareTo(DateDurationKey ddk)
    {
        final int equal = 0;
        int toReturn = 0;
        // Double thisIndex = new Double(mySortIndex);
        // Double inIndex = new Double(ddk.getMySortIndex());
        if (this == ddk)
        {
            toReturn = equal;
        }
        else
        {
            toReturn = myStartDate.compareTo(ddk.getStartDate());
            if (toReturn == 0)
            {
                toReturn = myEndDate.compareTo(ddk.getEndDate());
            }
            if (toReturn == 0)
            {
                toReturn = myDuration.compareTo(ddk.getDuration());
            }
        }
        return toReturn;
    }
// /**
// * Instantiates a new date duration key.
// *
// * @param startDate the start date
// * @param endDate the end date
// * @param aDurationInSeconds the a duration in seconds
// */
// public DateDurationKey(Date startDate, Date endDate, int aDurationInSeconds)
// {
// // Don't want to store or pass back mutable references to these Dates
// // clone() is the Java SDK's recommended way to get date copies.
// // Enums are pass by value anyway.
// myStartDate = (Date)startDate.clone();
// myEndDate = (Date)endDate.clone();
//
// myDuration = Duration.CUSTOM;
// myCustomDurationInSeconds = aDurationInSeconds;
// }
/**
 * Tests whether the given instant falls inside this key's interval.
 *
 * @param in the instant to test
 * @return true when start &lt;= in &lt; end (end is exclusive)
 */
public boolean containsDate(Date in)
{
    final long instant = in.getTime();
    return instant >= myStartDate.getTime() && instant < myEndDate.getTime();
}
/**
 * Releases all state held by this key and recursively destroys its
 * children. After this call isDestroyed() returns true.
 *
 * (The original body also assigned null to the for-each loop variable,
 * which is a no-op in Java and has been removed.)
 */
public void destroy()
{
    myStartDate = null;
    myEndDate = null;
    myDuration = null;
    if (myChildren != null)
    {
        for (DateDurationKey child : myChildren)
        {
            child.destroy();
        }
        myChildren.clear();
        myChildren = null;
    }
    myHashCalculated = false;
}
/**
 * Equality is based on the duration and the start date only.
 *
 * NOTE(review): endDate is ignored here but participates in compareTo and
 * (as written) hashCode -- confirm that two keys with the same start and
 * duration but different ends are really meant to be equal.
 *
 * @param o the object to compare against
 * @return true if o is a DateDurationKey with the same duration and start date
 */
@Override
public boolean equals(Object o)
{
    if (o instanceof DateDurationKey)
    {
        DateDurationKey other = (DateDurationKey)o;
        return getDuration().equals(other.getDuration()) && getStartDate().getTime() == other.getStartDate().getTime();
    }
    return false;
}
/**
 * Builds a TimeSpan covering this key's interval.
 *
 * NOTE(review): unlike getSpan(), the dates are passed without cloning --
 * assumes TimeSpan.get does not retain mutable references; confirm.
 *
 * @return the [start, end] interval as a TimeSpan
 */
public TimeSpan getAsTimeSpan()
{
    return TimeSpan.get(myStartDate, myEndDate);
}
/**
 * Returns the live list of child keys.
 *
 * NOTE(review): this exposes the internal mutable list, so callers can
 * modify this key's children directly -- confirm that is intended.
 *
 * @return the internal child list (null after destroy())
 */
public List<DateDurationKey> getChildren()
{
    return myChildren;
}
/**
 * Gets the duration bucket this key represents.
 *
 * @return the duration (null after destroy())
 */
public Duration getDuration()
{
    return myDuration;
}
/**
 * Gets a defensive copy of the end date.
 *
 * @return a clone of the end date (throws NPE if called after destroy())
 */
public Date getEndDate()
{
    return (Date)myEndDate.clone();
}
/**
 * Gets the display name of this key.
 *
 * @return the name (may be null if never set)
 */
public String getName()
{
    return myName;
}
/**
 * Gets the sort index.
 *
 * @return the sort index (note: set via the int-taking setSortIndex)
 */
public double getSortIndex()
{
    return mySortIndex;
}
/**
 * Builds a TimeSpan covering this key's interval, passing defensive copies
 * of both dates.
 *
 * @return the [start, end] interval as a TimeSpan
 */
public TimeSpan getSpan()
{
    return TimeSpan.get((Date)myStartDate.clone(), (Date)myEndDate.clone());
}
/**
 * Gets a defensive copy of the start date.
 *
 * @return a clone of the start date, or null after destroy()
 */
public Date getStartDate()
{
    return myStartDate == null ? null : (Date)myStartDate.clone();
}
/**
 * Lazily computes and caches the hash code; the cache is invalidated by the
 * date/duration setters via myHashCalculated.
 *
 * The end date is deliberately excluded here: equals() compares only the
 * duration and the start date, and including endDate (as the previous
 * implementation did) let two equal keys produce different hash codes,
 * violating the Object.hashCode() contract.
 *
 * @return the hash code
 */
@Override
public int hashCode()
{
    if (!myHashCalculated)
    {
        final int prime = 31;
        myHash = 1;
        myHash = prime * myHash + (myDuration == null ? 0 : myDuration.hashCode());
        myHash = prime * myHash + (myStartDate == null ? 0 : myStartDate.hashCode());
        myHashCalculated = true;
    }
    return myHash;
}
/**
 * Checks whether destroy() has been called (either date being null counts
 * as destroyed).
 *
 * @return true if this key has been destroyed
 */
public boolean isDestroyed()
{
    return null == myEndDate || null == myStartDate;
}
/**
 * Get if this DDK has been expanded.
 *
 * @return the expanded flag
 */
public boolean isExpanded()
{
    return myExpanded;
}
/**
 * Gets the has-children flag (set externally via setHasChildren; not
 * derived from the myChildren list).
 *
 * @return true if this key is flagged as having children
 */
public boolean isHasChildren()
{
    return myHasChildren;
}
/**
 * Gets the has-data flag (also reflected in the toString() output).
 *
 * @return true if this key is flagged as having data
 */
public boolean isHasData()
{
    return myHasData;
}
/**
 * Sets the duration and invalidates the cached hash code.
 *
 * @param duration the new duration
 */
public void setDuration(Duration duration)
{
    myDuration = duration;
    myHashCalculated = false;
}
/**
 * Sets the end date (defensively copied) and invalidates the cached hash.
 *
 * @param endDate the new end date
 */
public void setEndDate(Date endDate)
{
    myEndDate = (Date)endDate.clone();
    myHashCalculated = false;
}
/**
 * Set if this DDK has been expanded.
 *
 * @param expanded flag indicating if the DDK has been expanded
 */
public void setExpanded(boolean expanded)
{
    myExpanded = expanded;
}
/**
 * Sets the has-children flag.
 *
 * @param hasChildren the new has-children value
 */
public void setHasChildren(boolean hasChildren)
{
    myHasChildren = hasChildren;
}
/**
 * Sets the has-data flag.
 *
 * @param hasData the new has-data value
 */
public void setHasData(boolean hasData)
{
    myHasData = hasData;
}
/**
 * Sets the display name of this key.
 *
 * @param s the new name
 */
public void setName(String s)
{
    myName = s;
}
/**
 * Sets the sort index.
 *
 * NOTE(review): the parameter is int while the field and getter are double
 * -- presumably only whole-number indices are assigned; confirm.
 *
 * @param sortIndex the new sort index
 */
public void setSortIndex(int sortIndex)
{
    mySortIndex = sortIndex;
}
/**
 * Sets the start date (defensively copied) and invalidates the cached hash.
 *
 * @param date the new start date
 */
public void setStartDate(Date date)
{
    myStartDate = (Date)date.clone();
    myHashCalculated = false;
}
/**
 * Debug representation: "[start - end:duration" followed by "no data] " or
 * "data] " depending on the has-data flag.
 *
 * @return the formatted string
 * @see java.lang.Object#toString()
 */
@Override
public String toString()
{
    StringBuilder sb = new StringBuilder();
    sb.append('[').append(myStartDate).append(" - ").append(myEndDate).append(':').append(myDuration);
    if (!myHasData)
    {
        sb.append("no ");
    }
    return sb.append("data] ").toString();
}
/**
 * Compact debug representation without the data flag:
 * "[start - end:duration] ".
 *
 * @return the formatted string
 */
public String toStringLittle()
{
    return new StringBuilder()
        .append('[')
        .append(myStartDate)
        .append(" - ")
        .append(myEndDate)
        .append(':')
        .append(myDuration)
        .append("] ")
        .toString();
}
}
|
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.core.migration.initial;
import com.tle.beans.ConfigurationProperty;
import com.tle.beans.Institution;
import com.tle.beans.ItemDefinitionScript;
import com.tle.beans.Language;
import com.tle.beans.ReferencedURL;
import com.tle.beans.SchemaScript;
import com.tle.beans.Staging;
import com.tle.beans.UserPreference;
import com.tle.beans.activation.ActivateRequest;
import com.tle.beans.entity.BaseEntity;
import com.tle.beans.entity.EntityLock;
import com.tle.beans.entity.LanguageBundle;
import com.tle.beans.entity.LanguageString;
import com.tle.beans.entity.itemdef.ItemDefinition;
import com.tle.beans.entity.itemdef.ItemdefBlobs;
import com.tle.beans.item.*;
import com.tle.beans.item.attachments.*;
import com.tle.beans.item.cal.request.CourseInfo;
import com.tle.beans.mime.MimeEntry;
import com.tle.beans.security.ACLEntryMapping;
import com.tle.beans.security.AccessEntry;
import com.tle.beans.security.AccessExpression;
import com.tle.beans.security.SharePass;
import com.tle.beans.user.TLEGroup;
import com.tle.beans.user.TLEUser;
import com.tle.beans.user.UserInfoBackup;
import com.tle.common.security.TargetListEntry;
import com.tle.common.workflow.Workflow;
import com.tle.common.workflow.WorkflowItemStatus;
import com.tle.common.workflow.WorkflowNodeStatus;
import com.tle.common.workflow.node.DecisionNode;
import com.tle.common.workflow.node.ParallelNode;
import com.tle.common.workflow.node.ScriptNode;
import com.tle.common.workflow.node.SerialNode;
import com.tle.common.workflow.node.WorkflowItem;
import com.tle.common.workflow.node.WorkflowNode;
import com.tle.core.db.DBSchema$;
import com.tle.core.facetedsearch.bean.FacetedSearchClassification;
import com.tle.core.guice.Bind;
import com.tle.core.hibernate.impl.AllDataHibernateMigrationFilter;
import com.tle.core.hibernate.impl.HibernateCreationFilter;
import com.tle.core.hibernate.impl.HibernateMigrationHelper;
import com.tle.core.migration.AbstractCreateMigration;
import com.tle.core.migration.MigrationInfo;
import com.tle.core.migration.beans.SystemConfig;
import com.tle.core.plugins.PluginService;
import com.tle.core.plugins.PluginTracker;
import com.tle.web.resources.PluginResourceHelper;
import com.tle.web.resources.ResourcesService;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.hibernate.classic.Session;
import org.java.plugin.registry.Extension;
import org.java.plugin.registry.Extension.Parameter;
/**
 * Migration that creates the initial database schema: registers the core
 * entity classes listed below plus any classes contributed through the
 * "initialSchema" extension point, and emits additional raw SQL for the
 * Scala-defined schema and a set of join-table indexes.
 */
@Bind
@Singleton
public class InitialSchema extends AbstractCreateMigration {
    // Tracks "initialSchema" plugin extensions contributing extra classes/indexes.
    private PluginTracker<Object> initialTracker;
    private static PluginResourceHelper r = ResourcesService.getResourceHelper(InitialSchema.class);

    // Core domain classes always included in the initial schema.
    private static Class<?>[] clazzes =
        new Class<?>[] {
            ConfigurationProperty.class,
            ItemDefinitionScript.class,
            SchemaScript.class,
            Institution.class,
            Language.class,
            Staging.class,
            UserPreference.class,
            UserPreference.UserPrefKey.class,
            BaseEntity.class,
            LanguageBundle.class,
            LanguageString.class,
            EntityLock.class,
            ItemDefinition.class,
            ItemdefBlobs.class,
            Workflow.class,
            WorkflowNodeStatus.class,
            WorkflowItemStatus.class,
            DecisionNode.class,
            ScriptNode.class,
            ParallelNode.class,
            SerialNode.class,
            WorkflowItem.class,
            WorkflowNode.class,
            ReferencedURL.class,
            Comment.class,
            DrmAcceptance.class,
            HistoryEvent.class,
            Item.class,
            ItemXml.class,
            DrmSettings.class,
            ItemLock.class,
            ModerationStatus.class,
            CourseInfo.class,
            Attachment.class,
            IMSResourceAttachment.class,
            FileAttachment.class,
            HtmlAttachment.class,
            ImsAttachment.class,
            CustomAttachment.class,
            LinkAttachment.class,
            ZipAttachment.class,
            ItemNavigationNode.class,
            ItemNavigationTab.class,
            NavigationSettings.class,
            AccessEntry.class,
            AccessExpression.class,
            SharePass.class,
            TLEUser.class,
            TLEGroup.class,
            UserInfoBackup.class,
            FacetedSearchClassification.class,
            Relation.class,
            Bookmark.class,
            MimeEntry.class,
            ActivateRequest.class,
            TargetListEntry.class,
            VersionSelection.class,
            BaseEntity.Attribute.class,
            ACLEntryMapping.class
        };

    /** Supplies the resource-bundle title and description for this migration. */
    @SuppressWarnings("nls")
    @Override
    public MigrationInfo createMigrationInfo() {
        return new MigrationInfo(r.key("initial.title"), r.key("initial.description"));
    }

    /**
     * Returns the core classes plus every "class" parameter contributed by
     * "initialSchema" plugin extensions.
     */
    @Override
    protected Class<?>[] getDomainClasses() {
        List<Class<?>> classList = new ArrayList<Class<?>>(Arrays.asList(clazzes));
        List<Extension> extensions = initialTracker.getExtensions();
        for (Extension extension : extensions) {
            Collection<Parameter> tempClazzes = extension.getParameters("class"); // $NON-NLS-1$
            for (Parameter parameter : tempClazzes) {
                String clazzName = parameter.valueAsString();
                classList.add(initialTracker.getClassForName(extension, clazzName));
            }
        }
        return classList.toArray(new Class<?>[classList.size()]);
    }

    /** Creates the tracker for the "initialSchema" extension point. */
    @Inject
    public void setPluginService(PluginService pluginService) {
        initialTracker =
            new PluginTracker<Object>(
                pluginService, "com.tle.core.migration", "initialSchema", null); // $NON-NLS-1$
    }

    /**
     * Adds raw SQL beyond what Hibernate generates: the Scala-side schema,
     * hard-coded indexes on join tables, and any "index" parameters declared
     * by plugin extensions (optionally function-based).
     */
    @SuppressWarnings("nls")
    @Override
    protected void addExtraStatements(HibernateMigrationHelper helper, List<String> sql) {
        sql.addAll(DBSchema$.MODULE$.schema().creationSQL());
        sql.addAll(helper.getAddIndexesRaw("bookmark_keywords", "bookkeywords", "bookmark_id"));
        sql.addAll(
            helper.getAddIndexesRaw(
                "tlegroup_users",
                new String[] {"tleguGroup", "tlegroup_id"},
                new String[] {"tleguElem", "element"}));
        sql.addAll(helper.getAddIndexesRaw("tlegroup_all_parents", "tlegap_parent", "all_parents_id"));
        sql.addAll(helper.getAddIndexesRaw("item_collaborators", "ic_item", "item_id"));
        sql.addAll(helper.getAddIndexesRaw("item_notifications", "in_item", "item_id"));
        sql.addAll(helper.getAddIndexesRaw("base_entity_attributes", "bea_entity", "base_entity_id"));
        sql.addAll(
            helper.getAddIndexesRaw(
                "access_expression_expression_p", "aeep_aexp", "access_expression_id"));
        sql.addAll(helper.getAddIndexesRaw("workflow_node_users", "wnu_node", "workflow_node_id"));
        sql.addAll(helper.getAddIndexesRaw("workflow_node_groups", "wng_node", "workflow_node_id"));
        sql.addAll(helper.getAddIndexesRaw("workflow_node_roles", "wnr_node", "workflow_node_id"));
        sql.addAll(
            helper.getAddIndexesRaw("workflow_node_auto_assigns", "wnaa_node", "workflow_node_id"));
        sql.addAll(
            helper.getAddIndexesRaw(
                "workflow_node_status_accepted", "taskAcceptedNode", "workflow_node_status_id"));
        sql.addAll(helper.getAddIndexesRaw("mime_entry_extensions", "mee_mime", "mime_entry_id"));
        List<Extension> extensions = initialTracker.getExtensions();
        for (Extension extension : extensions) {
            Collection<Parameter> indexes = extension.getParameters("index");
            for (Parameter indexParam : indexes) {
                String table = indexParam.getSubParameter("table").valueAsString();
                String name = indexParam.getSubParameter("name").valueAsString();
                Collection<Parameter> cols = indexParam.getSubParameters("column");
                String function =
                    indexParam.getSubParameter("function") != null
                        ? indexParam.getSubParameter("function").valueAsString()
                        : null;
                // index[0] is the index name, followed by the column names.
                String[] index = new String[cols.size() + 1];
                index[0] = name;
                int i = 1;
                for (Parameter col : cols) {
                    index[i++] = col.valueAsString();
                }
                if (function == null) {
                    sql.addAll(helper.getAddIndexesRaw(table, index));
                } else {
                    sql.addAll(helper.getAddFunctionIndexes(table, function, index));
                }
            }
        }
    }

    /**
     * Disables generator creation when the system_config table already
     * exists -- presumably indicating a partially-present schema; confirm.
     */
    @Override
    protected HibernateCreationFilter getFilter(HibernateMigrationHelper helper) {
        AllDataHibernateMigrationFilter filter = new AllDataHibernateMigrationFilter();
        Session session = helper.getFactory().openSession();
        if (helper.tableExists(session, SystemConfig.TABLE_NAME)) {
            filter.setIncludeGenerators(false);
        }
        session.close();
        return filter;
    }
}
|
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Copyright (c) Microsoft Corporation. All rights reserved.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#ifndef _DRIVERS_STATE_DEBOUNCE_DECL_H_
#define _DRIVERS_STATE_DEBOUNCE_DECL_H_ 1
//--//
// Declaration of a debounced-state driver. Only the interface is visible
// here; semantics below are inferred from names -- confirm against the
// implementation.
struct HAL_STATE_DEBOUNCE
{
private:
    UINT32 m_debounceTime_uSec;   // debounce window; name suggests microseconds
    HAL_COMPLETION m_callback;    // completion used to defer the registered callback
public:
    // Configures the debounce window and the routine to invoke (presumably
    // once a reported state stays stable for the window -- TODO confirm).
    void Initialize( UINT32 debounce_uSec, HAL_CALLBACK_FPN isr );
    // Reports a new raw state value.
    void Change( UINT32 state );
    // Cancels any pending callback.
    void Abort();
};
//--//
#endif // _DRIVERS_STATE_DEBOUNCE_DECL_H_
|
"""Train a random-forest audio classifier from a CSV manifest.

``dataset.csv`` must contain an "Audio Sample" column of WAV file paths and
a "Label" column. NOTE(review): stacking the decoded clips into one ndarray
assumes every clip has the same length -- confirm against the dataset.
"""
import wave
import pandas as pd
import numpy as np
from scipy.io import wavfile
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier

# Load the manifest of audio files and their labels.
manifest = pd.read_csv("dataset.csv")

# Decode each referenced WAV file; index [1] drops the sample rate.
samples = np.array([wavfile.read(path)[1] for path in manifest["Audio Sample"]])
labels = np.array(manifest["Label"])

# Hold out 20% of the clips for evaluation.
X_train, X_test, y_train, y_test = train_test_split(samples, labels, test_size=0.2)

# Fit a 100-tree random forest on the raw sample vectors.
model = RandomForestClassifier(n_estimators=100)
model.fit(X_train, y_train)

# Report held-out accuracy.
print('Model accuracy:', model.score(X_test, y_test))
#!/usr/bin/env bash
# Extract map preview PNGs from "<name>-<size>.zip" archives found in the
# "previews" directory of the install pointed to by $F7D2D. Each size gets
# its own "<size>-previews" output directory; already-extracted previews
# are skipped.
set -euo pipefail
IFS=$'\t\n'

: "${F7D2D:?Please export F7D2D with 7D2D install folder}"
cd "${F7D2D}/previews"

if [[ $# -lt 1 ]]; then
  # No sizes given: derive the set of sizes from the zip file names.
  # word splitting is intended
  # shellcheck disable=SC2046
  set -- $(compgen -f -X '!*.zip' | sed -nr '/-[0-9]+.zip/ s/.*-([0-9]+).zip/\1/p' | sort -u)
fi

for SIZE; do
  echo "Extracting ${SIZE}"
  mkdir -p "${SIZE}-previews"
  for map in *"${SIZE}.zip"; do
    # Skip archives whose preview has already been extracted.
    if [[ ! -f ${SIZE}-previews/${map%.zip}.png ]]; then
      unzip "$map" \
        "${map%.zip}.png" \
        "thumbs/${map%.zip}.png" \
        -d "${SIZE}-previews"
    fi
  done
done
#! /bin/bash --posix
# Build the scotty binary and package it, together with its init scripts
# and app configuration, into /tmp/$LOGNAME/scotty.tar.gz. Any extra
# arguments are passed straight through to tar.
set -eu

# NOTE(review): "bin" is never referenced below -- confirm it is still needed.
readonly bin="$GOPATH/bin/scotty"
readonly target="/tmp/$LOGNAME/scotty.tar.gz"

# Quote all path expansions: unquoted $GOPATH/$PWD/$target broke for
# paths containing whitespace (IFS splitting/globbing).
(cd "$GOPATH/src"; go install github.com/Symantec/scotty/...)
mkdir -p "${target%/*}"
cd "$GOPATH/src/github.com/Symantec/scotty"
tar --owner=0 --group=0 -czf "$target" \
    init.d/scotty.* \
    "$@" \
    -C "$PWD/apps/scotty" apps.yaml health-check.yml install.sh \
    -C "$GOPATH" bin/scotty
<filename>tapestry-core/src/main/java/org/apache/tapestry5/internal/model/ParameterModelImpl.java
// Copyright 2006, 2008, 2009, 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal.model;
import org.apache.tapestry5.model.ComponentModel;
import org.apache.tapestry5.model.ParameterModel;
/**
 * Immutable implementation of {@link ParameterModel}: a plain value holder
 * describing one component parameter (name, required/allow-null/cached
 * flags, default binding prefix) and the component model that owns it.
 */
public class ParameterModelImpl implements ParameterModel
{
    private final ComponentModel componentModel;

    private final String name;

    private final boolean required;

    private final boolean allowNull;

    private final String defaultBindingPrefix;

    private final boolean cached;

    /**
     * @param componentModel the model of the component defining the parameter
     * @param name the parameter name
     * @param required whether the parameter must be bound
     * @param allowNull whether a null bound value is permitted
     * @param defaultBindingPrefix binding prefix used when none is given
     * @param cached whether the bound value is cached during rendering
     */
    public ParameterModelImpl(ComponentModel componentModel, String name, boolean required, boolean allowNull, String defaultBindingPrefix, boolean cached)
    {
        this.componentModel = componentModel;
        this.name = name;
        this.required = required;
        this.allowNull = allowNull;
        this.defaultBindingPrefix = defaultBindingPrefix;
        this.cached = cached;
    }

    public String getName()
    {
        return name;
    }

    public boolean isRequired()
    {
        return required;
    }

    public String getDefaultBindingPrefix()
    {
        return defaultBindingPrefix;
    }

    public boolean isAllowNull()
    {
        return allowNull;
    }

    public boolean isCached()
    {
        return cached;
    }

    public ComponentModel getComponentModel()
    {
        return componentModel;
    }
}
|
#!/bin/bash -x
#
# Generated - do not edit!
#
# NetBeans packaging script: copies the built modem_ppp.exe into a staging
# tree and tars it into dist/.../package/modemppp.tar.
#
# Macros
TOP=`pwd`
CND_PLATFORM=Gnueabi-Windows
CND_CONF=Release
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=dll
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/V3/App/modem_ppp
OUTPUT_BASENAME=modem_ppp
PACKAGE_TOP_DIR=modemppp/

# Functions
# Exit the script with the previous command's status if it failed.
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
# Create a directory (and parents), optionally applying a chmod mode.
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
# Copy a file into the staging tree, optionally applying a chmod mode.
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}

# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}

# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/modemppp/bin"
copyFileToTmpDir "${OUTPUT_PATH}.exe" "${NBTMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}.exe" 0755

# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/modemppp.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/modemppp.tar *
checkReturnCode

# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
|
<reponame>chulqyfauzan/SmartHome
package models;
/**
 * Contract for components exposing sensor identification data.
 *
 * NOTE(review): field meanings ("jumlah" = count, "no" = number) are
 * inferred from the Indonesian names -- confirm with the authors.
 *
 * Redundant modifiers removed: interface fields are implicitly
 * public static final, and interface methods implicitly public abstract.
 */
public interface SensorManager2 {
    /** Sensor count constant (value 100). */
    int jumlah = 100;
    /** Sensor number constant (value 10). */
    int no = 10;

    /** Reports the sensor's name. */
    void sensorname();

    /** Reports the sensor's number. */
    void sensorno();
}
|
# Run the application inside the Poetry-managed virtualenv, passing
# config.json as its configuration file.
poetry run python run.py config.json
|
package io.quarkus.micrometer.deployment.binder;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import io.quarkus.micrometer.runtime.binder.HttpBinderConfiguration;
import io.quarkus.micrometer.runtime.binder.vertx.VertxMeterBinderAdapter;
import io.quarkus.test.QuarkusUnitTest;
import io.vertx.core.http.HttpServerOptions;
import io.vertx.core.net.SocketAddress;
/**
 * Verifies that enabling only the Vert.x meter binder (with the default
 * binder setting off) leaves HTTP server metrics disabled: the adapter must
 * refuse to create HttpServerMetrics and the HTTP binder configuration must
 * report the server side as disabled.
 */
public class VertxWithHttpDisabledTest {
    @RegisterExtension
    static final QuarkusUnitTest config = new QuarkusUnitTest()
        .withConfigurationResource("test-logging.properties")
        // All binders off by default; only the Vert.x binder re-enabled.
        .overrideConfigKey("quarkus.micrometer.binder-enabled-default", "false")
        .overrideConfigKey("quarkus.micrometer.binder.vertx.enabled", "true")
        .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class));

    @Inject
    Instance<VertxMeterBinderAdapter> vertxMeterBinderAdapterInstance;

    @Inject
    HttpBinderConfiguration httpBinderConfiguration;

    @Test
    public void testVertxMetricsWithoutHttp() throws Exception {
        Assertions.assertTrue(vertxMeterBinderAdapterInstance.isResolvable());
        VertxMeterBinderAdapter adapter = vertxMeterBinderAdapterInstance.get();

        // HttpServerMetrics should not be created (null returned) because
        // Http server metrics are disabled
        Assertions.assertNull(adapter.createHttpServerMetrics(new HttpServerOptions(), new SocketAddress() {
            @Override
            public String host() {
                return "a.b.c";
            }

            @Override
            public int port() {
                return 0;
            }

            @Override
            public String path() {
                return null;
            }
        }));

        Assertions.assertFalse(httpBinderConfiguration.isServerEnabled());
    }
}
|
def func(a):
    """Return a new list with every element of ``a`` increased by 5."""
    shifted = []
    for value in a:
        shifted.append(value + 5)
    return shifted
#!/bin/sh
# This shell script runs the ncdump tests.
# $Id: run_nc4_tests.sh,v 1.4 2010/05/18 20:05:23 dmh Exp $

# Default srcdir to the current directory when not set by the test harness.
if test "x$srcdir" = x ; then srcdir="."; fi

echo "*** Testing ncgen for netCDF-4."
# Abort on the first failing command.
set -e
echo "*** creating netCDF-4 file c0_4.nc from c0_4.cdl..."
./ncgen -k nc4 -b -o c0_4.nc $srcdir/c0_4.cdl
echo "*** creating netCDF-4 classic model file c0_4c.nc from c0.cdl..."
./ncgen -k nc7 -b -o c0_4c.nc $srcdir/c0.cdl
echo "*** creating C code for CAM file ref_camrun.cdl..."
./ncgen -lc $srcdir/ref_camrun.cdl >ref_camrun.c
echo "*** test for jira NCF-199 bug"
# Regression input for issue NCF-199.
./ncgen -k nc4 $srcdir/ncf199.cdl
echo "*** Test successful!"
exit 0
|
<filename>spring-boot-tony-starters-example/tony-mybatis-plus/src/main/java/com/example/tony/mp/mapper/UserMapper.java<gh_stars>100-1000
package com.example.tony.mp.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.example.tony.mp.entity.User;
import com.tony.mybatis.plus.mapper.TonyMapper;
/**
 * MyBatis-Plus mapper for the {@code User} entity; all CRUD operations are
 * inherited from {@code TonyMapper}.
 *
 * @Entity com.tony.mybatis.example.entity.User
 */
public interface UserMapper extends TonyMapper<User> {
}
|
'use strict';

// Declare app level module which depends on views, and components
var app = angular.module('myApp', [
  'ngRoute',
  'ngCookies',
  'textAngular',
  'myApp.view1',
  'myApp.view2',
  'myApp.version',
  'myApp.notes',
  'myApp.login'
]);

// Any unknown route falls back to the login screen.
app.config(['$routeProvider', function($routeProvider) {
  $routeProvider.otherwise({redirectTo: '/login'});
}]);
// Usage: <input focus-on="noteCleared"> -- focuses the element whenever the
// named event is broadcast on the scope.
app.directive('focusOn', function() {
  return function(scope, element, attributes) {
    scope.$on(attributes.focusOn, function() {
      element[0].focus();
    });
  };
});
/**
 * Backend service for the ElevenNote API: handles session login (persisted
 * in a cookie) and CRUD operations on notes.
 */
app.service('NotesBackend', function($http, $cookies) {
  var apiBasePath = 'https://elevennote-nov-2014.herokuapp.com/api/v1/';
  var postNotePath = apiBasePath + 'notes';
  var notes = [];
  // Restore the logged-in user from the cookie, if present.
  var user = $cookies.user ? JSON.parse($cookies.user) : {};

  this.getNotes = function() {
    return notes;
  };

  this.getUser = function() {
    return user;
  };

  this.deleteCookie = function() {
    delete $cookies.user;
    user = {};
    notes = [];
  };

  // POST /api/v1/session (expects { "user": { "username": ..., "password": ... } })
  this.fetchUser = function(credentials, callback) {
    var self = this;
    $http.post(apiBasePath + 'session', {
      user: {
        username: credentials.username,
        // BUGFIX: was a redacted "<PASSWORD>" placeholder (a syntax error);
        // send the password supplied by the caller.
        password: credentials.password
      }
    }).success(function(userData) {
      // BUGFIX: the parameter used to be named "user", shadowing the
      // service-level user, so the service never kept the api_key.
      user = userData;
      if (userData.id) {
        // e.g. '{"username":"djones", ..}'
        $cookies.user = JSON.stringify(user);
        self.fetchNotes();
      }
      callback(userData);
    });
  };

  this.fetchNotes = function() {
    if (user.api_key) {
      $http.get(apiBasePath + 'notes.json?api_key=' + user.api_key).success(function(noteData){
        notes = noteData;
      });
    }
  };

  this.postNote = function(note) {
    $http.post(postNotePath, {
      api_key: user.api_key,
      note: {
        title: note.title,
        body_html: note.body_html
      }
    }).success(function(noteData) {
      // Newest note goes to the front of the list.
      notes.unshift(noteData);
    });
  };

  this.deleteNote = function(note) {
    var self = this;
    $http.delete(apiBasePath + 'notes/' + note.id + '?api_key=' + user.api_key)
      .success(function() {
        self.fetchNotes();
      });
  };

  // Replace the cached copy of a note with the server's updated version.
  this.replaceNote = function(note) {
    for(var i=0; i < notes.length; i++) {
      if (notes[i].id === note.id) {
        notes[i] = note;
      }
    }
  };

  this.updateNote = function(note) {
    var self = this;
    $http.put(apiBasePath + 'notes/' + note.id, {
      api_key: user.api_key,
      note: note
    }).success(function(newNoteData) {
      self.replaceNote(newNoteData);
    })
  };
});
|
<reponame>sintefneodroid/droid
// Doxygen-generated search index entry for the "icon" symbol; regenerated
// by the documentation build -- do not edit by hand.
var searchData=
[
  ['icon',['Icon',['../classdroid_1_1_runtime_1_1_utilities_1_1_game_objects_1_1_status_displayer_1_1_render_texture_list_item.html#aad131a75b13932ac842530b147429a79',1,'droid::Runtime::Utilities::GameObjects::StatusDisplayer::RenderTextureListItem']]]
];
|
# Build a maximum-likelihood tree from the trimmed alignment using IQ-TREE with 16 threads.
/home3/redwards/opt/iq-tree/current/bin/iqtree -nt 16 -s seqs.A.rc.trim.aln
|
package tcg.credential;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1Object;
import org.bouncycastle.asn1.ASN1Primitive;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.ASN1TaggedObject;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.DERTaggedObject;
import org.bouncycastle.asn1.DERUTF8String;
/**
* Class name is "PlatformProperties" as opposed to TCG given name of "Properties"
* to limit potential collision issues with built-in Java class named Properties.
*
* <pre>
* Properties ::= SEQUENCE {
* propertyName UTF8String (SIZE (1..STRMAX)),
* propertyValue UTF8String (SIZE (1..STRMAX)),
* status [0] IMPLICIT AttributeStatus OPTIONAL }
* </pre>
*/
public class PlatformPropertiesV2 extends ASN1Object {

    // Encoded sequence has minimum 2, maximum 3 elements.
    DERUTF8String propertyName;
    DERUTF8String propertyValue;
    AttributeStatus status = null; // optional, tagged 0

    /**
     * Converts the given object to a PlatformPropertiesV2.
     *
     * @param obj null, a PlatformPropertiesV2, or an ASN1Sequence
     * @return the instance (null when obj is null)
     * @throws IllegalArgumentException for any other input type
     */
    public static PlatformPropertiesV2 getInstance(Object obj) {
        if (obj == null || obj instanceof PlatformPropertiesV2) {
            return (PlatformPropertiesV2) obj;
        }
        if (obj instanceof ASN1Sequence) {
            return new PlatformPropertiesV2((ASN1Sequence)obj);
        }
        throw new IllegalArgumentException("Illegal argument in getInstance: " + obj.getClass().getName());
    }

    /**
     * Decodes the sequence form: propertyName, propertyValue, optional
     * [0] IMPLICIT AttributeStatus.
     *
     * @throws IllegalArgumentException on wrong size, wrong element types,
     *         or strings exceeding STRMAX
     */
    private PlatformPropertiesV2(ASN1Sequence seq) {
        if ((seq.size() < 2) || (seq.size() > 3)) {
            throw new IllegalArgumentException("Bad sequence size: " + seq.size());
        }
        ASN1Object[] elements = (ASN1Object[]) seq.toArray();
        if (elements[0] instanceof DERUTF8String) {
            propertyName = (DERUTF8String) elements[0];
            if (propertyName.toString().length() > Definitions.STRMAX) {
                throw new IllegalArgumentException("Length of propertyName exceeds STRMAX");
            }
        } else {
            // BUGFIX: message previously claimed "TCGSpecificationVersion" was expected.
            throw new IllegalArgumentException("Expected DERUTF8String, received " + elements[0].getClass().getName());
        }
        if (elements[1] instanceof DERUTF8String) {
            propertyValue = (DERUTF8String) elements[1];
            if (propertyValue.toString().length() > Definitions.STRMAX) {
                throw new IllegalArgumentException("Length of propertyValue exceeds STRMAX");
            }
        } else {
            // BUGFIX: message previously claimed "DEROctetString" and reported elements[0].
            throw new IllegalArgumentException("Expected DERUTF8String, received " + elements[1].getClass().getName());
        }
        // Optional trailing element: [0] IMPLICIT AttributeStatus.
        int pos = 2;
        if (((elements.length - pos) > 0) && (elements[pos] instanceof ASN1TaggedObject) && (((ASN1TaggedObject)elements[pos]).getTagNo() == 0)) {
            if ((ASN1Object)((ASN1TaggedObject)elements[pos]).getObject() instanceof AttributeStatus) {
                status = (AttributeStatus)(ASN1Object)((ASN1TaggedObject)elements[pos]).getObject();
            } else {
                throw new IllegalArgumentException("Expected AttributeStatus object, but received " + elements[pos].getClass().getName());
            }
            // (dead "pos++" removed -- no further elements are read)
        }
    }

    public PlatformPropertiesV2(DERUTF8String propertyName, DERUTF8String propertyValue) {
        this(propertyName, propertyValue, null);
    }

    /**
     * @param propertyName the property name (at most STRMAX characters)
     * @param propertyValue the property value (at most STRMAX characters)
     * @param status optional attribute status (may be null)
     * @throws IllegalArgumentException when either string exceeds STRMAX
     */
    public PlatformPropertiesV2(DERUTF8String propertyName, DERUTF8String propertyValue, AttributeStatus status) {
        if (propertyName.toString().length() > Definitions.STRMAX) {
            throw new IllegalArgumentException("Length of propertyName exceeds STRMAX");
        }
        if (propertyValue.toString().length() > Definitions.STRMAX) {
            throw new IllegalArgumentException("Length of propertyValue exceeds STRMAX");
        }
        this.propertyName = propertyName;
        this.propertyValue = propertyValue;
        this.status = status;
    }

    /** Re-encodes as: name, value, optional implicitly-tagged [0] status. */
    public ASN1Primitive toASN1Primitive() {
        ASN1EncodableVector vec = new ASN1EncodableVector();
        vec.add(propertyName);
        vec.add(propertyValue);
        if (status != null) {
            vec.add(new DERTaggedObject(false, 0, status));
        }
        return new DERSequence(vec);
    }

    public DERUTF8String getPropertyName() {
        return propertyName;
    }

    public DERUTF8String getPropertyValue() {
        return propertyValue;
    }

    public AttributeStatus getStatus() {
        return status;
    }
}
|
package com.bv.eidss;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import com.bv.eidss.data.EidssDatabase;
/**
 * Read-only ContentProvider exposing EIDSS regions at
 * content://com.bv.eidss.regionsprovider/regions and .../regions/#.
 * Only query() is supported; insert/update/delete/getType all throw.
 */
public class RegionsProvider extends ContentProvider{

    static final String PROVIDER_NAME = "com.bv.eidss.regionsprovider";
    static final String URL = "content://" + PROVIDER_NAME + "/regions";
    public static final Uri CONTENT_URI = Uri.parse(URL);

    public static final String NAME = "name";

    // URI match codes: whole collection vs. a single region by id.
    static final int REGIONS = 1;
    static final int REGION_ID = 2;

    static final UriMatcher uriMatcher;
    static{
        uriMatcher = new UriMatcher(UriMatcher.NO_MATCH);
        uriMatcher.addURI(PROVIDER_NAME, "regions", REGIONS);
        uriMatcher.addURI(PROVIDER_NAME, "regions/#", REGION_ID);
    }

    // Opened lazily on first query(); never closed by this provider.
    private SQLiteDatabase db;

    @Override
    public boolean onCreate() {
        // Database opening is deferred to query(); the original eager-open
        // code is kept commented out for reference.
        /*EidssDatabase mDb = new EidssDatabase(getContext());
        db = mDb.getReadableDatabase();
        return (db == null)? false:true;*/
        return true;
    }

    @Override
    public Uri insert(Uri uri, ContentValues values) {
        throw new SQLException("Failed to add a record into " + uri);
    }

    /**
     * Runs one of the two fixed raw SQL queries, passing only selectionArgs
     * through; projection, selection and sortOrder are ignored.
     * NOTE(review): lazy init of db is not synchronized -- confirm queries
     * only arrive from a single thread.
     */
    @Override
    public Cursor query(Uri uri, String[] projection, String selection,
                        String[] selectionArgs, String sortOrder) {
        if (db == null) {
            EidssDatabase mDb = new EidssDatabase(getContext());
            db = mDb.getReadableDatabase();
        }
        switch (uriMatcher.match(uri)) {
            case REGIONS:
                return db.rawQuery(EidssDatabase.select_sql_regions, selectionArgs);
            case REGION_ID:
                return db.rawQuery(EidssDatabase.select_sql_region, selectionArgs);
            default:
                throw new IllegalArgumentException("Wrong URI: " + uri);
        }
    }

    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        throw new IllegalArgumentException("Unknown URI " + uri);
    }

    @Override
    public int update(Uri uri, ContentValues values, String selection,
                      String[] selectionArgs) {
        throw new IllegalArgumentException("Unknown URI " + uri );
    }

    @Override
    public String getType(Uri uri) {
        throw new IllegalArgumentException("Unsupported URI: " + uri);
    }
}
|
<gh_stars>1-10
module BlueberryRedactor
  # Attachment record backed by a single ActiveStorage blob named :file.
  class File < Attachment
    has_one_attached :file
  end
end
|
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
TOP=`pwd`
CND_CONF=default
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/servito.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=servito.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=servito.x/
# Functions
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission (optional)
{
    cp "$1" "$2"
    checkReturnCode
    # Fix: -n test plus quoted expansion, matching makeDirectory above.
    if [ -n "$3" ]
    then
        chmod "$3" "$2"
        checkReturnCode
    fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
# NOTE(review): expansions below are unquoted — this generated script assumes
# space-free project paths.
cd "${TOP}"
makeDirectory ${TMPDIR}/servito.x/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/servito.x.tar
# Archive from inside the staging dir so entries in the tar start at servito.x/;
# the ../../../.. climbs back out of build/<conf>/<image>/tmp-packaging.
cd ${TMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/servito.x.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
|
<reponame>nbarrientos/puppetlabs-stdlib<filename>spec/functions/to_bytes_spec.rb
# frozen_string_literal: true
require 'spec_helper'
# Specs for the stdlib to_bytes() function: converts a number, or a string of
# the form "<number><SI prefix>", into a byte count using binary (1024) multiples.
describe 'to_bytes' do
it { is_expected.not_to eq(nil) }
# Argument-count and argument-type validation.
it { is_expected.to run.with_params.and_raise_error(Puppet::ParseError, %r{wrong number of arguments}i) }
it { is_expected.to run.with_params('1', 'extras').and_raise_error(Puppet::ParseError, %r{wrong number of arguments}i) }
it { is_expected.to run.with_params([]).and_raise_error(TypeError, %r{(can't convert|no implicit conversion of) Array (in)?to String}) }
it { is_expected.to run.with_params({}).and_raise_error(TypeError, %r{(can't convert|no implicit conversion of) Hash (in)?to String}) }
it { is_expected.to run.with_params(true).and_raise_error(TypeError, %r{(can't convert|no implicit conversion of) (TrueClass|true) (in)?to String}) }
describe 'when passing numbers' do
# Numeric inputs pass through unchanged, including floats.
it { is_expected.to run.with_params(0).and_return(0) }
it { is_expected.to run.with_params(1).and_return(1) }
it { is_expected.to run.with_params(-1).and_return(-1) }
it { is_expected.to run.with_params(1.1).and_return(1.1) }
it { is_expected.to run.with_params(-1.1).and_return(-1.1) }
end
describe 'when passing numbers as strings' do
describe 'without a unit' do
it { is_expected.to run.with_params('1').and_return(1) }
it { is_expected.to run.with_params('-1').and_return(-1) }
# Legacy quirk (kept for compatibility): fractional strings are truncated.
it { is_expected.to run.with_params('1.1').and_return(1) }
it { is_expected.to run.with_params('-1.1').and_return(-1) }
end
describe 'with a unit' do
# Prefixes are binary multiples: k = 1024, M = 1024^2, ... E = 1024^6.
it { is_expected.to run.with_params('1k').and_return(1024) }
it { is_expected.to run.with_params('-1kB').and_return(-1024) }
it { is_expected.to run.with_params('1M').and_return(1024 * 1024) }
it { is_expected.to run.with_params('1G').and_return(1024 * 1024 * 1024) }
it { is_expected.to run.with_params('1T').and_return(1024 * 1024 * 1024 * 1024) }
it { is_expected.to run.with_params('1P').and_return(1024 * 1024 * 1024 * 1024 * 1024) }
it { is_expected.to run.with_params('1E').and_return(1024 * 1024 * 1024 * 1024 * 1024 * 1024) }
it { is_expected.to run.with_params('1.5e3M').and_return(1_572_864_000) }
it { is_expected.to run.with_params('4k').and_return(4 * 1024) }
it { is_expected.to run.with_params('-4kB').and_return(4 * -1024) }
it { is_expected.to run.with_params('4M').and_return(4 * 1024 * 1024) }
it { is_expected.to run.with_params('4G').and_return(4 * 1024 * 1024 * 1024) }
it { is_expected.to run.with_params('4T').and_return(4 * 1024 * 1024 * 1024 * 1024) }
it { is_expected.to run.with_params('4P').and_return(4 * 1024 * 1024 * 1024 * 1024 * 1024) }
it { is_expected.to run.with_params('4E').and_return(4 * 1024 * 1024 * 1024 * 1024 * 1024 * 1024) }
# Legacy quirk: fractions are truncated before the prefix is applied.
it { is_expected.to run.with_params('1.0001 k').and_return(1024) }
it { is_expected.to run.with_params('-1.0001 kB').and_return(-1024) }
end
describe 'with a unknown unit' do
# Prefix matching is case-sensitive and single-letter.
it { is_expected.to run.with_params('1KB').and_raise_error(Puppet::ParseError, %r{Unknown prefix}) }
it { is_expected.to run.with_params('1K').and_raise_error(Puppet::ParseError, %r{Unknown prefix}) }
it { is_expected.to run.with_params('1mb').and_raise_error(Puppet::ParseError, %r{Unknown prefix}) }
it { is_expected.to run.with_params('1m').and_raise_error(Puppet::ParseError, %r{Unknown prefix}) }
it { is_expected.to run.with_params('1%').and_raise_error(Puppet::ParseError, %r{Unknown prefix}) }
it { is_expected.to run.with_params('1 p').and_raise_error(Puppet::ParseError, %r{Unknown prefix}) }
end
end
# Legacy quirks: garbage after the leading number is ignored rather than rejected.
describe 'when passing random stuff' do
it { is_expected.to run.with_params('-1....1').and_return(-1) }
it { is_expected.to run.with_params('-1.e.e.e.1').and_return(-1) }
it { is_expected.to run.with_params('-1+1').and_return(-1) }
it { is_expected.to run.with_params('1-1').and_return(1) }
it { is_expected.to run.with_params('1 kaboom').and_return(1024) }
it { is_expected.to run.with_params('kaboom').and_return(0) }
end
end
package dev.webfx.kit.mapper.peers.javafxcontrols.gwt.html;
import elemental2.dom.HTMLElement;
import javafx.scene.control.Hyperlink;
import dev.webfx.kit.mapper.peers.javafxcontrols.base.ButtonBasePeerBase;
import dev.webfx.kit.mapper.peers.javafxcontrols.base.ButtonBasePeerMixin;
import dev.webfx.kit.mapper.peers.javafxgraphics.gwt.html.layoutmeasurable.HtmlLayoutMeasurableNoGrow;
import dev.webfx.kit.mapper.peers.javafxgraphics.gwt.util.HtmlUtil;
/**
* @author <NAME>
*/
public final class HtmlHyperlinkPeer
<N extends Hyperlink, NB extends ButtonBasePeerBase<N, NB, NM>, NM extends ButtonBasePeerMixin<N, NB, NM>>
extends HtmlButtonBasePeer<N, NB, NM>
implements HtmlLayoutMeasurableNoGrow {
// Maps a JavaFX Hyperlink onto an HTML <a> element.
public HtmlHyperlinkPeer() {
// Raw-typed base with an unchecked cast; presumably mirrors the other peers
// in this package — confirm before "fixing" the generics.
this((NB) new ButtonBasePeerBase(), HtmlUtil.createElement("a"));
}
public HtmlHyperlinkPeer(NB base, HTMLElement element) {
super(base, element);
// A non-empty href makes the browser treat the element as a real link
// (cursor, focus, styling)...
element.setAttribute("href", "#");
// ...but default navigation is suppressed so activation stays with JavaFX.
element.onclick = e -> {
e.preventDefault();
return null;
};
}
}
|
# author: h.serimer 04.2021 https://github.com/eproje/uPy_Course
# Board: Lolin32 Lite
# Simple oscilloscope on an SSD1306 OLED: scrolls one ADC sample per column.
from machine import *  # Pin, ADC, I2C, freq, ...
import ssd1306
import gc

# Hardware I2C-0, overclocked bus (1.2 MHz) for faster frame updates.
oled = ssd1306.SSD1306_I2C(128, 64, I2C(0, freq=1200000))

# ADC input on pin 34; 11 dB attenuation gives the full 3.3 V range.
pot = ADC(Pin(34))
pot.atten(ADC.ATTN_11DB)

freq(240000000)  # overclock the CPU (default is 160 MHz)

# One sample per display column; 63 is the bottom row, so the trace starts flat.
y_list = [63] * 128

oled.fill(0)  # clear the frame buffer (may hold leftovers from a previous run)
print("Program basladi")

while True:
    # Map the 12-bit ADC reading (0..4095) to a screen row (63..0, inverted).
    y = 63 - int((pot.read() * 63) / 4095)
    # Fix: the rightmost column (x == 127) was previously never erased or drawn,
    # so the newest sample was invisible. Erase the old pixel before replacing it.
    oled.pixel(127, y_list[127], 0)
    y_list[127] = y
    for x in range(0, 127):
        oled.pixel(x, y_list[x], 0)   # erase the previous pixel in this column
        y_list[x] = y_list[x + 1]     # scroll the trace left by one column
        oled.pixel(x, y_list[x], 1)   # draw the shifted pixel
    oled.pixel(127, y_list[127], 1)   # draw the newest sample at the right edge
    oled.show()   # push the frame buffer to the display
    gc.collect()  # keep heap fragmentation down in the endless loop
|
<gh_stars>1-10
const fs = require('fs');
const path = require('path');
const webpack = require('webpack');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const FaviconsWebpackPlugin = require('favicons-webpack-plugin');
const ExtractTextWebpackPlugin = require('extract-text-webpack-plugin');
// Dev-server coordinates. NOTE(review): `port` is NaN when process.env.port is
// unset, and neither constant is referenced in this config — presumably used by
// a sibling server script; confirm before removing.
const host = process.env.host || 'localhost';
const port = parseInt(process.env.port, 10) + 1;
// Package version is baked into the output bundle filename below.
const version = require('./package').version;
// Read .babelrc manually and hand it to babel-loader as inline options
// (JSON.parse accepts the Buffer returned by readFileSync).
const babelrc = fs.readFileSync('./.babelrc');
let babelrcObject = {};
try {
babelrcObject = JSON.parse(babelrc);
} catch (e) {
console.error('===> Error parsing babelrc', e);
}
// Production build: bundles src/app.js into dist/ under a versioned filename,
// lints + transpiles JS, extracts CSS, and generates index.html with favicons.
module.exports = {
devtool: 'source-map',
context: path.resolve(__dirname, './src'),
entry: {
'app': [
path.join(__dirname, 'src/app.js')
]
},
output: {
path: path.resolve(__dirname, 'dist/'),
// e.g. app.1.2.3.min.js — cache-busted by package version.
filename: '[name].' + version + '.min.js',
publicPath: '/'
},
module: {
rules: [{
test: /\.js$/,
exclude: /node_modules/,
// Loaders apply last-to-first, so eslint-loader lints the source before
// babel-loader transpiles it with the parsed .babelrc options.
use: [{
loader: 'babel-loader',
options: babelrcObject
}, {
loader: 'eslint-loader'
}]
}, {
test: /\.json$/,
use: [{
loader: 'json-loader'
}]
}, {
test: /\.css$/,
use: [{
loader: 'style-loader'
}, {
loader: 'css-loader',
options: {
// CSS modules with readable scoped class names.
modules: true,
sourceMap: true,
localIdentName: '[local]__[hash:base64]',
minimize: true
}
}]
}]
},
resolve: {
// Allows absolute-style imports from src/ in addition to node_modules.
modules: [
'src',
'node_modules'
],
extensions: [ '.json', '.js']
},
plugins: [
new HtmlWebpackPlugin({
template: 'index.html',
inject: 'body',
filename: 'index.html'
}),
new FaviconsWebpackPlugin(path.join(__dirname, 'src/boris.png')),
new ExtractTextWebpackPlugin('[name]-[hash].min.css'),
new webpack.optimize.UglifyJsPlugin({
compressor: {
warnings: false,
screw_ie8: true
}
}),
// Lets application code branch on NODE_ENV at build time.
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV)
}),
new webpack.IgnorePlugin(/webpack-stats\.json$/)
]
};
<gh_stars>10-100
import Texture2D from 'qtek/src/Texture2D';
import Texture from 'qtek/src/Texture';
import workerFunc from './forceAtlas2Worker.js';
// Stringify the worker entry function and strip the outer "function () { ... }"
// wrapper, leaving the raw body to be loaded into a Blob-backed Worker (initData).
var workerUrl = workerFunc.toString();
workerUrl = workerUrl.slice(workerUrl.indexOf('{') + 1, workerUrl.lastIndexOf('}'));
// Default ForceAtlas2 layout parameters.
// NOTE(review): the key names — including the 'jitterTolerence' spelling — are
// part of the worker message protocol; do not rename without updating the worker.
var defaultConfigs = {
barnesHutOptimize: true,
barnesHutTheta: 1.5,
repulsionByDegree: true,
linLogMode: false,
strongGravityMode: false,
gravity: 1.0,
scaling: 1.0,
edgeWeightInfluence: 1.0,
jitterTolerence: 0.1,
preventOverlap: false,
dissuadeHubs: false,
gravityCenter: null
};
// ForceAtlas2 layout driver: owns the worker-backed simulation state and a
// float texture mirroring node positions for GPU-side consumers.
var ForceAtlas2 = function (options) {
    var key;
    // Seed every instance with the library defaults...
    for (key in defaultConfigs) {
        this[key] = defaultConfigs[key];
    }
    // ...then layer any caller-supplied overrides on top.
    if (options) {
        for (key in options) {
            this[key] = options[key];
        }
    }
    this._nodes = [];
    this._edges = [];
    this._disposed = false;
    // One RGBA float texel per node; x/y live in the first two channels
    // (filled in by _updateTexture).
    this._positionTex = new Texture2D({
        type: Texture.FLOAT,
        flipY: false,
        minFilter: Texture.NEAREST,
        magFilter: Texture.NEAREST
    });
};
// Uploads the graph to a freshly created layout worker and sizes the position texture.
ForceAtlas2.prototype.initData = function (nodes, edges) {
// Build the worker from the stringified function body (see workerUrl above).
// NOTE(review): the blob URL is never revoked — a minor leak if initData is
// called repeatedly; revoking immediately may be unsafe on browsers that load
// worker scripts asynchronously, so confirm before changing.
var bb = new Blob([workerUrl]);
var blobURL = window.URL.createObjectURL(bb);
this._worker = new Worker(blobURL);
this._worker.onmessage = this._$onupdate.bind(this);
this._nodes = nodes;
this._edges = edges;
this._frame = 0;
var nNodes = nodes.length;
var nEdges = edges.length;
// Flat typed arrays describing the graph, posted to the worker below.
var positionArr = new Float32Array(nNodes * 2);
var massArr = new Float32Array(nNodes);
var sizeArr = new Float32Array(nNodes);
var edgeArr = new Float32Array(nEdges * 2);
var edgeWeightArr = new Float32Array(nEdges);
for (var i = 0; i < nodes.length; i++) {
var node = nodes[i];
positionArr[i * 2] = node.x;
positionArr[i * 2 + 1] = node.y;
// Mass and size default to 1 when absent on the node.
massArr[i] = node.mass == null ? 1 : node.mass;
sizeArr[i] = node.size == null ? 1 : node.size;
}
for (var i = 0; i < edges.length; i++) {
var edge = edges[i];
var source = edge.node1;
var target = edge.node2;
edgeArr[i * 2] = source;
edgeArr[i * 2 + 1] = target;
edgeWeightArr[i] = edge.weight == null ? 1 : edge.weight;
}
// Smallest square RGBA float texture holding one texel per node.
var textureWidth = Math.ceil(Math.sqrt(nodes.length));
var textureHeight = textureWidth;
var pixels = new Float32Array(textureWidth * textureHeight * 4);
var positionTex = this._positionTex;
positionTex.width = textureWidth;
positionTex.height = textureHeight;
positionTex.pixels = pixels;
this._worker.postMessage({
cmd: 'init',
nodesPosition: positionArr,
nodesMass: massArr,
nodesSize: sizeArr,
edges: edgeArr,
edgesWeight: edgeWeightArr
});
this._globalSpeed = Infinity;
};
// Rebuilds the layout configuration (defaults -> scale heuristics -> caller
// overrides), recomputes node degrees, and pushes the config to the worker.
ForceAtlas2.prototype.updateOption = function (options) {
    var config = {};
    // Start from the library defaults.
    for (var name in defaultConfigs) {
        config[name] = defaultConfigs[name];
    }
    var nodes = this._nodes;
    var edges = this._edges;
    // Heuristics driven by graph size: looser jitter tolerance and Barnes-Hut
    // approximation for large graphs, stronger scaling for small ones.
    var nNodes = nodes.length;
    if (nNodes > 50000) {
        config.jitterTolerence = 10;
    }
    else if (nNodes > 5000) {
        config.jitterTolerence = 1;
    }
    else {
        config.jitterTolerence = 0.1;
    }
    if (nNodes > 100) {
        config.scaling = 2.0;
    }
    else {
        config.scaling = 10.0;
    }
    if (nNodes > 1000) {
        config.barnesHutOptimize = true;
    }
    else {
        config.barnesHutOptimize = false;
    }
    // Explicit caller options win over both defaults and heuristics.
    if (options) {
        for (var name in defaultConfigs) {
            if (options[name] != null) {
                config[name] = options[name];
            }
        }
    }
    // Default gravity center: center of the nodes' bounding box.
    if (!config.gravityCenter) {
        var min = [Infinity, Infinity];
        var max = [-Infinity, -Infinity];
        for (var i = 0; i < nodes.length; i++) {
            min[0] = Math.min(nodes[i].x, min[0]);
            min[1] = Math.min(nodes[i].y, min[1]);
            max[0] = Math.max(nodes[i].x, max[0]);
            max[1] = Math.max(nodes[i].y, max[1]);
        }
        config.gravityCenter = [(min[0] + max[0]) * 0.5, (min[1] + max[1]) * 0.5];
    }
    // Recompute node degrees from scratch.
    // Fix: degrees previously accumulated across repeated updateOption() calls,
    // silently inflating repulsion-by-degree on every reconfiguration.
    for (var i = 0; i < nodes.length; i++) {
        nodes[i].degree = 0;
    }
    for (var i = 0; i < edges.length; i++) {
        nodes[edges[i].node1].degree += 1;
        nodes[edges[i].node2].degree += 1;
    }
    if (this._worker) {
        this._worker.postMessage({
            cmd: 'updateConfig',
            config: config
        });
    }
};
// Advances the simulation; steps per call, to keep sync with rendering.
// `cb` is invoked from _$onupdate once the worker posts new positions.
ForceAtlas2.prototype.update = function (renderer, steps, cb) {
    var stepCount = steps == null ? 1 : Math.max(steps, 1);
    this._frame += stepCount;
    this._onupdate = cb;
    if (!this._worker) {
        return;
    }
    this._worker.postMessage({
        cmd: 'update',
        steps: Math.round(stepCount)
    });
};
// Worker message handler: stores the new positions and refreshes the texture.
ForceAtlas2.prototype._$onupdate = function (e) {
// In case the worker keeps posting the last frame's message after disposal.
if (this._disposed) {
return;
}
var positionArr = new Float32Array(e.data.buffer);
this._globalSpeed = e.data.globalSpeed;
this._positionArr = positionArr;
this._updateTexture(positionArr);
// Notify the caller registered via update(renderer, steps, cb).
this._onupdate && this._onupdate();
};
// --- Simple accessors -------------------------------------------------------

// Float texture holding one RGBA texel per node (x, y, 1, 1).
ForceAtlas2.prototype.getNodePositionTexture = function () {
    return this._positionTex;
};

// UV coordinates of a node's texel inside the position texture; writes into
// `uv` when provided, otherwise returns a fresh array.
ForceAtlas2.prototype.getNodeUV = function (nodeIndex, uv) {
    var result = uv || [];
    var texWidth = this._positionTex.width;
    var texHeight = this._positionTex.height;
    var col = nodeIndex % texWidth;
    var row = Math.floor(nodeIndex / texWidth);
    result[0] = col / (texWidth - 1);
    result[1] = row / (texHeight - 1);
    return result;
};

ForceAtlas2.prototype.getNodes = function () {
    return this._nodes;
};

ForceAtlas2.prototype.getEdges = function () {
    return this._edges;
};

// True once more than maxSteps simulation steps have been requested.
ForceAtlas2.prototype.isFinished = function (maxSteps) {
    return this._frame > maxSteps;
};
// Copies the latest node positions into `out` (allocated when omitted).
// Returns the destination array; untouched when no frame has arrived yet.
ForceAtlas2.prototype.getNodePosition = function (renderer, out) {
    var dest = out || new Float32Array(this._nodes.length * 2);
    var src = this._positionArr;
    if (src) {
        for (var idx = 0; idx < src.length; idx++) {
            dest[idx] = src[idx];
        }
    }
    return dest;
};
// Writes each node's (x, y) into one RGBA texel (x, y, 1, 1) and marks the
// texture dirty so the renderer re-uploads it.
ForceAtlas2.prototype._updateTexture = function (positionArr) {
    var pixels = this._positionTex.pixels;
    var texel = 0;
    for (var i = 0; i < positionArr.length; i += 2) {
        pixels[texel] = positionArr[i];
        pixels[texel + 1] = positionArr[i + 1];
        pixels[texel + 2] = 1;
        pixels[texel + 3] = 1;
        texel += 4;
    }
    this._positionTex.dirty();
};
// Shuts the layout down and releases the background worker.
ForceAtlas2.prototype.dispose = function (renderer) {
    // Flag first so a message already in flight is ignored by _$onupdate.
    this._disposed = true;
    if (this._worker) {
        // Fix: actually stop the worker thread instead of only dropping the
        // reference — it previously kept running after dispose().
        this._worker.terminate();
    }
    this._worker = null;
};
export default ForceAtlas2; |
package com.atjl.dbtiming.domain.gen;
import java.util.ArrayList;
import java.util.List;
public class GenTaskRunedExample {
// ORDER BY fragment appended verbatim to the generated query (or null).
protected String orderByClause;
// When true the generated SELECT uses DISTINCT.
protected boolean distinct;
// OR-connected criteria groups; conditions inside one Criteria are ANDed.
protected List<Criteria> oredCriteria;
public GenTaskRunedExample() {
oredCriteria = new ArrayList<Criteria>();
}
public void setOrderByClause(String orderByClause) {
this.orderByClause = orderByClause;
}
public String getOrderByClause() {
return orderByClause;
}
public void setDistinct(boolean distinct) {
this.distinct = distinct;
}
public boolean isDistinct() {
return distinct;
}
public List<Criteria> getOredCriteria() {
return oredCriteria;
}
// Adds an existing criteria group, joined to the others by OR.
public void or(Criteria criteria) {
oredCriteria.add(criteria);
}
// Creates, registers and returns a new OR-joined criteria group.
public Criteria or() {
Criteria criteria = createCriteriaInternal();
oredCriteria.add(criteria);
return criteria;
}
// MyBatis-generator convention: only the first group is auto-registered;
// later groups must be added through or().
public Criteria createCriteria() {
Criteria criteria = createCriteriaInternal();
if (oredCriteria.size() == 0) {
oredCriteria.add(criteria);
}
return criteria;
}
protected Criteria createCriteriaInternal() {
Criteria criteria = new Criteria();
return criteria;
}
// Resets the example so the instance can be reused.
public void clear() {
oredCriteria.clear();
orderByClause = null;
distinct = false;
}
protected abstract static class GeneratedCriteria {
// Conditions of this group, combined with AND by the generated SQL mapper.
protected List<Criterion> criteria;
protected GeneratedCriteria() {
super();
criteria = new ArrayList<Criterion>();
}
public boolean isValid() {
return criteria.size() > 0;
}
public List<Criterion> getAllCriteria() {
return criteria;
}
public List<Criterion> getCriteria() {
return criteria;
}
// Condition with no bind value (e.g. "TID is null").
protected void addCriterion(String condition) {
if (condition == null) {
throw new RuntimeException("Value for condition cannot be null");
}
criteria.add(new Criterion(condition));
}
// Single-value condition; 'property' only appears in the error message.
protected void addCriterion(String condition, Object value, String property) {
if (value == null) {
throw new RuntimeException("Value for " + property + " cannot be null");
}
criteria.add(new Criterion(condition, value));
}
// Two-value condition (BETWEEN / NOT BETWEEN).
protected void addCriterion(String condition, Object value1, Object value2, String property) {
if (value1 == null || value2 == null) {
throw new RuntimeException("Between values for " + property + " cannot be null");
}
criteria.add(new Criterion(condition, value1, value2));
}
// ---- Generated criterion builders for column TID (Long). The same template
// repeats below for every mapped column; each method appends one SQL fragment
// and returns this Criteria for chaining. ----
public Criteria andTidIsNull() {
addCriterion("TID is null");
return (Criteria) this;
}
public Criteria andTidIsNotNull() {
addCriterion("TID is not null");
return (Criteria) this;
}
public Criteria andTidEqualTo(Long value) {
addCriterion("TID =", value, "tid");
return (Criteria) this;
}
public Criteria andTidNotEqualTo(Long value) {
addCriterion("TID <>", value, "tid");
return (Criteria) this;
}
public Criteria andTidGreaterThan(Long value) {
addCriterion("TID >", value, "tid");
return (Criteria) this;
}
public Criteria andTidGreaterThanOrEqualTo(Long value) {
addCriterion("TID >=", value, "tid");
return (Criteria) this;
}
public Criteria andTidLessThan(Long value) {
addCriterion("TID <", value, "tid");
return (Criteria) this;
}
public Criteria andTidLessThanOrEqualTo(Long value) {
addCriterion("TID <=", value, "tid");
return (Criteria) this;
}
public Criteria andTidIn(List<Long> values) {
addCriterion("TID in", values, "tid");
return (Criteria) this;
}
public Criteria andTidNotIn(List<Long> values) {
addCriterion("TID not in", values, "tid");
return (Criteria) this;
}
public Criteria andTidBetween(Long value1, Long value2) {
addCriterion("TID between", value1, value2, "tid");
return (Criteria) this;
}
public Criteria andTidNotBetween(Long value1, Long value2) {
addCriterion("TID not between", value1, value2, "tid");
return (Criteria) this;
}
public Criteria andTkeyIsNull() {
addCriterion("TKEY is null");
return (Criteria) this;
}
public Criteria andTkeyIsNotNull() {
addCriterion("TKEY is not null");
return (Criteria) this;
}
public Criteria andTkeyEqualTo(String value) {
addCriterion("TKEY =", value, "tkey");
return (Criteria) this;
}
public Criteria andTkeyNotEqualTo(String value) {
addCriterion("TKEY <>", value, "tkey");
return (Criteria) this;
}
public Criteria andTkeyGreaterThan(String value) {
addCriterion("TKEY >", value, "tkey");
return (Criteria) this;
}
public Criteria andTkeyGreaterThanOrEqualTo(String value) {
addCriterion("TKEY >=", value, "tkey");
return (Criteria) this;
}
public Criteria andTkeyLessThan(String value) {
addCriterion("TKEY <", value, "tkey");
return (Criteria) this;
}
public Criteria andTkeyLessThanOrEqualTo(String value) {
addCriterion("TKEY <=", value, "tkey");
return (Criteria) this;
}
public Criteria andTkeyLike(String value) {
addCriterion("TKEY like", value, "tkey");
return (Criteria) this;
}
public Criteria andTkeyNotLike(String value) {
addCriterion("TKEY not like", value, "tkey");
return (Criteria) this;
}
public Criteria andTkeyIn(List<String> values) {
addCriterion("TKEY in", values, "tkey");
return (Criteria) this;
}
public Criteria andTkeyNotIn(List<String> values) {
addCriterion("TKEY not in", values, "tkey");
return (Criteria) this;
}
public Criteria andTkeyBetween(String value1, String value2) {
addCriterion("TKEY between", value1, value2, "tkey");
return (Criteria) this;
}
public Criteria andTkeyNotBetween(String value1, String value2) {
addCriterion("TKEY not between", value1, value2, "tkey");
return (Criteria) this;
}
public Criteria andTserviceIsNull() {
addCriterion("TSERVICE is null");
return (Criteria) this;
}
public Criteria andTserviceIsNotNull() {
addCriterion("TSERVICE is not null");
return (Criteria) this;
}
public Criteria andTserviceEqualTo(String value) {
addCriterion("TSERVICE =", value, "tservice");
return (Criteria) this;
}
public Criteria andTserviceNotEqualTo(String value) {
addCriterion("TSERVICE <>", value, "tservice");
return (Criteria) this;
}
public Criteria andTserviceGreaterThan(String value) {
addCriterion("TSERVICE >", value, "tservice");
return (Criteria) this;
}
public Criteria andTserviceGreaterThanOrEqualTo(String value) {
addCriterion("TSERVICE >=", value, "tservice");
return (Criteria) this;
}
public Criteria andTserviceLessThan(String value) {
addCriterion("TSERVICE <", value, "tservice");
return (Criteria) this;
}
public Criteria andTserviceLessThanOrEqualTo(String value) {
addCriterion("TSERVICE <=", value, "tservice");
return (Criteria) this;
}
public Criteria andTserviceLike(String value) {
addCriterion("TSERVICE like", value, "tservice");
return (Criteria) this;
}
public Criteria andTserviceNotLike(String value) {
addCriterion("TSERVICE not like", value, "tservice");
return (Criteria) this;
}
public Criteria andTserviceIn(List<String> values) {
addCriterion("TSERVICE in", values, "tservice");
return (Criteria) this;
}
public Criteria andTserviceNotIn(List<String> values) {
addCriterion("TSERVICE not in", values, "tservice");
return (Criteria) this;
}
public Criteria andTserviceBetween(String value1, String value2) {
addCriterion("TSERVICE between", value1, value2, "tservice");
return (Criteria) this;
}
public Criteria andTserviceNotBetween(String value1, String value2) {
addCriterion("TSERVICE not between", value1, value2, "tservice");
return (Criteria) this;
}
public Criteria andParamIsNull() {
addCriterion("PARAM is null");
return (Criteria) this;
}
public Criteria andParamIsNotNull() {
addCriterion("PARAM is not null");
return (Criteria) this;
}
public Criteria andParamEqualTo(String value) {
addCriterion("PARAM =", value, "param");
return (Criteria) this;
}
public Criteria andParamNotEqualTo(String value) {
addCriterion("PARAM <>", value, "param");
return (Criteria) this;
}
public Criteria andParamGreaterThan(String value) {
addCriterion("PARAM >", value, "param");
return (Criteria) this;
}
public Criteria andParamGreaterThanOrEqualTo(String value) {
addCriterion("PARAM >=", value, "param");
return (Criteria) this;
}
public Criteria andParamLessThan(String value) {
addCriterion("PARAM <", value, "param");
return (Criteria) this;
}
public Criteria andParamLessThanOrEqualTo(String value) {
addCriterion("PARAM <=", value, "param");
return (Criteria) this;
}
public Criteria andParamLike(String value) {
addCriterion("PARAM like", value, "param");
return (Criteria) this;
}
public Criteria andParamNotLike(String value) {
addCriterion("PARAM not like", value, "param");
return (Criteria) this;
}
public Criteria andParamIn(List<String> values) {
addCriterion("PARAM in", values, "param");
return (Criteria) this;
}
public Criteria andParamNotIn(List<String> values) {
addCriterion("PARAM not in", values, "param");
return (Criteria) this;
}
public Criteria andParamBetween(String value1, String value2) {
addCriterion("PARAM between", value1, value2, "param");
return (Criteria) this;
}
public Criteria andParamNotBetween(String value1, String value2) {
addCriterion("PARAM not between", value1, value2, "param");
return (Criteria) this;
}
public Criteria andConfTypeIsNull() {
addCriterion("CONF_TYPE is null");
return (Criteria) this;
}
public Criteria andConfTypeIsNotNull() {
addCriterion("CONF_TYPE is not null");
return (Criteria) this;
}
public Criteria andConfTypeEqualTo(String value) {
addCriterion("CONF_TYPE =", value, "confType");
return (Criteria) this;
}
public Criteria andConfTypeNotEqualTo(String value) {
addCriterion("CONF_TYPE <>", value, "confType");
return (Criteria) this;
}
public Criteria andConfTypeGreaterThan(String value) {
addCriterion("CONF_TYPE >", value, "confType");
return (Criteria) this;
}
public Criteria andConfTypeGreaterThanOrEqualTo(String value) {
addCriterion("CONF_TYPE >=", value, "confType");
return (Criteria) this;
}
public Criteria andConfTypeLessThan(String value) {
addCriterion("CONF_TYPE <", value, "confType");
return (Criteria) this;
}
public Criteria andConfTypeLessThanOrEqualTo(String value) {
addCriterion("CONF_TYPE <=", value, "confType");
return (Criteria) this;
}
public Criteria andConfTypeLike(String value) {
addCriterion("CONF_TYPE like", value, "confType");
return (Criteria) this;
}
public Criteria andConfTypeNotLike(String value) {
addCriterion("CONF_TYPE not like", value, "confType");
return (Criteria) this;
}
public Criteria andConfTypeIn(List<String> values) {
addCriterion("CONF_TYPE in", values, "confType");
return (Criteria) this;
}
public Criteria andConfTypeNotIn(List<String> values) {
addCriterion("CONF_TYPE not in", values, "confType");
return (Criteria) this;
}
public Criteria andConfTypeBetween(String value1, String value2) {
addCriterion("CONF_TYPE between", value1, value2, "confType");
return (Criteria) this;
}
public Criteria andConfTypeNotBetween(String value1, String value2) {
addCriterion("CONF_TYPE not between", value1, value2, "confType");
return (Criteria) this;
}
public Criteria andConfCronExpressionIsNull() {
addCriterion("CONF_CRON_EXPRESSION is null");
return (Criteria) this;
}
public Criteria andConfCronExpressionIsNotNull() {
addCriterion("CONF_CRON_EXPRESSION is not null");
return (Criteria) this;
}
public Criteria andConfCronExpressionEqualTo(String value) {
addCriterion("CONF_CRON_EXPRESSION =", value, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionNotEqualTo(String value) {
addCriterion("CONF_CRON_EXPRESSION <>", value, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionGreaterThan(String value) {
addCriterion("CONF_CRON_EXPRESSION >", value, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionGreaterThanOrEqualTo(String value) {
addCriterion("CONF_CRON_EXPRESSION >=", value, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionLessThan(String value) {
addCriterion("CONF_CRON_EXPRESSION <", value, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionLessThanOrEqualTo(String value) {
addCriterion("CONF_CRON_EXPRESSION <=", value, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionLike(String value) {
addCriterion("CONF_CRON_EXPRESSION like", value, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionNotLike(String value) {
addCriterion("CONF_CRON_EXPRESSION not like", value, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionIn(List<String> values) {
addCriterion("CONF_CRON_EXPRESSION in", values, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionNotIn(List<String> values) {
addCriterion("CONF_CRON_EXPRESSION not in", values, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionBetween(String value1, String value2) {
addCriterion("CONF_CRON_EXPRESSION between", value1, value2, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfCronExpressionNotBetween(String value1, String value2) {
addCriterion("CONF_CRON_EXPRESSION not between", value1, value2, "confCronExpression");
return (Criteria) this;
}
public Criteria andConfDelayTmIsNull() {
addCriterion("CONF_DELAY_TM is null");
return (Criteria) this;
}
public Criteria andConfDelayTmIsNotNull() {
addCriterion("CONF_DELAY_TM is not null");
return (Criteria) this;
}
public Criteria andConfDelayTmEqualTo(Long value) {
addCriterion("CONF_DELAY_TM =", value, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmNotEqualTo(Long value) {
addCriterion("CONF_DELAY_TM <>", value, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmGreaterThan(Long value) {
addCriterion("CONF_DELAY_TM >", value, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmGreaterThanOrEqualTo(Long value) {
addCriterion("CONF_DELAY_TM >=", value, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmLessThan(Long value) {
addCriterion("CONF_DELAY_TM <", value, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmLessThanOrEqualTo(Long value) {
addCriterion("CONF_DELAY_TM <=", value, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmIn(List<Long> values) {
addCriterion("CONF_DELAY_TM in", values, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmNotIn(List<Long> values) {
addCriterion("CONF_DELAY_TM not in", values, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmBetween(Long value1, Long value2) {
addCriterion("CONF_DELAY_TM between", value1, value2, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfDelayTmNotBetween(Long value1, Long value2) {
addCriterion("CONF_DELAY_TM not between", value1, value2, "confDelayTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmIsNull() {
addCriterion("CONF_INTERVAL_TM is null");
return (Criteria) this;
}
public Criteria andConfIntervalTmIsNotNull() {
addCriterion("CONF_INTERVAL_TM is not null");
return (Criteria) this;
}
public Criteria andConfIntervalTmEqualTo(Long value) {
addCriterion("CONF_INTERVAL_TM =", value, "confIntervalTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmNotEqualTo(Long value) {
addCriterion("CONF_INTERVAL_TM <>", value, "confIntervalTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmGreaterThan(Long value) {
addCriterion("CONF_INTERVAL_TM >", value, "confIntervalTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmGreaterThanOrEqualTo(Long value) {
addCriterion("CONF_INTERVAL_TM >=", value, "confIntervalTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmLessThan(Long value) {
addCriterion("CONF_INTERVAL_TM <", value, "confIntervalTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmLessThanOrEqualTo(Long value) {
addCriterion("CONF_INTERVAL_TM <=", value, "confIntervalTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmIn(List<Long> values) {
addCriterion("CONF_INTERVAL_TM in", values, "confIntervalTm");
return (Criteria) this;
}
// ------------------------------------------------------------------
// Generated query-builder predicates (MyBatis-Generator style).
// Each method appends exactly one SQL condition via addCriterion(...)
// and returns this (cast to the public Criteria type) so that calls
// can be chained fluently.  The first argument is the column
// expression/operator fragment; the trailing string is the Java bean
// property name used for parameter mapping and error reporting.
// ------------------------------------------------------------------
// --- column CONF_INTERVAL_TM (property "confIntervalTm", Long) ---
public Criteria andConfIntervalTmNotIn(List<Long> values) {
addCriterion("CONF_INTERVAL_TM not in", values, "confIntervalTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmBetween(Long value1, Long value2) {
addCriterion("CONF_INTERVAL_TM between", value1, value2, "confIntervalTm");
return (Criteria) this;
}
public Criteria andConfIntervalTmNotBetween(Long value1, Long value2) {
addCriterion("CONF_INTERVAL_TM not between", value1, value2, "confIntervalTm");
return (Criteria) this;
}
// --- column CONF_EXE_TIMES (property "confExeTimes", Long) ---
public Criteria andConfExeTimesIsNull() {
addCriterion("CONF_EXE_TIMES is null");
return (Criteria) this;
}
public Criteria andConfExeTimesIsNotNull() {
addCriterion("CONF_EXE_TIMES is not null");
return (Criteria) this;
}
public Criteria andConfExeTimesEqualTo(Long value) {
addCriterion("CONF_EXE_TIMES =", value, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesNotEqualTo(Long value) {
addCriterion("CONF_EXE_TIMES <>", value, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesGreaterThan(Long value) {
addCriterion("CONF_EXE_TIMES >", value, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesGreaterThanOrEqualTo(Long value) {
addCriterion("CONF_EXE_TIMES >=", value, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesLessThan(Long value) {
addCriterion("CONF_EXE_TIMES <", value, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesLessThanOrEqualTo(Long value) {
addCriterion("CONF_EXE_TIMES <=", value, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesIn(List<Long> values) {
addCriterion("CONF_EXE_TIMES in", values, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesNotIn(List<Long> values) {
addCriterion("CONF_EXE_TIMES not in", values, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesBetween(Long value1, Long value2) {
addCriterion("CONF_EXE_TIMES between", value1, value2, "confExeTimes");
return (Criteria) this;
}
public Criteria andConfExeTimesNotBetween(Long value1, Long value2) {
addCriterion("CONF_EXE_TIMES not between", value1, value2, "confExeTimes");
return (Criteria) this;
}
// --- column PROCESSOR (property "processor", String; adds like / not like) ---
public Criteria andProcessorIsNull() {
addCriterion("PROCESSOR is null");
return (Criteria) this;
}
public Criteria andProcessorIsNotNull() {
addCriterion("PROCESSOR is not null");
return (Criteria) this;
}
public Criteria andProcessorEqualTo(String value) {
addCriterion("PROCESSOR =", value, "processor");
return (Criteria) this;
}
public Criteria andProcessorNotEqualTo(String value) {
addCriterion("PROCESSOR <>", value, "processor");
return (Criteria) this;
}
public Criteria andProcessorGreaterThan(String value) {
addCriterion("PROCESSOR >", value, "processor");
return (Criteria) this;
}
public Criteria andProcessorGreaterThanOrEqualTo(String value) {
addCriterion("PROCESSOR >=", value, "processor");
return (Criteria) this;
}
public Criteria andProcessorLessThan(String value) {
addCriterion("PROCESSOR <", value, "processor");
return (Criteria) this;
}
public Criteria andProcessorLessThanOrEqualTo(String value) {
addCriterion("PROCESSOR <=", value, "processor");
return (Criteria) this;
}
// Caller supplies any SQL LIKE wildcards ('%', '_') inside `value`.
public Criteria andProcessorLike(String value) {
addCriterion("PROCESSOR like", value, "processor");
return (Criteria) this;
}
public Criteria andProcessorNotLike(String value) {
addCriterion("PROCESSOR not like", value, "processor");
return (Criteria) this;
}
public Criteria andProcessorIn(List<String> values) {
addCriterion("PROCESSOR in", values, "processor");
return (Criteria) this;
}
public Criteria andProcessorNotIn(List<String> values) {
addCriterion("PROCESSOR not in", values, "processor");
return (Criteria) this;
}
public Criteria andProcessorBetween(String value1, String value2) {
addCriterion("PROCESSOR between", value1, value2, "processor");
return (Criteria) this;
}
public Criteria andProcessorNotBetween(String value1, String value2) {
addCriterion("PROCESSOR not between", value1, value2, "processor");
return (Criteria) this;
}
// --- column TMUTEX (property "tmutex", String) ---
public Criteria andTmutexIsNull() {
addCriterion("TMUTEX is null");
return (Criteria) this;
}
public Criteria andTmutexIsNotNull() {
addCriterion("TMUTEX is not null");
return (Criteria) this;
}
public Criteria andTmutexEqualTo(String value) {
addCriterion("TMUTEX =", value, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexNotEqualTo(String value) {
addCriterion("TMUTEX <>", value, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexGreaterThan(String value) {
addCriterion("TMUTEX >", value, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexGreaterThanOrEqualTo(String value) {
addCriterion("TMUTEX >=", value, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexLessThan(String value) {
addCriterion("TMUTEX <", value, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexLessThanOrEqualTo(String value) {
addCriterion("TMUTEX <=", value, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexLike(String value) {
addCriterion("TMUTEX like", value, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexNotLike(String value) {
addCriterion("TMUTEX not like", value, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexIn(List<String> values) {
addCriterion("TMUTEX in", values, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexNotIn(List<String> values) {
addCriterion("TMUTEX not in", values, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexBetween(String value1, String value2) {
addCriterion("TMUTEX between", value1, value2, "tmutex");
return (Criteria) this;
}
public Criteria andTmutexNotBetween(String value1, String value2) {
addCriterion("TMUTEX not between", value1, value2, "tmutex");
return (Criteria) this;
}
// Generated predicates, continued.  Same addCriterion(...) pattern as the
// groups above: one SQL condition per call, fluent Criteria return.
// --- column MUTEX_TM (property "mutexTm", Long) ---
public Criteria andMutexTmIsNull() {
addCriterion("MUTEX_TM is null");
return (Criteria) this;
}
public Criteria andMutexTmIsNotNull() {
addCriterion("MUTEX_TM is not null");
return (Criteria) this;
}
public Criteria andMutexTmEqualTo(Long value) {
addCriterion("MUTEX_TM =", value, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmNotEqualTo(Long value) {
addCriterion("MUTEX_TM <>", value, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmGreaterThan(Long value) {
addCriterion("MUTEX_TM >", value, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmGreaterThanOrEqualTo(Long value) {
addCriterion("MUTEX_TM >=", value, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmLessThan(Long value) {
addCriterion("MUTEX_TM <", value, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmLessThanOrEqualTo(Long value) {
addCriterion("MUTEX_TM <=", value, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmIn(List<Long> values) {
addCriterion("MUTEX_TM in", values, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmNotIn(List<Long> values) {
addCriterion("MUTEX_TM not in", values, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmBetween(Long value1, Long value2) {
addCriterion("MUTEX_TM between", value1, value2, "mutexTm");
return (Criteria) this;
}
public Criteria andMutexTmNotBetween(Long value1, Long value2) {
addCriterion("MUTEX_TM not between", value1, value2, "mutexTm");
return (Criteria) this;
}
// --- column TSTATUS (property "tstatus", String; adds like / not like) ---
public Criteria andTstatusIsNull() {
addCriterion("TSTATUS is null");
return (Criteria) this;
}
public Criteria andTstatusIsNotNull() {
addCriterion("TSTATUS is not null");
return (Criteria) this;
}
public Criteria andTstatusEqualTo(String value) {
addCriterion("TSTATUS =", value, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusNotEqualTo(String value) {
addCriterion("TSTATUS <>", value, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusGreaterThan(String value) {
addCriterion("TSTATUS >", value, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusGreaterThanOrEqualTo(String value) {
addCriterion("TSTATUS >=", value, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusLessThan(String value) {
addCriterion("TSTATUS <", value, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusLessThanOrEqualTo(String value) {
addCriterion("TSTATUS <=", value, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusLike(String value) {
addCriterion("TSTATUS like", value, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusNotLike(String value) {
addCriterion("TSTATUS not like", value, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusIn(List<String> values) {
addCriterion("TSTATUS in", values, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusNotIn(List<String> values) {
addCriterion("TSTATUS not in", values, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusBetween(String value1, String value2) {
addCriterion("TSTATUS between", value1, value2, "tstatus");
return (Criteria) this;
}
public Criteria andTstatusNotBetween(String value1, String value2) {
addCriterion("TSTATUS not between", value1, value2, "tstatus");
return (Criteria) this;
}
// --- column ALIVE_TM (property "aliveTm", Long) ---
public Criteria andAliveTmIsNull() {
addCriterion("ALIVE_TM is null");
return (Criteria) this;
}
public Criteria andAliveTmIsNotNull() {
addCriterion("ALIVE_TM is not null");
return (Criteria) this;
}
public Criteria andAliveTmEqualTo(Long value) {
addCriterion("ALIVE_TM =", value, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmNotEqualTo(Long value) {
addCriterion("ALIVE_TM <>", value, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmGreaterThan(Long value) {
addCriterion("ALIVE_TM >", value, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmGreaterThanOrEqualTo(Long value) {
addCriterion("ALIVE_TM >=", value, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmLessThan(Long value) {
addCriterion("ALIVE_TM <", value, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmLessThanOrEqualTo(Long value) {
addCriterion("ALIVE_TM <=", value, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmIn(List<Long> values) {
addCriterion("ALIVE_TM in", values, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmNotIn(List<Long> values) {
addCriterion("ALIVE_TM not in", values, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmBetween(Long value1, Long value2) {
addCriterion("ALIVE_TM between", value1, value2, "aliveTm");
return (Criteria) this;
}
public Criteria andAliveTmNotBetween(Long value1, Long value2) {
addCriterion("ALIVE_TM not between", value1, value2, "aliveTm");
return (Criteria) this;
}
// --- column VALID (property "valid", String; adds like / not like) ---
public Criteria andValidIsNull() {
addCriterion("VALID is null");
return (Criteria) this;
}
public Criteria andValidIsNotNull() {
addCriterion("VALID is not null");
return (Criteria) this;
}
public Criteria andValidEqualTo(String value) {
addCriterion("VALID =", value, "valid");
return (Criteria) this;
}
public Criteria andValidNotEqualTo(String value) {
addCriterion("VALID <>", value, "valid");
return (Criteria) this;
}
public Criteria andValidGreaterThan(String value) {
addCriterion("VALID >", value, "valid");
return (Criteria) this;
}
public Criteria andValidGreaterThanOrEqualTo(String value) {
addCriterion("VALID >=", value, "valid");
return (Criteria) this;
}
public Criteria andValidLessThan(String value) {
addCriterion("VALID <", value, "valid");
return (Criteria) this;
}
public Criteria andValidLessThanOrEqualTo(String value) {
addCriterion("VALID <=", value, "valid");
return (Criteria) this;
}
public Criteria andValidLike(String value) {
addCriterion("VALID like", value, "valid");
return (Criteria) this;
}
public Criteria andValidNotLike(String value) {
addCriterion("VALID not like", value, "valid");
return (Criteria) this;
}
public Criteria andValidIn(List<String> values) {
addCriterion("VALID in", values, "valid");
return (Criteria) this;
}
public Criteria andValidNotIn(List<String> values) {
addCriterion("VALID not in", values, "valid");
return (Criteria) this;
}
public Criteria andValidBetween(String value1, String value2) {
addCriterion("VALID between", value1, value2, "valid");
return (Criteria) this;
}
public Criteria andValidNotBetween(String value1, String value2) {
addCriterion("VALID not between", value1, value2, "valid");
return (Criteria) this;
}
// Generated predicates, continued.  Six Long-typed columns, each with the
// standard 12-method set (is null / comparisons / in / between).
// The *_TM column names suggest these hold timestamps stored as numbers,
// but the unit (epoch millis vs seconds) is not visible here — confirm
// against the table schema before relying on it.
// --- column CRT_TM (property "crtTm", Long) ---
public Criteria andCrtTmIsNull() {
addCriterion("CRT_TM is null");
return (Criteria) this;
}
public Criteria andCrtTmIsNotNull() {
addCriterion("CRT_TM is not null");
return (Criteria) this;
}
public Criteria andCrtTmEqualTo(Long value) {
addCriterion("CRT_TM =", value, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmNotEqualTo(Long value) {
addCriterion("CRT_TM <>", value, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmGreaterThan(Long value) {
addCriterion("CRT_TM >", value, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmGreaterThanOrEqualTo(Long value) {
addCriterion("CRT_TM >=", value, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmLessThan(Long value) {
addCriterion("CRT_TM <", value, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmLessThanOrEqualTo(Long value) {
addCriterion("CRT_TM <=", value, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmIn(List<Long> values) {
addCriterion("CRT_TM in", values, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmNotIn(List<Long> values) {
addCriterion("CRT_TM not in", values, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmBetween(Long value1, Long value2) {
addCriterion("CRT_TM between", value1, value2, "crtTm");
return (Criteria) this;
}
public Criteria andCrtTmNotBetween(Long value1, Long value2) {
addCriterion("CRT_TM not between", value1, value2, "crtTm");
return (Criteria) this;
}
// --- column FIRST_START_TM (property "firstStartTm", Long) ---
public Criteria andFirstStartTmIsNull() {
addCriterion("FIRST_START_TM is null");
return (Criteria) this;
}
public Criteria andFirstStartTmIsNotNull() {
addCriterion("FIRST_START_TM is not null");
return (Criteria) this;
}
public Criteria andFirstStartTmEqualTo(Long value) {
addCriterion("FIRST_START_TM =", value, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmNotEqualTo(Long value) {
addCriterion("FIRST_START_TM <>", value, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmGreaterThan(Long value) {
addCriterion("FIRST_START_TM >", value, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmGreaterThanOrEqualTo(Long value) {
addCriterion("FIRST_START_TM >=", value, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmLessThan(Long value) {
addCriterion("FIRST_START_TM <", value, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmLessThanOrEqualTo(Long value) {
addCriterion("FIRST_START_TM <=", value, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmIn(List<Long> values) {
addCriterion("FIRST_START_TM in", values, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmNotIn(List<Long> values) {
addCriterion("FIRST_START_TM not in", values, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmBetween(Long value1, Long value2) {
addCriterion("FIRST_START_TM between", value1, value2, "firstStartTm");
return (Criteria) this;
}
public Criteria andFirstStartTmNotBetween(Long value1, Long value2) {
addCriterion("FIRST_START_TM not between", value1, value2, "firstStartTm");
return (Criteria) this;
}
// --- column FIRST_END_TM (property "firstEndTm", Long) ---
public Criteria andFirstEndTmIsNull() {
addCriterion("FIRST_END_TM is null");
return (Criteria) this;
}
public Criteria andFirstEndTmIsNotNull() {
addCriterion("FIRST_END_TM is not null");
return (Criteria) this;
}
public Criteria andFirstEndTmEqualTo(Long value) {
addCriterion("FIRST_END_TM =", value, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmNotEqualTo(Long value) {
addCriterion("FIRST_END_TM <>", value, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmGreaterThan(Long value) {
addCriterion("FIRST_END_TM >", value, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmGreaterThanOrEqualTo(Long value) {
addCriterion("FIRST_END_TM >=", value, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmLessThan(Long value) {
addCriterion("FIRST_END_TM <", value, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmLessThanOrEqualTo(Long value) {
addCriterion("FIRST_END_TM <=", value, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmIn(List<Long> values) {
addCriterion("FIRST_END_TM in", values, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmNotIn(List<Long> values) {
addCriterion("FIRST_END_TM not in", values, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmBetween(Long value1, Long value2) {
addCriterion("FIRST_END_TM between", value1, value2, "firstEndTm");
return (Criteria) this;
}
public Criteria andFirstEndTmNotBetween(Long value1, Long value2) {
addCriterion("FIRST_END_TM not between", value1, value2, "firstEndTm");
return (Criteria) this;
}
// --- column LAST_START_TM (property "lastStartTm", Long) ---
public Criteria andLastStartTmIsNull() {
addCriterion("LAST_START_TM is null");
return (Criteria) this;
}
public Criteria andLastStartTmIsNotNull() {
addCriterion("LAST_START_TM is not null");
return (Criteria) this;
}
public Criteria andLastStartTmEqualTo(Long value) {
addCriterion("LAST_START_TM =", value, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmNotEqualTo(Long value) {
addCriterion("LAST_START_TM <>", value, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmGreaterThan(Long value) {
addCriterion("LAST_START_TM >", value, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmGreaterThanOrEqualTo(Long value) {
addCriterion("LAST_START_TM >=", value, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmLessThan(Long value) {
addCriterion("LAST_START_TM <", value, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmLessThanOrEqualTo(Long value) {
addCriterion("LAST_START_TM <=", value, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmIn(List<Long> values) {
addCriterion("LAST_START_TM in", values, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmNotIn(List<Long> values) {
addCriterion("LAST_START_TM not in", values, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmBetween(Long value1, Long value2) {
addCriterion("LAST_START_TM between", value1, value2, "lastStartTm");
return (Criteria) this;
}
public Criteria andLastStartTmNotBetween(Long value1, Long value2) {
addCriterion("LAST_START_TM not between", value1, value2, "lastStartTm");
return (Criteria) this;
}
// --- column LAST_END_TM (property "lastEndTm", Long) ---
public Criteria andLastEndTmIsNull() {
addCriterion("LAST_END_TM is null");
return (Criteria) this;
}
public Criteria andLastEndTmIsNotNull() {
addCriterion("LAST_END_TM is not null");
return (Criteria) this;
}
public Criteria andLastEndTmEqualTo(Long value) {
addCriterion("LAST_END_TM =", value, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmNotEqualTo(Long value) {
addCriterion("LAST_END_TM <>", value, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmGreaterThan(Long value) {
addCriterion("LAST_END_TM >", value, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmGreaterThanOrEqualTo(Long value) {
addCriterion("LAST_END_TM >=", value, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmLessThan(Long value) {
addCriterion("LAST_END_TM <", value, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmLessThanOrEqualTo(Long value) {
addCriterion("LAST_END_TM <=", value, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmIn(List<Long> values) {
addCriterion("LAST_END_TM in", values, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmNotIn(List<Long> values) {
addCriterion("LAST_END_TM not in", values, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmBetween(Long value1, Long value2) {
addCriterion("LAST_END_TM between", value1, value2, "lastEndTm");
return (Criteria) this;
}
public Criteria andLastEndTmNotBetween(Long value1, Long value2) {
addCriterion("LAST_END_TM not between", value1, value2, "lastEndTm");
return (Criteria) this;
}
// --- column RUN_CNT (property "runCnt", Long) ---
public Criteria andRunCntIsNull() {
addCriterion("RUN_CNT is null");
return (Criteria) this;
}
public Criteria andRunCntIsNotNull() {
addCriterion("RUN_CNT is not null");
return (Criteria) this;
}
public Criteria andRunCntEqualTo(Long value) {
addCriterion("RUN_CNT =", value, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntNotEqualTo(Long value) {
addCriterion("RUN_CNT <>", value, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntGreaterThan(Long value) {
addCriterion("RUN_CNT >", value, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntGreaterThanOrEqualTo(Long value) {
addCriterion("RUN_CNT >=", value, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntLessThan(Long value) {
addCriterion("RUN_CNT <", value, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntLessThanOrEqualTo(Long value) {
addCriterion("RUN_CNT <=", value, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntIn(List<Long> values) {
addCriterion("RUN_CNT in", values, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntNotIn(List<Long> values) {
addCriterion("RUN_CNT not in", values, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntBetween(Long value1, Long value2) {
addCriterion("RUN_CNT between", value1, value2, "runCnt");
return (Criteria) this;
}
public Criteria andRunCntNotBetween(Long value1, Long value2) {
addCriterion("RUN_CNT not between", value1, value2, "runCnt");
return (Criteria) this;
}
// Case-insensitive LIKE helpers: both sides are upper-cased — the column
// via SQL upper(...) and the pattern via Java toUpperCase().
// NOTE(review): value.toUpperCase() throws NullPointerException if value
// is null, and uses the JVM default locale — presumably fine for ASCII
// patterns, but verify if locale-sensitive data (e.g. Turkish 'i') is
// possible.  Caller supplies any '%'/'_' wildcards inside `value`.
public Criteria andTkeyLikeInsensitive(String value) {
addCriterion("upper(TKEY) like", value.toUpperCase(), "tkey");
return (Criteria) this;
}
public Criteria andTserviceLikeInsensitive(String value) {
addCriterion("upper(TSERVICE) like", value.toUpperCase(), "tservice");
return (Criteria) this;
}
public Criteria andParamLikeInsensitive(String value) {
addCriterion("upper(PARAM) like", value.toUpperCase(), "param");
return (Criteria) this;
}
public Criteria andConfTypeLikeInsensitive(String value) {
addCriterion("upper(CONF_TYPE) like", value.toUpperCase(), "confType");
return (Criteria) this;
}
public Criteria andConfCronExpressionLikeInsensitive(String value) {
addCriterion("upper(CONF_CRON_EXPRESSION) like", value.toUpperCase(), "confCronExpression");
return (Criteria) this;
}
public Criteria andProcessorLikeInsensitive(String value) {
addCriterion("upper(PROCESSOR) like", value.toUpperCase(), "processor");
return (Criteria) this;
}
public Criteria andTmutexLikeInsensitive(String value) {
addCriterion("upper(TMUTEX) like", value.toUpperCase(), "tmutex");
return (Criteria) this;
}
public Criteria andTstatusLikeInsensitive(String value) {
addCriterion("upper(TSTATUS) like", value.toUpperCase(), "tstatus");
return (Criteria) this;
}
public Criteria andValidLikeInsensitive(String value) {
addCriterion("upper(VALID) like", value.toUpperCase(), "valid");
return (Criteria) this;
}
}
// Concrete criteria holder.  Exists only so the generated and...()
// builder methods can return the public Criteria type; it adds no state
// or behavior of its own.  Instances are created by the enclosing
// Example class (the constructor is protected to enforce that).
public static class Criteria extends GeneratedCriteria {
protected Criteria() {
super();
}
}
/**
 * One rendered SQL condition produced by the generated and...() builder
 * methods.  Every instance is exactly one of four shapes, recorded by
 * the mutually exclusive boolean flags:
 * <ul>
 *   <li>no value   — e.g. "COL is null"</li>
 *   <li>single     — one bound value, e.g. "COL ="</li>
 *   <li>between    — two bound values (value / secondValue)</li>
 *   <li>list       — {@code value} is a {@link List} for "in"/"not in"</li>
 * </ul>
 * Instances are immutable after construction; constructors are protected
 * because only the enclosing example class is meant to create them.
 */
public static class Criterion {
    private String condition;    // SQL fragment, e.g. "CRT_TM >="
    private Object value;        // first (or only) bound value; a List for in/not-in
    private Object secondValue;  // upper bound for between/not-between, else null
    private boolean noValue;
    private boolean singleValue;
    private boolean betweenValue;
    private boolean listValue;
    private String typeHandler;  // optional MyBatis type handler name; may be null

    /** No-value condition such as "COL is null". */
    protected Criterion(String condition) {
        this.condition = condition;
        this.typeHandler = null;
        this.noValue = true;
    }

    /** Single- or list-valued condition; List inputs are flagged as listValue. */
    protected Criterion(String condition, Object value, String typeHandler) {
        this.condition = condition;
        this.value = value;
        this.typeHandler = typeHandler;
        if (value instanceof List<?>) {
            this.listValue = true;
        } else {
            this.singleValue = true;
        }
    }

    /** Single- or list-valued condition without an explicit type handler. */
    protected Criterion(String condition, Object value) {
        this(condition, value, null);
    }

    /** Two-valued (between / not between) condition with optional type handler. */
    protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
        this.condition = condition;
        this.value = value;
        this.secondValue = secondValue;
        this.typeHandler = typeHandler;
        this.betweenValue = true;
    }

    /** Two-valued (between / not between) condition without a type handler. */
    protected Criterion(String condition, Object value, Object secondValue) {
        this(condition, value, secondValue, null);
    }

    public String getCondition() { return condition; }

    public Object getValue() { return value; }

    public Object getSecondValue() { return secondValue; }

    public boolean isNoValue() { return noValue; }

    public boolean isSingleValue() { return singleValue; }

    public boolean isBetweenValue() { return betweenValue; }

    public boolean isListValue() { return listValue; }

    public String getTypeHandler() { return typeHandler; }
}
} |
# Count how many unordered index pairs (i, j) with i < j hold equal values
# in `array` (defined earlier in the script), then print the total.
# Note: every equal pair counts, so e.g. [1, 1, 1] yields 3.
# BUG FIX: the original `if` line was missing its trailing colon, which is
# a SyntaxError in Python and prevented this script from running at all.
total_duplicates = 0
for i in range(len(array)):
    for j in range(i + 1, len(array)):
        if array[i] == array[j]:  # colon restored
            total_duplicates += 1
print("Total Duplicates:", total_duplicates)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.