text stringlengths 1 1.05M |
|---|
<reponame>firesbane13/FRC_TrialCode_2022<filename>src/main/java/frc/robot/subsystems/LightingSubsystem.java
package frc.robot.subsystems;
import edu.wpi.first.wpilibj2.command.SubsystemBase;
/**
 * Subsystem intended to drive the robot's LED lighting.
 *
 * <p>Every behaviour method is currently a stub that reports failure
 * ({@code false}); the actual lighting-hardware control has not been
 * implemented yet.
 */
public class LightingSubsystem extends SubsystemBase {
    /** Creates the lighting subsystem; no hardware is initialised yet. */
    public LightingSubsystem() {
    }

    /** Called once per scheduler run; nothing to do yet. */
    @Override
    public void periodic() {
    }

    /** Called once per scheduler run while simulated; nothing to do yet. */
    @Override
    public void simulationPeriodic() {
    }

    /** Sets the LED hue. Not implemented. @return always {@code false} for now */
    public boolean setHue() {
        return false;
    }

    /** Plays a heartbeat pattern. Not implemented. @return always {@code false} for now */
    public boolean heartbeat() {
        return false;
    }

    /** Shows the red-alliance colours. Not implemented. @return always {@code false} for now */
    public boolean setRedAlliance() {
        return false;
    }

    /** Shows the blue-alliance colours. Not implemented. @return always {@code false} for now */
    public boolean setBlueAlliance() {
        return false;
    }

    /** Shows the team colours. Not implemented. @return always {@code false} for now */
    public boolean setTeamColors() {
        return false;
    }
}
|
<reponame>filewalkwithme/vulcan-scan-engine<gh_stars>1-10
/*
Copyright 2021 Adevinta
*/
package service
import (
"context"
"encoding/json"
errs "errors"
"fmt"
"net/http"
"strings"
"time"
"github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level"
uuid "github.com/satori/go.uuid"
validator "gopkg.in/go-playground/validator.v9"
"github.com/adevinta/errors"
"github.com/adevinta/vulcan-core-cli/vulcan-core/client"
metrics "github.com/adevinta/vulcan-metrics-client"
"github.com/adevinta/vulcan-scan-engine/pkg/api"
"github.com/adevinta/vulcan-scan-engine/pkg/api/persistence"
"github.com/adevinta/vulcan-scan-engine/pkg/notify"
"github.com/adevinta/vulcan-scan-engine/pkg/stream"
"github.com/adevinta/vulcan-scan-engine/pkg/util"
)
const (
// ScanStatusRunning status when a Scan is created.
ScanStatusRunning = "RUNNING"
// ScanStatusFinished status when a Scan has all the checks in a terminal status.
ScanStatusFinished = "FINISHED"
// Scan metrics names and tags pushed to the metrics client.
componentTag = "component:scanengine"
scanCountMetric = "vulcan.scan.count"
scanCompletionMetric = "vulcan.scan.completion"
checkCountMetric = "vulcan.scan.check.count"
metricsScanCreated = "created"
metricsScanFinished = "finished"
// Log field names used when a check reports an out-of-range progress.
notValidProgressField = "InvalidProgress"
fixingProgressField = "FixingInvalidProgress"
)
// ChecktypesInformer represents an informer for the mapping
// between checktypes and supported asset types.
type ChecktypesInformer interface {
IndexAssettypes(ctx context.Context, path string) (*http.Response, error)
DecodeAssettypeCollection(resp *http.Response) (client.AssettypeCollection, error)
}
// ChecksCreator abstracts the actual implementation
// for the checks creation process.
type ChecksCreator interface {
CreateScanChecks(id string) error
}
// scanStats aggregates the number of checks a scan is expected to run,
// in total and per "checktype:<name>" tag.
type scanStats struct {
TotalNumberOfChecks int
NumberOfChecksPerChecktype map[string]int
}
// ChecktypesByAssettypes is used as a lookup table to check if a checktype can
// be run against a concrete assettype.
type ChecktypesByAssettypes map[string]map[string]struct{}
// ScansService implements the functionality needed to create and query scans.
type ScansService struct {
db persistence.ScansStore
logger log.Logger
ctInformer ChecktypesInformer
metricsClient metrics.Client
ccreator ChecksCreator
scansNotifier notify.Notifier
checksNotifier notify.Notifier
streamClient stream.Client
}
// New wires all the dependencies together and returns a ready-to-use
// ScansService.
func New(logger log.Logger, db persistence.ScansStore, client ChecktypesInformer,
	metricsClient metrics.Client, ccreator ChecksCreator, scansNotifier notify.Notifier,
	checksNotifier notify.Notifier, streamClient stream.Client) ScansService {
	svc := ScansService{
		db:             db,
		logger:         logger,
		ctInformer:     client,
		ccreator:       ccreator,
		metricsClient:  metricsClient,
		scansNotifier:  scansNotifier,
		checksNotifier: checksNotifier,
		streamClient:   streamClient,
	}
	return svc
}
// ListScans returns the stored scans, optionally filtered by external ID,
// paginated by offset and limit.
func (s ScansService) ListScans(ctx context.Context, extID string, offset, limit uint32) ([]api.Scan, error) {
	if extID != "" {
		return s.db.GetScansByExternalID(extID, offset, limit)
	}
	return s.db.GetScans(offset, limit)
}
// GetScan returns the scan identified by the given string-encoded UUID.
func (s ScansService) GetScan(ctx context.Context, scanID string) (api.Scan, error) {
	id, parseErr := uuid.FromString(scanID)
	if parseErr != nil {
		return api.Scan{}, errors.Assertion(fmt.Sprintf("not valid scan ID %s", scanID))
	}
	return s.db.GetScanByID(id)
}
// GetScanChecks returns the checks of the given scan, optionally filtered by
// check status.
func (s ScansService) GetScanChecks(ctx context.Context, scanID, status string) ([]api.Check, error) {
	id, err := uuid.FromString(scanID)
	if err != nil {
		return []api.Check{}, errors.Assertion(fmt.Sprintf("not valid scan ID %s", scanID))
	}
	if status != "" {
		return s.db.GetScanChecksByStatus(id, status)
	}
	return s.db.GetScanChecks(id)
}
// GetScanStats returns, for the given scan ID, the number of checks in each
// status that has at least one check.
func (s ScansService) GetScanStats(ctx context.Context, scanID string) ([]api.CheckStats, error) {
	id, err := uuid.FromString(scanID)
	if err != nil {
		return nil, errors.Assertion(fmt.Sprintf("not valid scan ID %s", scanID))
	}
	statusTotals, err := s.db.GetScanStats(id)
	if err != nil {
		return nil, err
	}
	var result []api.CheckStats
	for status, total := range statusTotals {
		if total <= 0 {
			continue
		}
		result = append(result, api.CheckStats{
			Status: status,
			Total:  total,
		})
	}
	return result, nil
}
// GetCheck returns the check identified by the given string-encoded UUID.
func (s ScansService) GetCheck(ctx context.Context, checkID string) (api.Check, error) {
	parsed, err := uuid.FromString(checkID)
	if err != nil {
		return api.Check{}, errors.Assertion(fmt.Sprintf("not valid check ID %s", checkID))
	}
	return s.db.GetCheckByID(parsed)
}
// AbortScan signals vulcan core, through the stream client, to try to abort
// an ongoing scan. Aborting an already finished scan is a conflict.
func (s ScansService) AbortScan(ctx context.Context, scanID string) error {
	id, err := uuid.FromString(scanID)
	if err != nil {
		return errors.Assertion(fmt.Sprintf("not valid scan ID %s", scanID))
	}
	scan, err := s.db.GetScanByID(id)
	if err != nil {
		return err
	}
	// A scan in a terminal status cannot be aborted any more.
	if scan.Status != nil && *scan.Status == ScanStatusFinished {
		return &errors.Error{
			Kind:           errs.New("conflict"),
			Message:        fmt.Sprintf("scan is in terminal status %s", *scan.Status),
			HTTPStatusCode: http.StatusConflict,
		}
	}
	checks, err := s.db.GetScanChecks(id)
	if err != nil {
		return err
	}
	var ids []string
	for _, check := range checks {
		ids = append(ids, check.ID)
	}
	return s.streamClient.AbortChecks(ctx, ids)
}
// CreateScan stores a new scan in RUNNING state, computes the checks it is
// expected to run, pushes the scan-created metrics and kicks off the
// asynchronous creation of the scan's checks. It returns the new scan's ID.
func (s ScansService) CreateScan(ctx context.Context, scan *api.Scan) (uuid.UUID, error) {
if scan == nil {
return uuid.Nil, errors.Default("unexpected nil value creating a scan")
}
now := time.Now()
scan.StartTime = &now
id, err := uuid.NewV4()
if err != nil {
return uuid.Nil, err
}
scan.ID = id
status := ScanStatusRunning
scan.Status = &status
// Snapshot the checktype/assettype mapping on the scan itself so check
// creation works from a consistent view of it.
ctypesInfo, err := s.checktypesByAssettype(ctx)
if err != nil {
return uuid.Nil, err
}
scan.ChecktypesInfo = ctypesInfo
stats, err := s.getScanStats(ctx, ctypesInfo, scan)
if err != nil {
return uuid.Nil, err
}
scan.CheckCount = &stats.TotalNumberOfChecks
zero := 0
scan.ChecksCreated = &zero
scan.ChecksFinished = &zero
_, err = s.db.CreateScan(id, *scan)
if err != nil {
return uuid.Nil, err
}
// Push metrics.
s.pushScanMetrics(metricsScanCreated, util.Ptr2Str(scan.Tag), util.Ptr2Str(scan.ExternalID), stats)
// NOTE(review): logged at Warn level although this looks informational —
// confirm the level is intentional.
_ = level.Warn(s.logger).Log("ScanCreated", id)
// The checks are created in the background; a failure here is only logged,
// so it must be detected and recovered elsewhere.
go func() {
err := s.ccreator.CreateScanChecks(id.String())
if err != nil {
_ = level.Error(s.logger).Log("ErrorCreatingChecks", err)
}
}()
return id, nil
}
// getScanStats computes the number of checks the scan is expected to run,
// in total and per checktype, given the current checktype/assettype mapping.
// It fails if any target declares an asset type missing from the mapping.
func (s ScansService) getScanStats(ctx context.Context, checktypesInfo ChecktypesByAssettypes, scan *api.Scan) (scanStats, error) {
stats := scanStats{
NumberOfChecksPerChecktype: map[string]int{},
}
if scan.TargetGroups == nil {
// If this field is nil it means this scan is using a version of the
// create scan request that does not support metrics any more, just
// return empty stats.
return scanStats{}, nil
}
for _, group := range *scan.TargetGroups {
for _, a := range group.TargetGroup.Targets {
for _, c := range group.ChecktypesGroup.Checktypes {
validChecksForAsset, ok := checktypesInfo[a.Type]
if !ok {
return scanStats{}, fmt.Errorf("invalid assettype %s", a.Type)
}
_, ok = validChecksForAsset[c.Name]
if !ok {
// If the check is not present in the map for the assettype it means
// the checktype cannot run against this asset.
continue
}
stats.TotalNumberOfChecks = stats.TotalNumberOfChecks + 1
tag := fmt.Sprint("checktype:", c.Name)
n := stats.NumberOfChecksPerChecktype[tag]
stats.NumberOfChecksPerChecktype[tag] = n + 1
}
}
}
return stats, nil
}
// checktypesByAssettype fetches from vulcan core the checktypes that can run
// against each asset type and indexes them for constant-time lookup.
func (s ScansService) checktypesByAssettype(ctx context.Context) (ChecktypesByAssettypes, error) {
	resp, err := s.ctInformer.IndexAssettypes(ctx, client.IndexAssettypesPath())
	if err != nil {
		return nil, err
	}
	collection, err := s.ctInformer.DecodeAssettypeCollection(resp)
	if err != nil {
		return nil, err
	}
	lookup := ChecktypesByAssettypes{}
	for _, assettype := range collection {
		if assettype.Assettype == nil {
			continue
		}
		key := *assettype.Assettype
		checks, ok := lookup[key]
		if !ok {
			checks = map[string]struct{}{}
			lookup[key] = checks
		}
		for _, name := range assettype.Name {
			checks[name] = struct{}{}
		}
	}
	return lookup, nil
}
// ProcessScanCheckNotification processes a check state-change message and
// updates the corresponding check and its scan. A nil return means the event
// must be marked as consumed by the caller; a non-nil error asks the caller
// to retry the message later.
func (s ScansService) ProcessScanCheckNotification(ctx context.Context, msg []byte) error {
	_ = level.Debug(s.logger).Log("ProcessingMessage", string(msg))
	// Parse and validate the check message. Malformed messages are logged and
	// consumed (nil returned) because retrying them can never succeed.
	checkMssg := api.Check{}
	err := json.Unmarshal(msg, &checkMssg)
	if err != nil {
		_ = level.Error(s.logger).Log(err)
		return nil
	}
	err = validator.New().Struct(checkMssg)
	if err != nil {
		_ = level.Error(s.logger).Log("ErrorValidatingCheckUpdateEvent", err)
		return nil
	}
	checkMssg.Data = msg
	checkProgress := util.Ptr2Float(checkMssg.Progress)
	checkID, err := uuid.FromString(checkMssg.ID)
	if err != nil {
		_ = level.Error(s.logger).Log("NotValidCheckID", err)
		return nil
	}
	// If the progress is incorrect and the status of the check is terminal we
	// repair it. If it's incorrect but the status is not terminal we just
	// ignore the message.
	if checkProgress > 1.0 || checkProgress < 0.0 {
		if !api.CheckStates.IsTerminal(checkMssg.Status) {
			_ = level.Error(s.logger).Log(notValidProgressField, checkMssg.Progress, "Status", checkMssg.Status, "CheckID", checkMssg.ID)
			return nil
		}
		_ = level.Error(s.logger).Log(fixingProgressField, checkProgress, "Status", checkMssg.Status, "CheckID", checkMssg.ID)
		checkProgress = 1
		checkMssg.Progress = &checkProgress
	}
	dbCheck, err := s.db.GetCheckByID(checkID)
	if err != nil {
		_ = level.Error(s.logger).Log("CheckForMsgDoesNotExist", err)
		return nil
	}
	scanID, err := uuid.FromString(dbCheck.ScanID)
	if err != nil {
		_ = level.Error(s.logger).Log("NotValidScanID", err)
		return nil
	}
	_, err = s.db.UpsertCheck(scanID, checkID, checkMssg, api.CheckStates.LessOrEqual(checkMssg.Status))
	if err != nil {
		return err
	}
	// If the message does not have any status specified it is only
	// communicating other info, like the url of the logs, so we don't need to
	// take it into account for sending metrics or publishing a status change.
	if checkMssg.Status == "" {
		return nil
	}
	// As a check message does not contain all the information of a check we
	// must merge it with the info of the check stored in the DB.
	// (Removed a dead `if err != nil` re-check here: err was already checked
	// after UpsertCheck and mergeChecks cannot set it.)
	check := mergeChecks(dbCheck, checkMssg)
	s.pushCheckMetrics(check)
	err = s.notifyCheck(check)
	if err != nil {
		return err
	}
	// If the status of the check is not terminal it will not affect the status
	// of the scan, so we are done.
	if !api.CheckStates.IsTerminal(checkMssg.Status) {
		return nil
	}
	// Count the check as finished in its scan. Note that this operation is
	// idempotent, that means: even if called multiple times, for a given check
	// it will only increase by one the number of checks finished in the scan.
	_, err = s.db.AddCheckAsFinished(checkID)
	if err != nil {
		return err
	}
	scanCount, status, err := s.updateScanStatus(scanID)
	if err != nil {
		return err
	}
	if scanCount > 0 {
		_ = level.Info(s.logger).Log("ScanStatusUpdated", string(msg))
		_ = level.Debug(s.logger).Log("ScanStatusSet", scanID.String()+";"+status)
	}
	if status == ScanStatusFinished {
		err = s.notifyScan(scanID)
	}
	return err
}
// notifyScan pushes the finished-scan metrics and publishes the scan
// notification through the scans notifier.
func (s ScansService) notifyScan(scanID uuid.UUID) error {
	scan, err := s.GetScan(context.Background(), scanID.String())
	if err != nil {
		return err
	}
	teamTag := util.Ptr2Str(scan.Tag)
	externalID := util.Ptr2Str(scan.ExternalID)
	s.pushScanMetrics(metricsScanFinished, teamTag, externalID, scanStats{})
	return s.scansNotifier.Push(scan.ToScanNotification(), nil)
}
// notifyCheck publishes a check notification tagged with the checktype name
// and the check status.
func (s ScansService) notifyCheck(check api.Check) error {
	checktype := "unknown"
	if name := check.ChecktypeName; name != nil {
		checktype = *name
	}
	return s.checksNotifier.Push(check.ToCheckNotification(), map[string]string{
		"checktype_name": checktype,
		"status":         check.Status,
	})
}
// updateScanStatus recomputes the progress of the given scan and, when all of
// its checks are finished, marks it FINISHED with an end time. It returns the
// number of scan rows updated and the (possibly new) scan status.
func (s ScansService) updateScanStatus(id uuid.UUID) (int64, string, error) {
scan, err := s.db.GetScanStatus(id)
// NOTE(review): this ErrNotFound branch returns exactly the same values as
// the generic error branch below — presumably it was meant to wrap or
// downgrade the error; confirm intent.
if errors.IsKind(err, errors.ErrNotFound) {
return 0, "", err
}
if err != nil {
return 0, "", err
}
if scan.Status == nil {
err := fmt.Errorf("scan with id %s does not have mandatory field status", id.String())
return 0, "", err
}
// Already finished scans are left untouched.
if util.Ptr2Str(scan.Status) == ScanStatusFinished {
return 0, ScanStatusFinished, nil
}
if scan.CheckCount == nil {
err := fmt.Errorf("scan with id %s does not have mandatory field CheckCount", id.String())
return 0, "", err
}
if *scan.CheckCount < 1 {
_ = level.Error(s.logger).Log(ErrAtLeastOneTargetAndChecktype)
return 0, "", ErrAtLeastOneTargetAndChecktype
}
if scan.ChecksFinished == nil {
err := fmt.Errorf("scan with id %s does not have mandatory field ChecksFinished", id.String())
return 0, "", err
}
status := *scan.Status
count := *scan.CheckCount
finished := *scan.ChecksFinished
progress := float32(finished) / float32(count)
update := api.Scan{}
update.ID = id
update.Progress = &progress
// All checks finished: transition RUNNING -> FINISHED and stamp the end time.
if (status == ScanStatusRunning) && (count == finished) {
status = ScanStatusFinished
update.Status = &status
now := time.Now()
update.EndTime = &now
}
// The status list presumably restricts the update to scans still RUNNING —
// confirm against the persistence layer.
n, err := s.db.UpdateScan(id, update, []string{ScanStatusRunning})
tag := buildScanTag(util.Ptr2Str(scan.Tag), util.Ptr2Str(scan.ExternalID))
// Push scan progress metrics.
s.metricsClient.Push(metrics.Metric{
Name: scanCompletionMetric,
Typ: metrics.Histogram,
Value: float64(util.Ptr2Float(update.Progress)),
Tags: []string{componentTag, tag},
})
return n, status, err
}
// pushScanMetrics emits the scan count metric and, when stats are present,
// one requested-checks count metric per checktype.
func (s ScansService) pushScanMetrics(scanStatus, teamTag, programID string, stats scanStats) {
	tag := buildScanTag(teamTag, programID)
	statusTag := fmt.Sprint("scanstatus:", scanStatus)
	s.metricsClient.Push(metrics.Metric{
		Name:  scanCountMetric,
		Typ:   metrics.Count,
		Value: 1,
		Tags:  []string{componentTag, tag, statusTag},
	})
	const requestedTag = "checkstatus:requested"
	for checktypeTag, total := range stats.NumberOfChecksPerChecktype {
		s.metricsClient.Push(metrics.Metric{
			Name:  checkCountMetric,
			Typ:   metrics.Count,
			Value: float64(total),
			Tags:  []string{componentTag, tag, requestedTag, checktypeTag},
		})
	}
}
// pushCheckMetrics emits a check count metric tagged with the scan, the check
// status and the checktype name.
func (s ScansService) pushCheckMetrics(check api.Check) {
	program := ""
	team := ""
	if check.Metadata != nil {
		md := *check.Metadata
		program = md["program"]
		team = md["team"]
	}
	tags := []string{
		componentTag,
		buildScanTag(team, program),
		fmt.Sprint("checkstatus:", check.Status),
		fmt.Sprint("checktype:", util.Ptr2Str(check.ChecktypeName)),
	}
	s.metricsClient.Push(metrics.Metric{
		Name:  checkCountMetric,
		Typ:   metrics.Count,
		Value: 1,
		Tags:  tags,
	})
}
// buildScanTag composes the "scan:<team>-<program>" metrics tag. Missing team
// or program information is reported as "unknown".
func buildScanTag(teamTag string, programID string) string {
	teamLabel := "unknown"
	if teamTag != "" {
		// The team label is the last segment of a colon-separated tag.
		parts := strings.Split(teamTag, ":")
		teamLabel = parts[len(parts)-1]
	}
	programLabel := "unknown"
	if programID != "" {
		programLabel = programID
		// Check for global program
		if strings.Contains(programID, "@") {
			programLabel = strings.Split(programID, "@")[1]
		}
	}
	return fmt.Sprint("scan:", teamLabel, "-", programLabel)
}
// mergeChecks overlays the non-empty fields of update on top of base and
// returns the result; neither argument is modified.
func mergeChecks(base api.Check, update api.Check) api.Check {
	merged := base
	if update.Status != "" {
		merged.Status = update.Status
	}
	if util.Ptr2Float(update.Progress) != 0 {
		merged.Progress = update.Progress
	}
	if util.Ptr2Str(update.Report) != "" {
		merged.Report = update.Report
	}
	if util.Ptr2Str(update.Raw) != "" {
		merged.Raw = update.Raw
	}
	return merged
}
|
# Copyright (c) npm, Inc. and Contributors
# All rights reserved.
###-begin-{pkgname}-completion-###
### credits to npm, this file is coming directly from isaacs/npm repo
#
# Just testing for now. (trying to learn this cool stuff)
#
# npm command completion script
#
# Installation: {completer} completion >> ~/.bashrc (or ~/.zshrc)
#
# NOTE: {pkgname} and {completer} are template placeholders substituted at
# install time; this file is not runnable as-is.
# Keep '=' and '@' as part of words so completions like name@version work.
COMP_WORDBREAKS=${COMP_WORDBREAKS/=/}
COMP_WORDBREAKS=${COMP_WORDBREAKS/@/}
export COMP_WORDBREAKS
# bash path: `complete` is available.
if type complete &>/dev/null; then
_{pkgname}_completion () {
local si="$IFS"
IFS=$'\n' COMPREPLY=($(COMP_CWORD="$COMP_CWORD" \
COMP_LINE="$COMP_LINE" \
COMP_POINT="$COMP_POINT" \
{completer} completion -- "${COMP_WORDS[@]}" \
2>/dev/null)) || return $?
IFS="$si"
}
complete -F _{pkgname}_completion {pkgname}
# zsh path: reconstruct the COMP_* variables from compctl's read builtins.
elif type compctl &>/dev/null; then
_{pkgname}_completion () {
local cword line point words si
read -Ac words
read -cn cword
let cword-=1
read -l line
read -ln point
si="$IFS"
IFS=$'\n' reply=($(COMP_CWORD="$cword" \
COMP_LINE="$line" \
COMP_POINT="$point" \
{completer} completion -- "${words[@]}" \
2>/dev/null)) || return $?
IFS="$si"
}
# if the completer function returns on matches, default
# to filesystem matching
compctl -K _{pkgname}_completion + -f + {pkgname}
fi
###-end-{pkgname}-completion-###
|
import { Document } from "mongoose";
/** Options accepted when constructing the Bookman store. */
export interface BookmanOptions {
/** Directory used by the default file-based backend. */
defaultDir: string;
databaseName: string;
/** Optional MongoDB connection string — presumably switches storage to MongoDB when set; TODO confirm against the store implementation. */
mongodbURL?: string;
}
/** An object with arbitrary string keys and values of unknown type. */
export interface LooseObject {
[prop: string]: unknown;
}
/** Mongoose document shape for a stored key/value pair. */
export interface IMongoModel extends Document {
key: string;
value: string;
}
|
#!/bin/bash
# Build the source and wheel distributions, then upload them to PyPI.
set -e  # stop if the build fails so we don't upload stale artifacts
python setup.py sdist bdist_wheel
# Fix: `twine upload dist` passes the directory itself, which twine rejects;
# it expects the distribution files.
twine upload dist/*
|
#!/bin/bash
# Sanitize a source file: replace every non-printable character (except
# newlines) with a space and strip all '*' characters.
#
# Usage: <script> <input-file> <output-file>
arg_input=$1   # fix: was $0, which is the script's own path, not an argument
arg_output=$2  # fix: was $1, shifted because of the $0 bug above
# Quote expansions so paths with spaces work, and feed tr via redirection
# instead of a useless cat.
tr -c '[:print:]\n' ' ' < "$arg_input" | sed 's/\*//g' > "$arg_output"
|
/*
 * Prepends a new node holding a copy of the first uiStrLen characters of
 * pszCommentStart (NUL-terminated) to the linked list at *ppHead.
 *
 * Fix: both malloc results were previously dereferenced unchecked; on
 * allocation failure we now leave the list untouched instead of crashing.
 */
void insertIntoLinkedList(Line **ppHead, char *pszCommentStart, size_t uiStrLen) {
    Line *newLine = (Line *)malloc(sizeof(Line));
    if (newLine == NULL) {
        return; /* out of memory: drop the comment, keep the list intact */
    }
    newLine->pszComment = (char *)malloc(uiStrLen + 1); /* +1 for the terminator */
    if (newLine->pszComment == NULL) {
        free(newLine);
        return;
    }
    f_memcpy(newLine->pszComment, pszCommentStart, uiStrLen); /* copy the comment text */
    newLine->pszComment[uiStrLen] = '\0';                     /* NUL-terminate it */
    /* Push the node onto the front of the list. */
    newLine->pNext = *ppHead;
    *ppHead = newLine;
}
<reponame>dcoloma/gaia<gh_stars>1-10
'use strict';
// Unit-test setup for the system app's BluetoothTransfer module: expose the
// global under test and load the mocks it depends on.
mocha.globals(['BluetoothTransfer']);
requireApp(
'system/shared/test/unit/mocks/mock_navigator_moz_set_message_handler.js'
);
requireApp('system/test/unit/mock_navigator_get_device_storage.js');
requireApp('system/test/unit/mock_bluetooth.js');
requireApp('system/test/unit/mock_l10n.js');
requireApp('sms/shared/test/unit/mocks/mock_notification_helper.js');
// Mocks attached/detached automatically around each test by MocksHelper.
var mocksForBluetoothTransfer = new MocksHelper([
'Bluetooth',
'NotificationHelper'
]).init();
suite('system/bluetooth_transfer', function() {
mocksForBluetoothTransfer.attachTestHelpers();
// Originals saved in suiteSetup/setup and restored afterwards.
var realSetMessageHandler;
var realNavigatorGetDeviceStorage;
var realL10n;
var realPairList;
var real_sendingFilesQueue;
var fakePairList;
var fake_sendingFilesQueue;
suiteSetup(function(done) {
// Swap the navigator APIs for mocks before loading the module under test.
realSetMessageHandler = navigator.mozSetMessageHandler;
navigator.mozSetMessageHandler = MockNavigatormozSetMessageHandler;
realNavigatorGetDeviceStorage = navigator.getDeviceStorage;
navigator.getDeviceStorage = MockNavigatorGetDeviceStorage;
realL10n = navigator.mozL10n;
navigator.mozL10n = MockL10n;
MockNavigatormozSetMessageHandler.mSetup();
requireApp('system/js/bluetooth_transfer.js', done);
});
suiteTeardown(function() {
// Restore the real navigator APIs.
MockNavigatormozSetMessageHandler.mTeardown();
navigator.mozSetMessageHandler = realSetMessageHandler;
navigator.getDeviceStorage = realNavigatorGetDeviceStorage;
navigator.mozL10n = realL10n;
});
suite('UI', function() {
suite('getPairedDevice', function() {
suite('have paired devices', function() {
var getPairedDeviceCompleteCallback = function() {};
setup(function(done) {
getPairedDeviceCompleteCallback = this.sinon.spy();
BluetoothTransfer.getPairedDevice(function() {
getPairedDeviceCompleteCallback();
done();
});
});
test('have paired devices ', function() {
assert.ok(getPairedDeviceCompleteCallback.called);
});
});
});
// getDeviceName resolves a device address against BluetoothTransfer.pairList.
suite('getDeviceName', function() {
setup(function() {
realPairList = BluetoothTransfer.pairList;
fakePairList = {
index: [{name: 'device-No1',
address: '00:11:22:AA:BB:CC'},
{name: 'device-No2',
address: 'AA:BB:CC:00:11:22'}
]};
BluetoothTransfer.pairList = fakePairList;
});
teardown(function() {
BluetoothTransfer.pairList = realPairList;
});
suite('have device name', function() {
test('have device name ', function() {
var address = 'AA:BB:CC:00:11:22';
var deviceName = 'device-No2';
assert.equal(deviceName, BluetoothTransfer.getDeviceName(address));
});
});
suite('no device name', function() {
setup(function() {
BluetoothTransfer.pairList = {
index: []
};
});
test('no device name ', function() {
var address = 'AA:BB:CC:00:11:22';
var deviceName = 'unknown-device';
assert.equal(deviceName, BluetoothTransfer.getDeviceName(address));
});
});
});
// humanizeSize formats a byte count through the l10n mock; the expected
// strings are the mock's "id{args}" serialization, not user-visible text.
suite('humanizeSize', function() {
test('should handle zero size ', function() {
var expectedSize = 'fileSize{"size":"0.00","unit":"byteUnit-B"}';
assert.equal(expectedSize, BluetoothTransfer.humanizeSize(0));
});
test('should handle bytes size ', function() {
var expectedSize = 'fileSize{"size":"42.00","unit":"byteUnit-B"}';
assert.equal(expectedSize, BluetoothTransfer.humanizeSize(42));
});
test('should handle kilobytes size ', function() {
var expectedSize = 'fileSize{"size":"1.00","unit":"byteUnit-KB"}';
assert.equal(expectedSize, BluetoothTransfer.humanizeSize(1024));
});
test('should handle megabytes size ', function() {
var expectedSize = 'fileSize{"size":"4.67","unit":"byteUnit-MB"}';
assert.equal(expectedSize, BluetoothTransfer.humanizeSize(4901024));
});
test('should handle gigabytes size ', function() {
var expectedSize = 'fileSize{"size":"3.73","unit":"byteUnit-GB"}';
assert.equal(expectedSize, BluetoothTransfer.humanizeSize(4000901024));
});
});
// NOTE(review): the tests in this suite are order-dependent — each one
// relies on the queue state left behind by the previous test.
suite('operate sending files queue ', function() {
suiteSetup(function() {
real_sendingFilesQueue = BluetoothTransfer._sendingFilesQueue;
fake_sendingFilesQueue = [{
numberOfFiles: 1,
numSuccessful: 0,
numUnsuccessful: 0
}];
BluetoothTransfer._sendingFilesQueue = fake_sendingFilesQueue;
});
suiteTeardown(function() {
BluetoothTransfer._sendingFilesQueue = real_sendingFilesQueue;
});
test('push sending files request in queue, then create notification ',
function() {
var sendingFilesSchedule = {
numberOfFiles: 2,
numSuccessful: 0,
numUnsuccessful: 0
};
var evt = {
detail: sendingFilesSchedule
};
var title = 'transfer-has-started-title';
BluetoothTransfer.onFilesSending(evt);
assert.equal(2, BluetoothTransfer._sendingFilesQueue.length);
assert.equal(MockNotificationHelper.mTitle, title);
});
test('received onTransferComplete callback for received task, ' +
'should be ignored.. ', function() {
var transferInfo = {
received: true,
success: true
};
BluetoothTransfer.summarizeSentFilesReport(transferInfo);
assert.equal(2, BluetoothTransfer._sendingFilesQueue.length);
assert.equal(MockNotificationHelper.mTitle, null);
});
test('received onTransferComplete callback for the first sent task, ' +
'should remove the tast from queue.. ', function() {
var transferInfo = {
received: false,
success: true
};
BluetoothTransfer.summarizeSentFilesReport(transferInfo);
assert.equal(1, BluetoothTransfer._sendingFilesQueue.length);
assert.equal(MockNotificationHelper.mTitle, null);
});
test('received onTransferComplete callback for the second sent task' +
' --> first file, should record success/fail report in queue.. ',
function() {
var transferInfo = {
received: false,
success: false
};
BluetoothTransfer.summarizeSentFilesReport(transferInfo);
assert.equal(1, BluetoothTransfer._sendingFilesQueue.length);
assert.equal(1,
BluetoothTransfer._sendingFilesQueue[0].numUnsuccessful);
assert.equal(MockNotificationHelper.mTitle, null);
});
test('received onTransferComplete callback for the second sent task' +
' --> the second file, should remove the tast from queue. Then, ' +
'create a report in notification.. ', function() {
var transferInfo = {
received: false,
success: true
};
var title = 'transferReport-title';
var body = 'transferReport-description' +
'{"numSuccessful":1,"numUnsuccessful":1}';
BluetoothTransfer.summarizeSentFilesReport(transferInfo);
assert.equal(0, BluetoothTransfer._sendingFilesQueue.length);
assert.equal(MockNotificationHelper.mTitle, title);
assert.equal(MockNotificationHelper.mBody, body);
});
});
});
});
|
/*
* Copyright 2019 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.smallrye.jwt.build;
import java.nio.charset.StandardCharsets;
import java.security.Key;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.util.Collections;
import java.util.Map;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import javax.json.Json;
import javax.json.JsonObject;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.config.spi.ConfigSource;
import org.jose4j.base64url.Base64Url;
import org.jose4j.json.JsonUtil;
import org.jose4j.jwe.JsonWebEncryption;
import org.jose4j.jws.JsonWebSignature;
import org.jose4j.jwt.JwtClaims;
import org.junit.Assert;
import org.junit.Test;
import io.smallrye.jwt.algorithm.KeyEncryptionAlgorithm;
import io.smallrye.jwt.util.KeyUtils;
public class JwtSignEncryptTest {
// Inner-sign + encrypt with the PEM RSA public key. Also verifies that two
// tokens built from the same builder get distinct JWT IDs while the signing
// key is only loaded once (call count stays 1).
@Test
public void testSimpleInnerSignAndEncryptWithPemRsaPublicKey() throws Exception {
JwtBuildConfigSource configSource = getConfigSource();
try {
configSource.resetSigningKeyCallCount();
JwtClaimsBuilder builder = Jwt.claims().claim("customClaim", "custom-value");
String jti1 = checkRsaInnerSignedEncryptedClaims(builder.innerSign().encrypt());
Assert.assertNotNull(jti1);
String jti2 = checkRsaInnerSignedEncryptedClaims(builder.innerSign().encrypt());
Assert.assertNotNull(jti2);
Assert.assertNotEquals(jti1, jti2);
Assert.assertEquals(1, configSource.getSigningKeyCallCount());
} finally {
configSource.resetSigningKeyCallCount();
}
}
// Convenience overload: expects the default "RSA-OAEP" key management algorithm.
private String checkRsaInnerSignedEncryptedClaims(String jweCompact) throws Exception {
return checkRsaInnerSignedEncryptedClaims(jweCompact, "RSA-OAEP");
}
// Decrypts the JWE, verifies the nested JWS with the test RSA key, checks the
// JWE/JWS headers and the custom claim, and returns the token's JWT ID.
private String checkRsaInnerSignedEncryptedClaims(String jweCompact, String keyEncAlgo) throws Exception {
checkJweHeaders(jweCompact, keyEncAlgo, null);
JsonWebEncryption jwe = getJsonWebEncryption(jweCompact);
String jwtCompact = jwe.getPlaintextString();
JsonWebSignature jws = getVerifiedJws(jwtCompact);
JwtClaims claims = JwtClaims.parse(jws.getPayload());
// 4 claims expected: customClaim plus the builder-added iat, exp and jti.
Assert.assertEquals(4, claims.getClaimsMap().size());
checkClaimsAndJwsHeaders(jwtCompact, claims, "RS256", null);
Assert.assertEquals("custom-value", claims.getClaimValue("customClaim"));
return claims.getJwtId();
}
// Claims supplied as a Map, with the key algorithm set explicitly to RSA-OAEP.
@Test
public void testInnerSignAndEncryptMapOfClaimsRsaOaep() throws Exception {
String jweCompact = Jwt.claims(Collections.singletonMap("customClaim", "custom-value"))
.innerSign().keyAlgorithm(KeyEncryptionAlgorithm.RSA_OAEP).encrypt();
checkRsaInnerSignedEncryptedClaims(jweCompact, KeyEncryptionAlgorithm.RSA_OAEP.getAlgorithm());
}
// Same result via the one-call Jwt.innerSignAndEncrypt(Map) shortcut.
@Test
public void testInnerSignAndEncryptMapOfClaimsShortcut() throws Exception {
String jweCompact = Jwt.innerSignAndEncrypt(Collections.singletonMap("customClaim", "custom-value"));
checkRsaInnerSignedEncryptedClaims(jweCompact);
}
// Claims supplied as a JSON-P JsonObject.
@Test
public void testInnerSignAndEncryptJsonObject() throws Exception {
JsonObject json = Json.createObjectBuilder().add("customClaim", "custom-value").build();
String jweCompact = Jwt.claims(json).innerSign().encrypt();
checkRsaInnerSignedEncryptedClaims(jweCompact);
}
// JsonObject variant of the one-call shortcut.
@Test
public void testInnerSignAndEncryptJsonObjectShortcut() throws Exception {
JsonObject json = Json.createObjectBuilder().add("customClaim", "custom-value").build();
String jweCompact = Jwt.innerSignAndEncrypt(json);
checkRsaInnerSignedEncryptedClaims(jweCompact);
}
// Claims loaded from the /customClaim.json classpath resource.
@Test
public void testInnerSignAndEncryptExistingClaims() throws Exception {
String jweCompact = Jwt.claims("/customClaim.json").innerSign().encrypt();
checkRsaInnerSignedEncryptedClaims(jweCompact);
}
// Resource-file variant of the one-call shortcut.
@Test
public void testInnerSignAndEncryptExistingClaimsShortcut() throws Exception {
String jweCompact = Jwt.innerSignAndEncrypt("/customClaim.json");
checkRsaInnerSignedEncryptedClaims(jweCompact);
}
// Sets explicit "kid" headers on both layers: the inner JWS gets
// "sign-key-id" and the outer JWE gets "key-enc-key-id"; both are asserted.
@Test
public void testInnerSignAndEncryptWithPemRsaPublicKeyWithHeaders() throws Exception {
String jweCompact = Jwt.claims()
.claim("customClaim", "custom-value")
.jws()
.keyId("sign-key-id")
.innerSign()
.keyId("key-enc-key-id")
.encrypt();
checkJweHeaders(jweCompact, "RSA-OAEP", "key-enc-key-id");
JsonWebEncryption jwe = getJsonWebEncryption(jweCompact);
String jwtCompact = jwe.getPlaintextString();
JsonWebSignature jws = getVerifiedJws(jwtCompact);
JwtClaims claims = JwtClaims.parse(jws.getPayload());
Assert.assertEquals(4, claims.getClaimsMap().size());
checkClaimsAndJwsHeaders(jwtCompact, claims, "RS256", "sign-key-id");
Assert.assertEquals("custom-value", claims.getClaimValue("customClaim"));
}
// Temporarily points the encryption key location at a JWK file; the key id
// "key1" from the JWK ends up as the JWE "kid". The PEM location is restored
// in the finally block so other tests are unaffected.
@Test
public void testInnerSignAndEncryptWithJwkRsaPublicKey() throws Exception {
JwtBuildConfigSource configSource = getConfigSource();
configSource.setEncryptionKeyLocation("/publicKey.jwk");
String jweCompact = null;
try {
jweCompact = Jwt.claims()
.claim("customClaim", "custom-value")
.jws()
.keyId("sign-key-id")
.innerSign()
.keyId("key1")
.encrypt();
} finally {
configSource.setEncryptionKeyLocation("/publicKey.pem");
}
checkJweHeaders(jweCompact, "RSA-OAEP", "key1");
JsonWebEncryption jwe = getJsonWebEncryption(jweCompact);
String jwtCompact = jwe.getPlaintextString();
JsonWebSignature jws = getVerifiedJws(jwtCompact);
JwtClaims claims = JwtClaims.parse(jws.getPayload());
Assert.assertEquals(4, claims.getClaimsMap().size());
checkClaimsAndJwsHeaders(jwtCompact, claims, "RS256", "sign-key-id");
Assert.assertEquals("custom-value", claims.getClaimValue("customClaim"));
}
// Symmetric path: the same secret backs both the inner HS256 signature and
// the outer A256KW key wrapping.
// NOTE(review): the secret value looks redacted ("<KEY>") in this copy of the
// source — the real fixture must be a sufficiently long secret for
// A256KW/HS256; confirm against the original repository.
@Test
public void testInnerSignWithSecretAndEncryptWithSecret() throws Exception {
String secret = "<KEY>";
String jweCompact = Jwt.claims()
.claim("customClaim", "custom-value")
.innerSignWithSecret(secret)
.encryptWithSecret(secret);
checkJweHeaders(jweCompact, "A256KW", null);
SecretKey secretKey = new SecretKeySpec(secret.getBytes(StandardCharsets.UTF_8), "AES");
JsonWebEncryption jwe = getJsonWebEncryption(jweCompact, secretKey);
String jwtCompact = jwe.getPlaintextString();
JsonWebSignature jws = getVerifiedJws(jwtCompact, secretKey);
JwtClaims claims = JwtClaims.parse(jws.getPayload());
Assert.assertEquals(4, claims.getClaimsMap().size());
checkClaimsAndJwsHeaders(jwtCompact, claims, "HS256", null);
Assert.assertEquals("custom-value", claims.getClaimValue("customClaim"));
}
private static JwtBuildConfigSource getConfigSource() {
for (ConfigSource cs : ConfigProvider.getConfig().getConfigSources()) {
if (cs instanceof JwtBuildConfigSource) {
return (JwtBuildConfigSource) cs;
}
}
return null;
}
    /** Loads the RSA private key used to decrypt test JWE tokens. */
    private static PrivateKey getPrivateKey() throws Exception {
        return KeyUtils.readPrivateKey("/privateKey.pem");
    }

    /** Loads the RSA public key used to verify test JWS signatures. */
    private static PublicKey getPublicKey() throws Exception {
        return KeyUtils.readPublicKey("/publicKey.pem");
    }

    /** Verifies {@code jwt} with the default RSA public key. */
    private static JsonWebSignature getVerifiedJws(String jwt) throws Exception {
        return getVerifiedJws(jwt, getPublicKey());
    }

    /** Parses the compact JWS and asserts its signature verifies with {@code key}. */
    private static JsonWebSignature getVerifiedJws(String jwt, Key key) throws Exception {
        JsonWebSignature jws = new JsonWebSignature();
        jws.setCompactSerialization(jwt);
        jws.setKey(key);
        Assert.assertTrue(jws.verifySignature());
        return jws;
    }
    /**
     * Asserts the standard claims added by the builder (iat, exp, jti) are present
     * and that the JWS protected headers match the expected algorithm and kid.
     */
    private static void checkClaimsAndJwsHeaders(String jwsCompact, JwtClaims claims, String algo, String keyId)
            throws Exception {
        Assert.assertNotNull(claims.getIssuedAt());
        Assert.assertNotNull(claims.getExpirationTime());
        Assert.assertNotNull(claims.getJwtId());
        Map<String, Object> headers = getJwsHeaders(jwsCompact);
        // alg + typ, plus kid when one is expected.
        Assert.assertEquals(keyId != null ? 3 : 2, headers.size());
        Assert.assertEquals(algo, headers.get("alg"));
        Assert.assertEquals("JWT", headers.get("typ"));
        if (keyId != null) {
            Assert.assertEquals(keyId, headers.get("kid"));
        } else {
            Assert.assertNull(headers.get("kid"));
        }
    }
    /**
     * Asserts the JWE protected headers: the key-management algorithm, A256GCM
     * content encryption, cty=JWT (nested token) and, optionally, the kid.
     */
    private static void checkJweHeaders(String jweCompact, String keyEncKeyAlg, String keyId) throws Exception {
        Map<String, Object> jweHeaders = getJweHeaders(jweCompact);
        // alg + enc + cty, plus kid when one is expected.
        Assert.assertEquals(keyId != null ? 4 : 3, jweHeaders.size());
        Assert.assertEquals(keyEncKeyAlg, jweHeaders.get("alg"));
        Assert.assertEquals("A256GCM", jweHeaders.get("enc"));
        if (keyId != null) {
            Assert.assertEquals(keyId, jweHeaders.get("kid"));
        }
        Assert.assertEquals("JWT", jweHeaders.get("cty"));
    }
    /** Decrypts {@code compactJwe} with the default RSA private key. */
    private static JsonWebEncryption getJsonWebEncryption(String compactJwe) throws Exception {
        return getJsonWebEncryption(compactJwe, getPrivateKey());
    }

    /** Wraps the compact JWE for inspection/decryption with {@code key}. */
    private static JsonWebEncryption getJsonWebEncryption(String compactJwe, Key key) throws Exception {
        JsonWebEncryption jwe = new JsonWebEncryption();
        jwe.setCompactSerialization(compactJwe);
        jwe.setKey(key);
        return jwe;
    }

    /** Decodes the base64url protected-header segment (before the first dot) of a compact JWE. */
    private static Map<String, Object> getJweHeaders(String compactJwe) throws Exception {
        int firstDot = compactJwe.indexOf(".");
        String headersJson = new Base64Url().base64UrlDecodeToUtf8String(compactJwe.substring(0, firstDot));
        return JsonUtil.parseJson(headersJson);
    }

    /** Decodes the base64url protected-header segment (before the first dot) of a compact JWS. */
    private static Map<String, Object> getJwsHeaders(String compactJws) throws Exception {
        int firstDot = compactJws.indexOf(".");
        String headersJson = new Base64Url().base64UrlDecodeToUtf8String(compactJws.substring(0, firstDot));
        return JsonUtil.parseJson(headersJson);
    }
}
|
autoload -U add-zsh-hook
autoload -U colors && colors
autoload -Uz vcs_info
# Allow parameter expansion inside PROMPT/RPS1 (needed for vcs_info output).
setopt prompt_subst
# # Colors:
# # 9: Orange
# # 6: Teal
# # 5: Pink
# # 4: Blue
# # 3: Yellow
# # 2: Green
# # 1: Red
# # 0: Black
# Only enable the git backend (skips the cost of probing other VCSes).
zstyle ':vcs_info:*' enable git
# Red "M" for unstaged changes, green "M" for staged changes.
zstyle ':vcs_info:*' unstagedstr ' %F{1}M%f'
zstyle ':vcs_info:*' stagedstr ' %F{2}M%f'
zstyle ':vcs_info:*' check-for-changes true
# [branch unstaged staged misc]; actionformats adds the action (rebase, merge, ...).
zstyle ':vcs_info:*' formats "[%b%u%c%m]"
zstyle ':vcs_info:*' actionformats "[%b%u%c] %F{4}%a%f"
# Run the custom +vi-* hooks below each time vcs_info builds its message.
zstyle ':vcs_info:git*+set-message:*' hooks git-st git-stash git-untracked
# Show number of commits ahead or behind of the remote.
# Runs as a vcs_info set-message hook and communicates via hook_com.
function +vi-git-st() {
    # `diff` was previously left global and leaked into the shell's
    # environment after every prompt refresh; declare it local.
    local ahead behind remote diff
    # Are we on a remote-tracking branch?
    remote=${$(git rev-parse --verify ${hook_com[branch]}"@{upstream}" \
        --symbolic-full-name --abbrev-ref 2>/dev/null)}
    if [[ -n ${remote} ]]; then
        ahead=$(git rev-list ${hook_com[branch]}"@{upstream}"..HEAD 2>/dev/null \
            | wc -l | tr -d ' ')
        behind=$(git rev-list HEAD..${hook_com[branch]}"@{upstream}" 2>/dev/null \
            | wc -l | tr -d ' ')
        diff="${hook_com[branch]}"
        # Green count of commits we are ahead by.
        if [[ $ahead -gt 0 ]]; then
            diff="$diff %F{2}$ahead%f"
        fi
        # Red count of commits we are behind by.
        if [[ $behind -gt 0 ]]; then
            diff="$diff%F{1}$behind%f"
        fi
        hook_com[branch]="$diff"
    fi
}
# Show the number of stashes
function +vi-git-stash() {
    local -a stashes
    # NOTE(review): `local -a` declares an array but a scalar count is
    # assigned — plain `local` would be accurate; behavior is unchanged.
    stashes=$(git stash list 2>/dev/null | wc -l | tr -d ' ')
    if [[ $stashes -gt 0 ]]; then
        hook_com[misc]=" %F{4}${stashes}%f"
    fi
}
# Append a red "?" to the misc segment when the worktree has untracked files.
function +vi-git-untracked() {
    # Previously `untracked` was global and leaked after each prompt refresh.
    local untracked
    untracked=$(git ls-files --other --exclude-standard | wc -l | tr -d ' ')
    if [[ $untracked -gt 0 ]]; then
        hook_com[misc]+="%F{1}?%f"
    fi
}
# Produce the git segment text: refresh vcs_info and print its message.
# Called from the async child in right-prompt.
function RCMD() {
    vcs_info 2>/dev/null
    echo "${vcs_info_msg_0_}"
}
# Build the left prompt. $1 is pre-computed git status text (may be empty);
# the result is assigned to the global PROMPT.
function setup-prompt() {
    # All working variables are now local; previously they leaked into the
    # global environment on every prompt rebuild.
    local git_info=$1
    local ssh=""
    local venv=""
    local venv_padding=""
    # Show the hostname over SSH
    if [[ -n $SSH_CONNECTION ]]; then
        ssh="%m "
    fi
    # Mark an active Python virtualenv with a "v".
    if [[ -n $VIRTUAL_ENV ]]; then
        venv="v"
        venv_padding=" "
    fi
    local NEW_PROMPT=""
    # The local hostname if the current session is over ssh
    NEW_PROMPT="$NEW_PROMPT$ssh"
    # Opening paren and 2 components of pwd
    NEW_PROMPT="$NEW_PROMPT(%2c"
    # Yellow number of jobs, if there are any jobs, otherwise empty string
    NEW_PROMPT="$NEW_PROMPT%{$fg[yellow]%}%(1j. %j.)%{$reset_color%}"
    # Green virtualenv info, prefix with space (unless it's already done by jobs)
    NEW_PROMPT="$NEW_PROMPT%{$fg[green]%}%(1j..$venv_padding)$venv%{$reset_color%}"
    # Closing paren around pwd, any passed git information
    NEW_PROMPT="$NEW_PROMPT)$git_info"
    # Either green/red (based on previous command exit code) either %/# (depending on root)
    NEW_PROMPT="$NEW_PROMPT %(?.%{$fg[green]%}%#%{$reset_color%} .%{$fg[red]%}%#%{$reset_color%} )"
    PROMPT="$NEW_PROMPT"
}
# Draw an initial prompt without git info; the async machinery fills it in.
setup-prompt ""
# http://www.anishathalye.com/2015/02/07/an-asynchronous-shell-prompt/
# https://github.com/anishathalye/dotfiles
# PID of the in-flight background git computation (0 = none running).
ASYNC_PROC=0
# Compute the (potentially slow) git segment asynchronously before each
# prompt: a background child writes the text to a temp file and signals
# the parent shell with SIGUSR1 (handled by TRAPUSR1 below).
function right-prompt() {
    function async() {
        # save to temp file
        printf "%s" "$(RCMD)" > "$HOME/.zsh_tmp_prompt"
        # signal parent
        kill -s USR1 $$
    }
    # kill child if necessary — a newer prompt supersedes an unfinished one
    if [[ "${ASYNC_PROC}" != 0 ]]; then
        kill -s HUP $ASYNC_PROC >/dev/null 2>&1 || :
    fi
    # start background computation (&! detaches it from job control)
    async &!
    ASYNC_PROC=$!
}
add-zsh-hook precmd right-prompt
# SIGUSR1 handler: the async child finished; rebuild the prompt and redraw.
function TRAPUSR1() {
    # read from temp file
    setup-prompt "$(cat $HOME/.zsh_tmp_prompt)"
    # reset proc number
    ASYNC_PROC=0
    # redisplay (only if the line editor is active)
    zle && zle reset-prompt
}
# Right prompt + vim mode
# Reduce the lag switching into Normal mode to 0.1s
export KEYTIMEOUT=1
# Show vim mode on right
# http://dougblack.io/words/zsh-vi-mode.html
# Rebuild RPS1 whenever the line editor starts or the keymap changes, so
# "[NORMAL]" shows only while in vi command (vicmd) mode.
function zle-line-init zle-keymap-select {
    VIM_PROMPT="[% NORMAL]%"
    # Apparently EPS1 is not a typo
    RPS1="${${KEYMAP/vicmd/$VIM_PROMPT}/(main|viins)/} $EPS1"
    zle reset-prompt
}
zle -N zle-line-init
zle -N zle-keymap-select
# Force update of RPS1 immediately
# Clearing before each command avoids a stale mode indicator lingering.
reset_rps1() {
    RPS1=""
}
add-zsh-hook precmd reset_rps1
|
import React, { Component } from 'react';

// Props for the RichText contentEditable input.
interface RichTextProps {
    className?: string;   // extra CSS class appended after "rich-text-input"
    placeHolder?: string; // NOTE(review): declared but not used in render — confirm intent
    maxRows?: number;     // rows visible before the editor scrolls (default 1)
    lineHeight?: number;  // line height in px (default 30)
    // keyMapping: any;
    onChange: any;        // called with the editor's current innerHTML on every change
}

// State is currently empty; the commented entries are kept for future use.
interface RichTextState {
    // listenKeys: Set<any>;
    // listenKeysMapping: Map<any, any>;
}
/**
 * A contentEditable-based rich text input. Exposes imperative helpers to
 * insert nodes at the caret (addNode), delete characters before the caret
 * (delNode), clear the content, and read the text typed before the caret
 * (getInputText). Reports content changes through props.onChange.
 */
export class RichText extends Component<RichTextProps, RichTextState> {
    // The underlying contentEditable <div>; assigned by the render ref callback.
    richText: HTMLDivElement | null = null;
    // Last selection Range observed inside the editor (saved in saveRange).
    range: Range | null = null;

    constructor(props: any) {
        super(props);
        this.state = {
            // listenKeys: new Set<any>(),
            // listenKeysMapping: new Map<any, any>(),
        };
        this.richText = null;
        this.range = null;
    }

    componentDidMount(): void {
        // const { keyMapping = []} = this.props;
    }

    // Remember the current caret/selection so it can survive focus changes.
    saveRange = (e: any) => {
        const sel = window.getSelection();
        if (sel !== null) {
            if (sel.rangeCount > 0) {
                const range = sel.getRangeAt(0);
                if (range instanceof Range) {
                    this.range = range;
                }
            }
        }
    };

    // Notify the owner with the editor's current HTML content.
    onInputChange = () => {
        const { onChange } = this.props;
        if (onChange && typeof onChange === 'function') {
            onChange(this.getNode());
        }
    };

    // Raw innerHTML of the editor. Non-null assertion assumes the ref is mounted.
    getNode = () => this.richText!.innerHTML;

    // True when the browser selection focus currently lives inside this editor.
    isAlive = () => {
        const sel = window.getSelection();
        if (sel != null) {
            if (
                sel.rangeCount > 0 &&
                (this.richText === sel.focusNode || this.richText!.contains(sel.focusNode))
            ) {
                return true;
            }
        }
        return false;
    };

    // Focus the editor unless the selection is already inside it.
    autoFocus = () => {
        if (this.isAlive()) {
            return;
        }
        this.richText!.focus();
    };

    // Insert a Node or plain string at the current selection, replacing any
    // selected content, then move the caret after the inserted node.
    addNode = (content: any) => {
        if (!content) {
            return;
        }
        let insertNode: any = null;
        if (content instanceof Node) {
            insertNode = content;
        } else if (typeof content === 'string') {
            insertNode = document.createTextNode(content);
        }
        this.autoFocus();
        const sel = window.getSelection();
        if (sel !== null) {
            if (sel.rangeCount > 0) {
                const range = sel.getRangeAt(0);
                range.deleteContents();
                const frag = document.createDocumentFragment();
                const lastNode = frag.appendChild(insertNode);
                range.insertNode(frag);
                // Collapse the selection to just after the inserted node.
                const contentRange = range.cloneRange();
                contentRange.setStartAfter(lastNode);
                sel.removeAllRanges();
                sel.addRange(contentRange);
            }
        }
        this.onInputChange();
    };

    // Remove all content and notify the owner.
    clearNode = () => {
        if (this.richText !== null) {
            this.richText.innerHTML = '';
        }
        this.onInputChange();
    };

    // Delete up to n characters immediately before the caret (clamped to the
    // start of the container).
    delNode = (n: any) => {
        this.autoFocus();
        const sel = window.getSelection();
        if (sel !== null) {
            if (sel.rangeCount > 0) {
                const range = sel.getRangeAt(0);
                let newOffset = range.startOffset - n > 0 ? range.startOffset - n : 0;
                // NOTE(review): toString() here yields the container's text —
                // clamps the offset to the container length; confirm for
                // non-text start containers.
                if (newOffset > range.startContainer.toString().length) {
                    newOffset = range.startContainer.toString().length;
                }
                range.setStart(range.startContainer, newOffset);
                range.deleteContents();
                const contentRange = range.cloneRange();
                sel.removeAllRanges();
                sel.addRange(contentRange);
            }
        }
        this.onInputChange();
    };

    // Text of the current text node up to the caret, or '' when the selection
    // is outside the editor or not inside a text node (nodeType 3).
    getInputText = () => {
        if (!this.isAlive()) {
            return '';
        }
        const sel = window.getSelection();
        if (sel !== null) {
            if (sel.rangeCount > 0) {
                const range = sel.getRangeAt(0);
                if (range.startContainer.nodeType === 3) {
                    const offset = range.startOffset;
                    if (
                        range !== null &&
                        range.startContainer != null &&
                        range.startContainer.nodeValue != null
                    ) {
                        return range.startContainer.nodeValue.slice(0, offset);
                    }
                }
            }
        }
        return '';
    };

    render() {
        const { className, maxRows = 1, lineHeight = 30 } = this.props;
        return (
            <div
                className={className ? `rich-text-input ${className}` : 'rich-text-input'}
                style={{
                    lineHeight: `${lineHeight}px`,
                    maxHeight: `${17 + lineHeight * maxRows}px`,
                    width: '200px',
                    border: '1px solid #000',
                }}
                contentEditable
                suppressContentEditableWarning
                onSelect={this.saveRange}
                ref={el => {
                    this.richText = el;
                }}
            />
        );
    }
}
|
<reponame>cschladetsch/KAI
#pragma once
#include <KAI/Core/Base.h>
// TODO: remove this and have DotGraph use Pimpl
// not very important as few things include this file.
#include <set>
KAI_BEGIN
// Generates a GraphViz (DOT) script showing the dependency graph reachable
// from a given root object.
struct DotGraph : StringStream
{
private:
    std::set<Label> excluded_names;         // labels skipped during traversal
    std::set<Type::Number> excluded_types;  // type numbers skipped during traversal

public:
    DotGraph();
    // Convenience: generate the graph for the object and write it to filename.
    DotGraph(Object, String const &filename);
    void Generate(Object, String const &filename);
    void ExcludeLabel(Label const &);

    // Exclude every object of type T from the generated graph.
    template <class T>
    void ExcludeType()
    {
        excluded_types.insert(Type::Traits<T>::Number);
    }

    void WriteHeader();
    void WriteFooter();
    void WriteToFile(const char *);
    // Add a single object (and its edges) to the graph body.
    void Add(Object);
    String GetText();

    friend DotGraph &operator<<(DotGraph &graph, Object const &object);

protected:
    // True when the object's label or type was registered for exclusion.
    bool IsExcluded(Object const&) const;
};

DotGraph &operator<<(DotGraph &graph, Object const &object);

KAI_END
|
<!DOCTYPE html>
<html lang="en">
<head>
    <!-- Doctype + charset added: without the doctype the page rendered in
         quirks mode, and the charset was unspecified. -->
    <meta charset="utf-8">
    <title>My Blog</title>
    <style>
        /* Include padding inside the declared widths so the two floated
           columns total exactly 100% and the sidebar does not wrap below
           the article (80% + 20% + 40px of padding overflowed before). */
        #main-article,
        .side-bar {
            box-sizing: border-box;
        }
        #main-article {
            float: left;
            width: 80%;
            padding: 10px;
        }
        .side-bar {
            float: right;
            width: 20%;
            padding: 10px;
        }
    </style>
</head>
<body>
    <div id="main-article">
        Main article goes here...
    </div>
    <div class="side-bar">
        Sidebar content goes here...
    </div>
</body>
</html>
import React from "react";
import Layout from "../components/layout";
import "materialize-css";
import "materialize-css/dist/css/materialize.min.css";
import { Parallax } from "react-materialize";
// Landing page: two full-width parallax hero images around a white intro
// section (Materialize CSS classes).
// NOTE(review): the paragraph copy below is user-facing placeholder text
// containing profanity — replace before release.
const LandingPage = () => (
  <Layout>
    <div>
      <Parallax imageSrc="https://images.unsplash.com/photo-1506197061617-7f5c0b093236?ixlib=rb-0.3.5&ixid=eyJhcHBfaWQiOjEyMDd9&s=418764a3f148dde8a8debcea492f9156&auto=format&fit=crop&w=1000&q=80" />
      <div className="section white">
        <div className="row container">
          <h2 className="header">Go Or Owe</h2>
          <p className="grey-text text-darken-3 lighten-3">
            This Shit Better Be Working!
          </p>
        </div>
      </div>
      <Parallax imageSrc="https://images.unsplash.com/photo-1534258936925-c58bed479fcb?ixlib=rb-0.3.5&ixid=eyJhcHBfaWQiOjEyMDd9&s=de05b46a8ac91fcff2b134811e62d79f&auto=format&fit=crop&w=1000&q=80" />
    </div>
  </Layout>
);

export default LandingPage;
|
"use strict";
const db = require("../server/db");
const {User} = require("../server/db/models");
const {Transaction} = require("../server/db/models");
// Rebuild the schema and load demo users and transactions.
// WARNING: `force: true` drops and recreates every table — destructive.
async function seed() {
  await db.sync({force: true});
  console.log("db synced!");
  const users = await Promise.all([
    User.create({email: "<EMAIL>",
      password: "<PASSWORD>*",
      firstName: "Cody",
      lastName: "Pickles"}),
    User.create({email: "<EMAIL>",
      password: "<PASSWORD>*",
      firstName: "John",
      lastName: "Murphy"
    }),
    User.create({
      email: "<EMAIL>",
      password: "<PASSWORD>*",
      admin: true,
      username: "thomas",
      firstName: "tom",
      lastName: "smih",
      apt: "2",
      street: "Main st",
      houseNumber: "111",
      zipcode: "11111",
      state: "NY"
    })
  ]);
  // NOTE(review): userId values below assume the creates above received
  // auto-increment ids 1..3 in order — confirm that's guaranteed, since
  // Promise.all does not order the inserts.
  const transactions = await Promise.all([
    Transaction.create({
      ticker: "TSLA",
      priceAtTransaction: 800.03,
      quantity: 20,
      userId: 1,
    }),
    Transaction.create({
      ticker: "AMZN",
      priceAtTransaction: 2134.87,
      quantity: 500,
      userId: 1,
    }),
    Transaction.create({
      ticker: "IBM",
      priceAtTransaction: 150.7,
      quantity: 50,
      userId: 1,
      sold: true,
    }),
    Transaction.create({
      ticker: "ORCL",
      priceAtTransaction: 55.47,
      quantity: 150,
      userId: 3,
    })
  ])
  console.log(`seeded ${transactions.length} transactions`)
  console.log(`seeded ${users.length} users`);
  console.log(`seeded successfully`);
}
// We've separated the `seed` function from the `runSeed` function.
// This way we can isolate the error handling and exit trapping.
// The `seed` function is concerned only with modifying the database.
async function runSeed() {
  console.log("seeding...");
  try {
    await seed();
  } catch (err) {
    // Record a failing exit code but still fall through to close the db.
    console.error(err);
    process.exitCode = 1;
  } finally {
    console.log("closing db connection");
    await db.close();
    console.log("db connection closed");
  }
}

// Execute the `seed` function, IF we ran this module directly (`node seed`).
// `Async` functions always return a promise, so we can use `catch` to handle
// any errors that might occur inside of `seed`.
if (module === require.main) {
  runSeed();
}

// we export the seed function for testing purposes (see `./seed.spec.js`)
module.exports = seed;
|
<reponame>TheDadi/polyfill-library
// Feature detect: true only when the global Symbol exists and already
// defines asyncIterator (i.e. no polyfill is needed).
'Symbol' in this && 'asyncIterator' in this.Symbol
|
#!/bin/bash
# Author: Izzy Fayon
# Ver : 1.0
# 1.0 : Initial script
#
# Imports a previously exported account dump (<account>.zip) into the local
# MySQL instance.
#
# Args:
#   $1  account name (archive expected at $2/<account>.zip)
#   $2  destination storage directory
#   $3  "N" to run the partition-reorganize helper before importing
#   $4  optional extraction path (defaults to $2/account_transfer_dump)
account=$1
destination_storage_dir=$2
rewriteInDestination=$3
path=$4
base_dir="/home/cpq"
# NOTE(review): hard-coded root credentials — consider ~/.my.cnf or env vars.
dbUserName="root"
dbPassword="root"
# ANSI color codes for log output.
RED='\033[0;31m'
GRE='\033[0;32m'
YEL='\033[0;33m'
NOC='\033[0m'
# Check if account data file exists #
if ! [ -f ${destination_storage_dir}/${account}.zip ]; then
    echo -e "\n$(date +%Y-%m-%d" "%H:%M:%S) ${RED}[ERROR] Account data file doesn't exist. Process stopped !!!${NOC}"
    exit 1
fi
echo "$(date +%Y-%m-%d" "%H:%M:%S) [INFO] Start importing data of $account"
# Default the extraction path when $4 was not supplied.
if [ -z $path ]; then
    path="${destination_storage_dir}/account_transfer_dump"
fi
# Return the portion of a filename before the first dot (the database name
# for "<db>.sql" dumps). The previous awk/substring version computed the
# dot index manually and, when the argument contained no dot, silently
# dropped the final character; ${string%%.*} handles both cases correctly.
getDBName() {
    local string=$1
    echo "${string%%.*}"
}
# Recreate the extraction directory from scratch.
if [ ! -d "$path" ]; then
    mkdir "$path"
else
    rm -rf "$path"
    mkdir "$path"
fi
file=$(find $destination_storage_dir -name $account.zip -type f)
# Install unzip on demand (Debian/Ubuntu hosts).
unzipInstalled=$(which unzip | wc -l)
if [ $unzipInstalled -eq 0 ]; then
    apt install unzip
fi
echo "$(date +%Y-%m-%d" "%H:%M:%S) [INFO] Extract data of $account from $account.zip started"
unzip $file -d $path
# A usable extraction yields more than a handful of directory entries;
# fewer means the archive is missing files or corrupted.
if [ $(ls -la ${destination_storage_dir}/account_transfer_dump | wc -l) -gt 5 ]; then
    echo "$(date +%Y-%m-%d" "%H:%M:%S) [INFO] Extract data of $account from $account.zip finished"
else
    echo -e "\n$(date +%Y-%m-%d" "%H:%M:%S) ${RED}[ERROR] Missig files from $account.zip or $account.zip is corrupted. Process stopped !!!${NOC}"
    exit 1
fi
# The reorganize stored routine must exist on the destination server.
reorgRoutineExists=$(mysql -N -s -u$dbUserName -p$dbPassword -h localhost -e"SELECT count(1) from information_schema.routines where routine_name='dump_entire_partition_account_reorganize' and routine_schema='db_manager'" 2>&1 | grep -v mysql:)
if [[ $reorgRoutineExists -eq 0 ]]; then
    echo -e "\n$(date +%Y-%m-%d" "%H:%M:%S) ${RED}[ERROR] Missig routine db_manager.dump_entire_partition_account_reorganize on destination MySQL. Process stopped !!!${NOC}"
    exit 1
fi
# "N" means the destination partitions must first be reorganized by the
# account-specific helper script shipped alongside the dump.
if [ $rewriteInDestination == "N" ]; then
    if [ -f ${destination_storage_dir}/${account}.sh ]; then
        #chmod +x $base_dir/${account}.sh
        echo "$(date +%Y-%m-%d" "%H:%M:%S) [INFO] Start reorganizing partitions on the DB."
        ${destination_storage_dir}/${account}.sh
    else
        echo -e "\n$(date +%Y-%m-%d" "%H:%M:%S) ${RED}[ERROR] Missig file $account.sh. Process stopped!!!${NOC}"
        exit 1
    fi
fi
# Import every extracted .sql dump into the database named by its filename.
for i in `ls $path/*.sql` ; do
    db=$(getDBName $i | sed "s|$path||g" | sed "s/\///g")
    echo "$(date +%Y-%m-%d" "%H:%M:%S) [INFO] Insert data from $i"
    mysql -N -s -u$dbUserName -p$dbPassword -D $db 2>&1 < $i | grep -v mysql:
done
# Delete old vanish files #
#if [ $(find ${base_dir}/*.atrn | wc -l) -gt 0 ]; then
#    for path in `find ${base_dir}/*.atrn | cut -d'.' -f 1 `;
#    do
#        sudo rm -rf ${path}.*
#    done
#fi
echo -e "\n$(date +%Y-%m-%d" "%H:%M:%S) ${GRE}[INFO] Transfer $account data finished.${NOC}"
import TimeSyncController from '../../src/streaming/controllers/TimeSyncController';
import Events from '../../src/core/events/Events';
import EventBus from '../../src/core/EventBus';
import Settings from '../../src/core/Settings';
import ErrorHandlerMock from './mocks/ErrorHandlerMock';
const expect = require('chai').expect;
const context = {};
const eventBus = EventBus(context).getInstance();
const errHandlerMock = new ErrorHandlerMock();
const sinon = require('sinon');

describe('TimeSyncController', function () {
    let timeSyncController;
    let settings = Settings(context).getInstance();

    beforeEach(function () {
        // Replace XMLHttpRequest with sinon's fake so each test can inspect
        // and answer the controller's time-server requests synchronously.
        global.XMLHttpRequest = sinon.useFakeXMLHttpRequest();
        this.requests = [];
        global.XMLHttpRequest.onCreate = function (xhr) {
            this.requests.push(xhr);
        }.bind(this);
        timeSyncController = TimeSyncController(context).getInstance();
        timeSyncController.setConfig({
            settings,
            errHandler: errHandlerMock
        });
    });

    afterEach(function () {
        timeSyncController.reset();
        timeSyncController = null;
        settings.reset();
    });

    it('should trigger TIME_SYNCHRONIZATION_COMPLETED when time source is not defined and no date header is used', function (done) {
        function onCompleted() {
            eventBus.off(Events.TIME_SYNCHRONIZATION_COMPLETED, onCompleted, this);
            done();
        }
        eventBus.on(Events.TIME_SYNCHRONIZATION_COMPLETED, onCompleted, this);
        settings.update({ streaming: { utcSynchronization: {useManifestDateHeaderTimeSource: false }} });
        timeSyncController.initialize();
        // No UTCTiming sources at all — sync should still complete.
        timeSyncController.attemptSync([]);
    });

    it('should trigger UPDATE_TIME_SYNC_OFFSET when time source is not defined and no date header is used', function (done) {
        function onCompleted(e) {
            eventBus.off(Events.UPDATE_TIME_SYNC_OFFSET, onCompleted, this);
            // With nothing to sync against, the reported offset is NaN.
            check(done, function () {
                expect(e.offset).to.be.NaN
            });
        }
        eventBus.on(Events.UPDATE_TIME_SYNC_OFFSET, onCompleted, this);
        settings.update({ streaming: { utcSynchronization: {useManifestDateHeaderTimeSource: false }} });
        timeSyncController.initialize();
        timeSyncController.attemptSync([], true);
    });

    it('should synchronize time when time source is defined', function (done) {
        let self = this;
        let date = new Date();
        function onCompleted() {
            eventBus.off(Events.TIME_SYNCHRONIZATION_COMPLETED, onCompleted, this);
            done();
        }
        eventBus.on(Events.TIME_SYNCHRONIZATION_COMPLETED, onCompleted, this);
        timeSyncController.initialize();
        timeSyncController.attemptSync([{
            schemeIdUri: 'urn:mpeg:dash:utc:http-xsdate:2014',
            value: 'https://time.akamai.com/?iso'
        }], true);
        // simulate a response from the fake time server
        self.requests[0].respond(200, {
            'Content-Type': 'text/plain; charset=ISO-8859-1'
        }, date.toString());
    });

    it('should calculate offset when time source is defined', function (done) {
        let self = this;
        let date = new Date();
        function onCompleted(e) {
            eventBus.off(Events.UPDATE_TIME_SYNC_OFFSET, onCompleted, this);
            check(done, function () {
                expect(e.offset).to.be.a('number');
            });
        }
        eventBus.on(Events.UPDATE_TIME_SYNC_OFFSET, onCompleted, this);
        timeSyncController.initialize();
        timeSyncController.attemptSync([{
            schemeIdUri: 'urn:mpeg:dash:utc:http-xsdate:2014',
            value: 'https://time.akamai.com/?iso'
        }], true);
        // simulate a response from the fake time server
        self.requests[0].respond(200, {
            'Content-Type': 'text/plain; charset=ISO-8859-1'
        }, date.toString());
    });
});
// Run an assertion callback and forward the outcome to mocha's `done`:
// `done()` on success, `done(error)` when the callback throws. This lets
// async event handlers report assertion failures instead of timing out.
function check(done, assertions) {
    try {
        assertions();
        done();
    } catch (failure) {
        done(failure);
    }
}
|
// Bubble sort: repeatedly sweep the array, swapping adjacent out-of-order
// pairs, until a full pass makes no swap. Sorts `arr` in place and returns it.
// Fixes: removed the per-iteration console.log debug output, and stop the
// inner loop at length - 1 so arr[i + 1] never reads past the end (the old
// bound compared the last element against undefined every pass).
const bubbleSort = (arr) => {
    let swappedValue;
    do {
        swappedValue = false;
        for (let i = 0; i < arr.length - 1; i++) {
            if (arr[i] > arr[i + 1]) {
                const temp = arr[i];
                arr[i] = arr[i + 1];
                arr[i + 1] = temp;
                swappedValue = true;
            }
        }
    } while (swappedValue);
    return arr;
};
console.log( bubbleSort( [ 5, 6, 7, 3, 2, 1 ] ) ); // [1, 2, 3, 5, 6, 7]
|
def is_divisible_by_7(number):
    """Return True when ``number`` is an exact multiple of 7, else False."""
    remainder = number % 7
    return remainder == 0
# Demo: 42 = 6 * 7, so the check succeeds.
result = is_divisible_by_7(42)
print(result)  # True
// Log whether `year` is a leap year. Feb 29 only exists in leap years, so we
// set the date to Feb 29 and see whether it survives (an invalid date rolls
// over to Mar 1). Fixes: coerce string input (e.g. from prompt) to a number,
// and use setUTCFullYear instead of the Date(year, ...) constructor, which
// remaps years 0-99 to 1900-1999 and broke inputs like "96".
function checkLeapYear(year) {
    const y = Number(year);
    const probe = new Date(0);
    probe.setUTCFullYear(y, 1, 29);
    const leap = probe.getUTCDate() === 29;
    if (leap) {
        console.log(year + ' is a leap year');
    } else {
        console.log(year + ' is not a leap year');
    }
}
// Browser-only demo: prompt returns a string (or null if cancelled).
let year = prompt('Enter a year:');
checkLeapYear(year);
import { task } from "../internal/core/config/config-env";
import { getClient } from "../lib/client";
import { PolarRuntimeEnvironment, TaskArguments } from "../types";
import { TASK_NODE_INFO } from "./task-names";
// Registers the node-info task with the Polar task runner.
export default function (): void {
  task(TASK_NODE_INFO, "Prints node info and status")
    .setAction(nodeInfo);
}

// Prints the configured network's name, chain id, current block height and
// the node's own info; wraps REST failures with a descriptive error.
async function nodeInfo (_taskArgs: TaskArguments, env: PolarRuntimeEnvironment): Promise<void> {
  const client = getClient(env.network);
  console.log("Network:", env.network.name);
  console.log("ChainId:", await client.getChainId());
  console.log("Block height:", await client.getHeight());
  const nodeInfo = await client.restClient.nodeInfo()
    // eslint-disable-next-line
    .catch((err) => { throw new Error(`Could not fetch node info: ${err}`); });
  console.log('Node Info: ', nodeInfo);
}
|
###-begin-karma-completion-###
#
# karma command completion script
# This is stolen from npm. Thanks @isaac!
#
# Installation: karma completion >> ~/.bashrc (or ~/.zshrc)
# Or, maybe: karma completion > /usr/local/etc/bash_completion.d/karma
#
# bash: delegate completion to `karma completion`, passing the current
# command line through the COMP_* environment variables.
if type complete &>/dev/null; then
    __karma_completion () {
        local si="$IFS"
        IFS=$'\n' COMPREPLY=($(COMP_CWORD="$COMP_CWORD" \
                               COMP_LINE="$COMP_LINE" \
                               COMP_POINT="$COMP_POINT" \
                               karma completion -- "${COMP_WORDS[@]}" \
                               2>/dev/null)) || return $?
        IFS="$si"
    }
    complete -F __karma_completion karma
# zsh (compdef): emulate the bash completion variables for karma.
elif type compdef &>/dev/null; then
    __karma_completion() {
        si=$IFS
        compadd -- $(COMP_CWORD=$((CURRENT-1)) \
                     COMP_LINE=$BUFFER \
                     COMP_POINT=0 \
                     karma completion -- "${words[@]}" \
                     2>/dev/null)
        IFS=$si
    }
    compdef __karma_completion karma
# older zsh (compctl): reconstruct word/line/point from editor state reads.
elif type compctl &>/dev/null; then
    __karma_completion () {
        local cword line point words si
        read -Ac words
        read -cn cword
        let cword-=1
        read -l line
        read -ln point
        si="$IFS"
        IFS=$'\n' reply=($(COMP_CWORD="$cword" \
                           COMP_LINE="$line" \
                           COMP_POINT="$point" \
                           karma completion -- "${words[@]}" \
                           2>/dev/null)) || return $?
        IFS="$si"
    }
    compctl -K __karma_completion karma
fi
###-end-karma-completion-###
|
# Clear the terminal, then run the bot in the foreground.
clear
python bot.py
#!/bin/bash
#
# Jobscript for launching dcmip2012 test2-0 on the NERSC Cori machine
#
# usage: sbatch jobscript-...
#SBATCH -J d20-theta      # job name
#SBATCH -o out_dcmip2-0.o%j  # output and error file name (%j expands to jobID)
#SBATCH -n 448            # total number of mpi tasks requested
#SBATCH -p regular        # queue (partition)
#SBATCH -t 03:00:00       # run time (hh:mm:ss)
#SBATCH -A acme           # charge hours to account 1
#SBATCH -C haswell        # use Haswell nodes

# ---- hydrostatic theta run ----
date
EXEC=../../../test_execs/theta-nlev30/theta-nlev30
cp ./namelist-h.nl input.nl
srun -n 448 $EXEC < ./input.nl
# Post-process plots, then rename the outputs with a hydro_ prefix so the
# nonhydrostatic run below does not overwrite them.
ncl plot_z_lon.ncl
ncl test200-range.ncl
mv -f dcmip2012_test2_0_u_t6.00.pdf hydro_test2_0_u_z.pdf
mv -f movies/dcmip2012_test2_01.nc.pdf hydro_test2_0_u.pdf
mv -f movies/dcmip2012_test2_01.nc movies/hydro_dcmip2012_test2_01.nc

# ---- nonhydrostatic theta run ----
EXEC=../../../test_execs/theta-nlev30/theta-nlev30
cp ./namelist-nh.nl input.nl
srun -n 448 $EXEC < ./input.nl
date
ncl plot_z_lon.ncl
ncl test200-range.ncl
mv -f dcmip2012_test2_0_u_t6.00.pdf nonhydro_test2_0_u_z.pdf
mv -f movies/dcmip2012_test2_01.nc.pdf nonhydro_test2_0_u.pdf
mv -f movies/dcmip2012_test2_01.nc movies/nonhydro_dcmip2012_test2_01.nc
date
|
<filename>controller/f2_non_biodegradable_dbin.js
// SMTP transport configured from config.json (service name + credentials).
const nodemailer = require('nodemailer'),
    config = require('../config.json'),
    transporter = nodemailer.createTransport({
        service: config.service,
        auth: {
            user: config["email-id"],
            pass: config["email-pass"]
        }
    }),
    // Helpers: distance to the apartment and the currently assigned collector.
    distance = require('../_helpers/_helpers_distance'),
    assignGC = require('../_helpers/_helpers_assignGarbageCollector')
exports.f2_nbController = async function(req,res,next){
let recipientEmailAddress = await assignGC.assignedGC();
let dist = await distance.distance()
var mailOptions = {
from: config["email-id"],
to: recipientEmailAddress,
subject: 'Peak Value for Floor 2 - Building XYZ , Landmark ABC, City : ### - Non Biodegradable Dustbin Reached',
text: 'Please Reach Out To Apartment, XYZ - Floor 2 For Collection Of Wastage @ c/o Non Biodegradable Dustbin.' + 'Distance From Your Location To Appartment Is: ' + dist + 'km.'
};
for(var g0_nbio = 1;g0_nbio<=10;g0_nbio++){
//considering 70% of wastage as threshold value.
if(g0_nbio == 7)
{
transporter.sendMail(mailOptions, function(error, info){
if (error) {
res.send({"peakValueReached":true,"Error":error.toString()})
} else {
res.send({"peakValueReached":true,"emailNotificationSent":true,"successLogs":info.response})
}
});
}
else
{
g0_nbio+1;
}
}
} |
import Ember from 'ember';
const { keys, create } = Object; // jshint ignore:line
const { computed, observer, $, run, on, typeOf, debug, isPresent } = Ember; // jshint ignore:line
const { defineProperty, get, set, inject, isEmpty, merge } = Ember; // jshint ignore:line
const a = Ember.A; // jshint ignore:line
// Empty mixin scaffold; the Ember helpers destructured above are kept in
// scope for future use (the jshint ignore comments suppress unused warnings).
export default Ember.Mixin.create({
});
|
//#####################################################################
// Copyright 2011.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class RIGID_TRIANGLE_COLLISIONS_GEOMETRY
//#####################################################################
#include <PhysBAM_Tools/Arrays/INDIRECT_ARRAY.h>
#include <PhysBAM_Tools/Log/LOG.h>
#include <PhysBAM_Geometry/Basic_Geometry/SEGMENT_2D.h>
#include <PhysBAM_Geometry/Basic_Geometry/TRIANGLE_3D.h>
#include <PhysBAM_Geometry/Implicit_Objects/IMPLICIT_OBJECT_TRANSFORMED.h>
#include <PhysBAM_Geometry/Spatial_Acceleration/SEGMENT_HIERARCHY.h>
#include <PhysBAM_Geometry/Spatial_Acceleration/TRIANGLE_HIERARCHY.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/HEXAHEDRALIZED_VOLUME.h>
#include <PhysBAM_Geometry/Topology_Based_Geometry/TETRAHEDRALIZED_VOLUME.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Collisions/RIGID_STRUCTURE_INTERACTION_GEOMETRY.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Collisions/RIGID_TRIANGLE_COLLISIONS_GEOMETRY.h>
#include <PhysBAM_Solids/PhysBAM_Rigids/Parallel_Computation/MPI_RIGIDS.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
// Starts with no MPI context and no mass modifier; intersections are
// disallowed and tolerances/output flags get their defaults.
template<class TV> RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
RIGID_TRIANGLE_COLLISIONS_GEOMETRY()
    :mpi_solids(0),mass_modifier(0)
{
    // set parameters
    Allow_Intersections(false);Set_Allow_Intersections_Tolerance();
    // output
    Output_Number_Checked(false);Set_Small_Number();
}
//#####################################################################
// Destructor
//#####################################################################
template<class TV> RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
~RIGID_TRIANGLE_COLLISIONS_GEOMETRY()
{
    // This class owns the per-structure interaction geometries; free them.
    structure_geometries.Delete_Pointers_And_Clean_Memory();
}
//#####################################################################
// Function Build_Collision_Geometry
//#####################################################################
// Rebuilds one RIGID_STRUCTURE_INTERACTION_GEOMETRY per structure and
// registers every unordered structure pair (i<j) as potentially interacting.
template<class TV> void RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
Build_Collision_Geometry()
{
    structure_geometries.Delete_Pointers_And_Clean_Memory();
    structure_geometries.Resize(structures.m);
    interacting_structure_pairs.Remove_All();
    for(int k=1;k<=structures.m;k++){
        // Only triangle-mesh structures are supported here.
        if(MESH_OBJECT<TV,TRIANGLE_MESH>* mesh=dynamic_cast<MESH_OBJECT<TV,TRIANGLE_MESH>*>(structures(k))) structure_geometries(k)=new RIGID_STRUCTURE_INTERACTION_GEOMETRY<TV>(mesh->particles);
        else PHYSBAM_FATAL_ERROR("Geometry type is not supported");
        structure_geometries(k)->Build_Collision_Geometry(*structures(k));}
    for(int i=1;i<=structures.m;i++) for(int j=i+1;j<=structures.m;j++) interacting_structure_pairs.Append(VECTOR<int,2>(i,j));
}
//#####################################################################
// Function Build_Topological_Structure_Of_Hierarchies
//#####################################################################
// Delegates hierarchy construction to each per-structure geometry.
template<class TV> void RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
Build_Topological_Structure_Of_Hierarchies()
{
    for(int k=1;k<=structure_geometries.m;k++){
        structure_geometries(k)->Build_Topological_Structure_Of_Hierarchies();}
    //if(mpi_solids) structure_geometries(k)->Update_Processor_Masks(mpi_solids->Partition(),
    //    mpi_solids->partition_id_from_particle_index);}
}
//#####################################################################
// Function Allow_Intersections
//#####################################################################
// When enabling intersections, each triangulated surface needs its element
// edges for intersection bookkeeping; initialize them lazily here.
template<class TV> void RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
Allow_Intersections(const bool allow_intersections_input)
{
    allow_intersections=allow_intersections_input;
    if(allow_intersections)
        for(int k=1;k<=structure_geometries.m;k++)
            if(!structure_geometries(k)->triangulated_surface->mesh.element_edges) structure_geometries(k)->triangulated_surface->mesh.Initialize_Element_Edges();
}
//#####################################################################
// Function Save_Self_Collision_Free_State
//#####################################################################
template<class TV> void RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
Save_Current_State(const ARRAY_VIEW<TV>& X,const ARRAY_VIEW<ROTATION<TV> >& rotation,const ARRAY_VIEW<TV>& V) // assumes mass does not change
{
for(int i=1;i<=structure_geometries.m;i++) structure_geometries(i)->Save_Current_State(X(i),rotation(i),V(i));
}
//#####################################################################
// Function Save_Self_Collision_Free_State
//#####################################################################
template<class TV> void RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
Save_Self_Collision_Free_State(const ARRAY_VIEW<TV>& X,const ARRAY_VIEW<ROTATION<TV> >& rotation,const ARRAY_VIEW<TV>& V) // assumes mass does not change
{
for(int i=1;i<=structure_geometries.m;i++) structure_geometries(i)->Save_Self_Collision_Free_State(X(i),rotation(i),V(i));
}
//#####################################################################
// Function Save_Self_Collision_Free_State
//#####################################################################
template<class TV> void RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
Save_Self_Collision_Free_State() // assumes mass does not change
{
for(int i=1;i<=structure_geometries.m;i++) structure_geometries(i)->Save_Self_Collision_Free_State();
}
//#####################################################################
// Function Restore_Self_Collision_Free_State
//#####################################################################
template<class TV> void RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
Restore_Self_Collision_Free_State()
{
for(int i=1;i<=structure_geometries.m;i++) structure_geometries(i)->Restore_Self_Collision_Free_State();
}
//#####################################################################
// Function Compute_Intersecting_Segment_Face_Pairs
//#####################################################################
template<class TV> void RIGID_TRIANGLE_COLLISIONS_GEOMETRY<TV>::
Compute_Intersecting_Segment_Face_Pairs()
{
// Deliberately unimplemented for the rigid variant; calling this aborts via the PHYSBAM_NOT_IMPLEMENTED macro.
PHYSBAM_NOT_IMPLEMENTED();
}
//####################################################################
// Explicit template instantiations for every supported scalar type / dimension pair.
#define INSTANTIATION_HELPER(T,d) \
template class RIGID_TRIANGLE_COLLISIONS_GEOMETRY<VECTOR<T,d> >;
INSTANTIATION_HELPER(float,1);
INSTANTIATION_HELPER(float,2);
INSTANTIATION_HELPER(float,3);
// Double-precision instantiations are included unless the build disables double support.
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
INSTANTIATION_HELPER(double,1);
INSTANTIATION_HELPER(double,2);
INSTANTIATION_HELPER(double,3);
#endif
|
<filename>szdc/parser/xlsx/converter_app_n/src/main/java/com/converter_app_n/StationDataRow.java
package com.converter_app_n;
import lombok.Data;
/**
 * One row of per-station timetable data: the station name plus its regular,
 * arrival and departure times.
 *
 * <p>Lombok's {@code @Data} generates getters for all fields, setters for the
 * non-final fields, {@code equals}/{@code hashCode} and {@code toString}.
 */
@Data
public class StationDataRow {
// Station name; immutable once the row is created (no setter is generated).
private final String station;
// Time fields are populated after construction via the generated setters.
// NOTE(review): exact semantics of RegularTime vs MovementTime are defined elsewhere — confirm before relying on them.
private RegularTime regularTime;
private MovementTime arrivalTime;
private MovementTime departureTime;
public StationDataRow(String station) {
this.station = station;
}
}
|
<filename>core/src/games/stendhal/client/sprite/SequenceSprite.java
/***************************************************************************
* (C) Copyright 2003-2012 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.sprite;
import temp.java.awt.Graphics;
/**
 * An animated sprite that plays its frame sequence exactly once, then
 * notifies the attached listener that the sequence has finished.
 */
public class SequenceSprite extends AnimatedSprite {
	/** Receiver of the end-of-sequence notification. */
	private final SequenceSpriteListener listener;
	/** Book-keeping flag so the listener is notified exactly once. */
	private boolean done;

	/**
	 * Create a new SequenceSprite with a constant delay between the frames.
	 *
	 * @param listener notified once the whole frame sequence has been drawn
	 * @param frames animation frames
	 * @param delay delay between frames in milliseconds
	 */
	public SequenceSprite(SequenceSpriteListener listener, Sprite[] frames, int delay) {
		super(frames, delay);
		loop = false;
		lastUpdate = System.currentTimeMillis();
		this.listener = listener;
	}

	@Override
	public void draw(final Graphics g, final int x, final int y) {
		super.draw(g, x, y);
		// The superclass nulls `sprite` when a non-looping animation runs out
		// of frames — that is the end-of-sequence condition we watch for.
		if (!done && (sprite == null)) {
			listener.endSequence();
			done = true;
		}
	}

	/**
	 * Interface for objects that receive a notification at the end of the
	 * sprite sequence.
	 */
	public interface SequenceSpriteListener {
		/** Called when all the image frames have been drawn. */
		void endSequence();
	}
}
|
<reponame>Se-Gl/omencss
// Core brand palette: each entry pairs a background utility class with its
// display name and hex value.
export const primaryColors = [
{ className: 'bg-white', colorName: 'White', colorHex: '#fdfdfd' },
{ className: 'bg-black', colorName: 'Black', colorHex: '#101010' },
{ className: 'bg-greencss', colorName: 'GreenCSS', colorHex: '#3e7a02' },
{ className: 'bg-green', colorName: 'Green', colorHex: '#80f906' }
]
// Secondary accent palette; same shape as primaryColors.
export const secondaryColors = [
{ className: 'bg-red', colorName: 'Red', colorHex: '#f90606' },
{ className: 'bg-orange', colorName: 'Orange', colorHex: '#f99106' },
{ className: 'bg-yellow', colorName: 'Yellow', colorHex: '#f9cf06' },
{ className: 'bg-blue', colorName: 'Blue', colorHex: '#066ef9' },
{ className: 'bg-turquoise', colorName: 'Turquoise', colorHex: '#06f9f9' },
{ className: 'bg-purple', colorName: 'Purple', colorHex: '#8006f9' },
{ className: 'bg-magenta', colorName: 'Magenta', colorHex: '#f906e8' }
]
export const spotColors = [
{ className: 'bg-red-1', colorName: 'red-1', colorHex: '#fa1e1e' },
{ className: 'bg-red-2', colorName: 'red-2', colorHex: '#fa3535' },
{ className: 'bg-red-3', colorName: 'red-3', colorHex: '#fb4d4d' },
{ className: 'bg-red-4', colorName: 'red-4', colorHex: '#fb6565' },
{ className: 'bg-red-5', colorName: 'red-5', colorHex: '#fc7c7c' },
{ className: 'bg-red-6', colorName: 'red-6', colorHex: '#fc9494' },
{ className: 'bg-red-7', colorName: 'red-7', colorHex: '#fdacac' },
{ className: 'bg-red-8', colorName: 'red-8', colorHex: '#fec3c3' },
{ className: 'bg-red-9', colorName: 'red-9', colorHex: '#fedbdb' },
{ className: 'bg-red-10', colorName: 'red-10', colorHex: '#fff3f3' },
{ className: 'bg-orange-1', colorName: 'orange-1', colorHex: '#fa9b1e' },
{ className: 'bg-orange-2', colorName: 'orange-2', colorHex: '#faa635' },
{ className: 'bg-orange-3', colorName: 'orange-3', colorHex: '#fbb04d' },
{ className: 'bg-orange-4', colorName: 'orange-4', colorHex: '#fbbb65' },
{ className: 'bg-orange-5', colorName: 'orange-5', colorHex: '#fcc57c' },
{ className: 'bg-orange-6', colorName: 'orange-1', colorHex: '#fcd094' },
{ className: 'bg-orange-7', colorName: 'orange-2', colorHex: '#fddaac' },
{ className: 'bg-orange-8', colorName: 'orange-3', colorHex: '#fee5c3' },
{ className: 'bg-orange-9', colorName: 'orange-4', colorHex: '#feefdb' },
{ className: 'bg-orange-10', colorName: 'orange-5', colorHex: '#fffaf3' },
{ className: 'bg-yellow-1', colorName: 'yellow-1', colorHex: '#fad41e' },
{ className: 'bg-yellow-2', colorName: 'yellow-2', colorHex: '#fad835' },
{ className: 'bg-yellow-3', colorName: 'yellow-3', colorHex: '#fbdd4d' },
{ className: 'bg-yellow-4', colorName: 'yellow-4', colorHex: '#fbe165' },
{ className: 'bg-yellow-5', colorName: 'yellow-5', colorHex: '#fce67c' },
{ className: 'bg-yellow-6', colorName: 'yellow-6', colorHex: '#fcea94' },
{ className: 'bg-yellow-7', colorName: 'yellow-7', colorHex: '#fdefac' },
{ className: 'bg-yellow-8', colorName: 'yellow-8', colorHex: '#fef3c3' },
{ className: 'bg-yellow-9', colorName: 'yellow-9', colorHex: '#fef8db' },
{ className: 'bg-yellow-10', colorName: 'yellow-10', colorHex: '#fffdf3' },
{ className: 'bg-greencss-1', colorName: 'greencss-1', colorHex: '#4a9202' },
{ className: 'bg-greencss-2', colorName: 'greencss-2', colorHex: '#56aa03' },
{ className: 'bg-greencss-3', colorName: 'greencss-3', colorHex: '#62c203' },
{ className: 'bg-greencss-4', colorName: 'greencss-4', colorHex: '#6ed904' },
{ className: 'bg-greencss-5', colorName: 'greencss-5', colorHex: '#7bf104' },
{ className: 'bg-greencss-6', colorName: 'greencss-6', colorHex: '#87fb12' },
{ className: 'bg-greencss-7', colorName: 'greencss-7', colorHex: '#93fc2a' },
{ className: 'bg-greencss-8', colorName: 'greencss-8', colorHex: '#9ffc42' },
{ className: 'bg-greencss-9', colorName: 'greencss-9', colorHex: '#abfc5a' },
{ className: 'bg-greencss-10', colorName: 'greencss-10', colorHex: '#b7fd72' },
{ className: 'bg-green-1', colorName: 'green-1', colorHex: '#8cfa1e' },
{ className: 'bg-green-2', colorName: 'green-2', colorHex: '#98fa35' },
{ className: 'bg-green-3', colorName: 'green-3', colorHex: '#a4fb4d' },
{ className: 'bg-green-4', colorName: 'green-4', colorHex: '#b0fb65' },
{ className: 'bg-green-5', colorName: 'green-5', colorHex: '#bcfc7c' },
{ className: 'bg-green-6', colorName: 'green-6', colorHex: '#c8fc94' },
{ className: 'bg-green-7', colorName: 'green-7', colorHex: '#d4fdac' },
{ className: 'bg-green-8', colorName: 'green-8', colorHex: '#e1fec3' },
{ className: 'bg-green-9', colorName: 'green-9', colorHex: '#edfedb' },
{ className: 'bg-green-10', colorName: 'green-10', colorHex: '#f9fff3' },
{ className: 'bg-blue-1', colorName: 'blue-1', colorHex: '#1e7cfa' },
{ className: 'bg-blue-2', colorName: 'blue-2', colorHex: '#358afa' },
{ className: 'bg-blue-3', colorName: 'blue-3', colorHex: '#4d97fb' },
{ className: 'bg-blue-4', colorName: 'blue-4', colorHex: '#65a5fb' },
{ className: 'bg-blue-5', colorName: 'blue-5', colorHex: '#7cb3fc' },
{ className: 'bg-blue-6', colorName: 'blue-6', colorHex: '#94c1fc' },
{ className: 'bg-blue-7', colorName: 'blue-7', colorHex: '#accefd' },
{ className: 'bg-blue-8', colorName: 'blue-8', colorHex: '#c3dcfe' },
{ className: 'bg-blue-9', colorName: 'blue-9', colorHex: '#dbeafe' },
{ className: 'bg-blue-10', colorName: 'blue-10', colorHex: '#f3f8ff' },
{ className: 'bg-turquoise-1', colorName: 'turquoise-1', colorHex: '#1efafa' },
{ className: 'bg-turquoise-2', colorName: 'turquoise-2', colorHex: '#35fafa' },
{ className: 'bg-turquoise-3', colorName: 'turquoise-3', colorHex: '#4dfbfb' },
{ className: 'bg-turquoise-4', colorName: 'turquoise-4', colorHex: '#65fbfb' },
{ className: 'bg-turquoise-5', colorName: 'turquoise-5', colorHex: '#7cfcfc' },
{ className: 'bg-turquoise-6', colorName: 'turquoise-6', colorHex: '#94fcfc' },
{ className: 'bg-turquoise-7', colorName: 'turquoise-7', colorHex: '#acfdfd' },
{ className: 'bg-turquoise-8', colorName: 'turquoise-8', colorHex: '#c3fefe' },
{ className: 'bg-turquoise-9', colorName: 'turquoise-9', colorHex: '#dbfefe' },
{ className: 'bg-turquoise-10', colorName: 'turquoise-10', colorHex: '#f3ffff' },
{ className: 'bg-purple-1', colorName: 'purple-1', colorHex: '#8c1efa' },
{ className: 'bg-purple-2', colorName: 'purple-2', colorHex: '#9835fa' },
{ className: 'bg-purple-3', colorName: 'purple-3', colorHex: '#a44dfb' },
{ className: 'bg-purple-4', colorName: 'purple-4', colorHex: '#b065fb' },
{ className: 'bg-purple-5', colorName: 'purple-5', colorHex: '#bc7cfc' },
{ className: 'bg-purple-6', colorName: 'purple-6', colorHex: '#c894fc' },
{ className: 'bg-purple-7', colorName: 'purple-7', colorHex: '#d4acfd' },
{ className: 'bg-purple-8', colorName: 'purple-8', colorHex: '#e1c3fe' },
{ className: 'bg-purple-9', colorName: 'purple-9', colorHex: '#eddbfe' },
{ className: 'bg-purple-10', colorName: 'purple-10', colorHex: '#f9f3ff' },
{ className: 'bg-magenta-1', colorName: 'magenta-1', colorHex: '#fa1eea' },
{ className: 'bg-magenta-2', colorName: 'magenta-2', colorHex: '#fa35ec' },
{ className: 'bg-magenta-3', colorName: 'magenta-3', colorHex: '#fb4def' },
{ className: 'bg-magenta-4', colorName: 'magenta-4', colorHex: '#fb65f1' },
{ className: 'bg-magenta-5', colorName: 'magenta-5', colorHex: '#fc7cf3' },
{ className: 'bg-magenta-6', colorName: 'magenta-6', colorHex: '#fc94f5' },
{ className: 'bg-magenta-7', colorName: 'magenta-7', colorHex: '#fdacf7' },
{ className: 'bg-magenta-8', colorName: 'magenta-8', colorHex: '#fec3f9' },
{ className: 'bg-magenta-9', colorName: 'magenta-9', colorHex: '#fedbfc' },
{ className: 'bg-magenta-10', colorName: 'magenta-10', colorHex: '#fff3fe' },
{ className: 'bg-black-1', colorName: 'black-1', colorHex: '#1c1c1c' },
{ className: 'bg-black-2', colorName: 'black-2', colorHex: '#282828' },
{ className: 'bg-black-3', colorName: 'black-3', colorHex: '#343434' },
{ className: 'bg-black-4', colorName: 'black-4', colorHex: '#404040' },
{ className: 'bg-black-5', colorName: 'black-5', colorHex: '#4d4d4d' },
{ className: 'bg-black-6', colorName: 'black-6', colorHex: '#595959' },
{ className: 'bg-black-7', colorName: 'black-7', colorHex: '#656565' },
{ className: 'bg-black-8', colorName: 'black-8', colorHex: '#717171' },
{ className: 'bg-black-9', colorName: 'black-9', colorHex: '#7d7d7d' },
{ className: 'bg-black-10', colorName: 'black-10', colorHex: '#898989' }
]
|
package adminhandlers
import (
"encoding/json"
"log"
"net/http"
"github.com/backpulse/core/database"
"github.com/backpulse/core/models"
"github.com/backpulse/core/utils"
"github.com/gorilla/mux"
"github.com/teris-io/shortid"
"gopkg.in/mgo.v2/bson"
)
// CreateAlbum : create new album
//
// Resolves the site from the {name} path variable, authorizes the requesting
// user, decodes the album payload from the body, then persists it with a
// fresh short id and an index placing it after the existing albums.
func CreateAlbum(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
name := vars["name"]
// Lookup errors are ignored on purpose: a failed lookup yields zero values
// that fail the authorization check below.
// NOTE(review): confirm utils.IsAuthorized rejects zero-value site/user.
site, _ := database.GetSiteByName(name)
user, _ := database.GetUserByID(utils.GetUserObjectID(r))
if !utils.IsAuthorized(site, user) {
utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
return
}
var album models.Album
/* Parse json to models.Album */
err := json.NewDecoder(r.Body).Decode(&album)
if err != nil {
utils.RespondWithJSON(w, http.StatusNotAcceptable, "error", nil)
return
}
// Server-assigned identity fields override anything sent by the client.
album.ShortID, _ = shortid.Generate()
album.SiteID = site.ID
album.OwnerID = site.OwnerID
album.ID = bson.NewObjectId()
// Append at the end of the list: the index equals the current album count.
albums, _ := database.GetAlbums(site.ID)
album.Index = len(albums)
err = database.AddAlbum(album)
if err != nil {
log.Print(err)
utils.RespondWithJSON(w, http.StatusInternalServerError, "error", nil)
return
}
utils.RespondWithJSON(w, http.StatusOK, "success", nil)
return
}
// GetAlbums : return albums of site
//
// Responds with every album belonging to the site named in the URL, after
// verifying the requesting user may access that site.
func GetAlbums(w http.ResponseWriter, r *http.Request) {
	siteName := mux.Vars(r)["name"]
	site, _ := database.GetSiteByName(siteName)
	user, _ := database.GetUserByID(utils.GetUserObjectID(r))

	if !utils.IsAuthorized(site, user) {
		utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
		return
	}

	albums, err := database.GetAlbums(site.ID)
	if err != nil {
		utils.RespondWithJSON(w, http.StatusInternalServerError, "error", nil)
		return
	}
	utils.RespondWithJSON(w, http.StatusOK, "success", albums)
}
// GetAlbum : return specific album
//
// Responds with the album identified by the {id} path variable, including its
// tracks, provided the album belongs to the authorized site.
func GetAlbum(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	name := vars["name"]
	id := vars["id"]
	site, _ := database.GetSiteByName(name)
	user, _ := database.GetUserByID(utils.GetUserObjectID(r))
	if !utils.IsAuthorized(site, user) {
		utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
		return
	}
	// bson.ObjectIdHex panics on malformed ids; validate first so a bad URL
	// cannot crash the handler.
	if !bson.IsObjectIdHex(id) {
		utils.RespondWithJSON(w, http.StatusNotFound, "not_found", nil)
		return
	}
	album, err := database.GetAlbum(bson.ObjectIdHex(id))
	if err != nil {
		utils.RespondWithJSON(w, http.StatusInternalServerError, "error", nil)
		return
	}
	// Reject albums that belong to a different site than the one requested.
	if album.SiteID != site.ID {
		utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
		return
	}
	tracks, _ := database.GetAlbumTracks(album.ID)
	album.Tracks = tracks
	utils.RespondWithJSON(w, http.StatusOK, "success", album)
}
// DeleteAlbum : remove album from db
//
// Deletes the album identified by the {id} path variable together with its
// tracks, provided the album belongs to the authorized site.
func DeleteAlbum(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	name := vars["name"]
	id := vars["id"]
	site, _ := database.GetSiteByName(name)
	user, _ := database.GetUserByID(utils.GetUserObjectID(r))
	if !utils.IsAuthorized(site, user) {
		utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
		return
	}
	// bson.ObjectIdHex panics on malformed ids; validate before converting.
	if !bson.IsObjectIdHex(id) {
		utils.RespondWithJSON(w, http.StatusNotFound, "not_found", nil)
		return
	}
	album, err := database.GetAlbum(bson.ObjectIdHex(id))
	// Fix: the lookup error was previously ignored, so a failed fetch fell
	// through to the SiteID check with a zero-value album.
	if err != nil {
		utils.RespondWithJSON(w, http.StatusInternalServerError, "error", nil)
		return
	}
	if album.SiteID != site.ID {
		utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
		return
	}
	// Remove the album's tracks first so no orphaned tracks remain.
	// NOTE(review): the GetAlbum handler loads Tracks separately via
	// GetAlbumTracks — confirm database.GetAlbum populates Tracks here.
	for _, track := range album.Tracks {
		database.RemoveTrack(track.ID)
	}
	err = database.RemoveAlbum(album.ID)
	if err != nil {
		utils.RespondWithJSON(w, http.StatusInternalServerError, "error", nil)
		return
	}
	utils.RespondWithJSON(w, http.StatusOK, "success", nil)
}
// UpdateAlbum : rename & change image
//
// Decodes the updated album payload and applies it to the album identified by
// the {id} path variable, provided it belongs to the authorized site.
func UpdateAlbum(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	siteName := vars["name"]
	id := vars["id"]
	site, _ := database.GetSiteByName(siteName)
	user, _ := database.GetUserByID(utils.GetUserObjectID(r))
	if !utils.IsAuthorized(site, user) {
		utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
		return
	}
	var album models.Album
	/* Parse json to models.Album */
	err := json.NewDecoder(r.Body).Decode(&album)
	if err != nil {
		utils.RespondWithJSON(w, http.StatusNotAcceptable, "error", nil)
		return
	}
	// bson.ObjectIdHex panics on malformed ids; validate before converting.
	if !bson.IsObjectIdHex(id) {
		utils.RespondWithJSON(w, http.StatusNotFound, "not_found", nil)
		return
	}
	// Ensure the target album exists and belongs to this site before updating.
	a, err := database.GetAlbum(bson.ObjectIdHex(id))
	if err != nil {
		utils.RespondWithJSON(w, http.StatusNotFound, "not_found", nil)
		return
	}
	if a.SiteID != site.ID {
		utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
		return
	}
	err = database.UpdateAlbum(bson.ObjectIdHex(id), album)
	if err != nil {
		utils.RespondWithJSON(w, http.StatusInternalServerError, "error", nil)
		return
	}
	utils.RespondWithJSON(w, http.StatusOK, "success", nil)
}
// UpdateAlbumsIndexes : update order of albums
//
// Decodes the full album list (carrying the new ordering) from the request
// body and persists the new indexes for the authorized site.
func UpdateAlbumsIndexes(w http.ResponseWriter, r *http.Request) {
vars := mux.Vars(r)
siteName := vars["name"]
site, _ := database.GetSiteByName(siteName)
user, _ := database.GetUserByID(utils.GetUserObjectID(r))
if !utils.IsAuthorized(site, user) {
utils.RespondWithJSON(w, http.StatusUnauthorized, "unauthorized", nil)
return
}
var albums []models.Album
/* Parse json to models.Album */
err := json.NewDecoder(r.Body).Decode(&albums)
if err != nil {
utils.RespondWithJSON(w, http.StatusNotAcceptable, "error", nil)
return
}
err = database.UpdateAlbumsIndexes(site.ID, albums)
if err != nil {
utils.RespondWithJSON(w, http.StatusNotAcceptable, "error", nil)
return
}
utils.RespondWithJSON(w, http.StatusOK, "success", nil)
return
}
|
package com.sample;
/**
 * A player with a name, an email address and an assigned coach.
 * Collaborators are supplied through setters (e.g. by a DI container).
 */
public class Player {

	private Coach myCoach;
	private String name;
	private String email;

	/** @param myCoach the coach whose daily training this player follows */
	public void setMyCoach(Coach myCoach) {
		this.myCoach = myCoach;
	}

	public void setName(String name) {
		this.name = name;
	}

	public void setEmail(String email) {
		this.email = email;
	}

	/** Prints the player's details followed by today's training from the coach. */
	public void dailyRoutine() {
		System.out.println("name : " + name);
		System.out.println("email : " + email);
		final String training = myCoach.dailyTraining();
		System.out.println(training);
	}
}
|
// @codepen
import * as React from 'react';
import {
ComboBox,
Fabric,
IComboBox,
IComboBoxOption,
mergeStyles,
PrimaryButton,
SelectableOptionMenuItemType
} from 'office-ui-fabric-react/lib/index';
// Shared option set for all examples: two header-separated groups split by a
// divider, with one disabled entry (F).
const INITIAL_OPTIONS: IComboBoxOption[] = [
{ key: 'Header1', text: 'First heading', itemType: SelectableOptionMenuItemType.Header },
{ key: 'A', text: 'Option A' },
{ key: 'B', text: 'Option B' },
{ key: 'C', text: 'Option C' },
{ key: 'D', text: 'Option D' },
{ key: 'divider', text: '-', itemType: SelectableOptionMenuItemType.Divider },
{ key: 'Header2', text: 'Second heading', itemType: SelectableOptionMenuItemType.Header },
{ key: 'E', text: 'Option E' },
{ key: 'F', text: 'Option F', disabled: true },
{ key: 'G', text: 'Option G' },
{ key: 'H', text: 'Option H' },
{ key: 'I', text: 'Option I' },
{ key: 'J', text: 'Option J' }
];
// Page styles: vertical spacing between the examples and a max width for
// every ComboBox rendered inside the wrapper.
const wrapperClassName = mergeStyles({
selectors: {
'& > *': { marginBottom: '20px' },
'& .ms-ComboBox': { maxWidth: '300px' }
}
});
// tslint:disable:jsx-no-lambda
/**
 * Demo page showing the main ComboBox configurations: uncontrolled
 * single/multi-select, placeholder text, persisted menu, error message and
 * disabled states. Only the `options` prop is required by ComboBox.
 */
export class ComboBoxBasicExample extends React.Component<{}, {}> {
// Ref used by the button below to programmatically focus/open the first ComboBox.
private _basicComboBox = React.createRef<IComboBox>();
public render(): JSX.Element {
return (
<Fabric className={wrapperClassName}>
<div>
{/* This example demonstrates various props, but only `options` is required. */}
<ComboBox
defaultSelectedKey="C"
label="Single-select ComboBox (uncontrolled, allowFreeform: T, autoComplete: T)"
allowFreeform
autoComplete="on"
options={INITIAL_OPTIONS}
componentRef={this._basicComboBox}
onFocus={() => console.log('onFocus called for basic uncontrolled example')}
onBlur={() => console.log('onBlur called for basic uncontrolled example')}
onMenuOpen={() => console.log('ComboBox menu opened')}
onPendingValueChanged={(option, pendingIndex, pendingValue) =>
console.log(`Preview value was changed. Pending index: ${pendingIndex}. Pending value: ${pendingValue}.`)
}
/>
{/* focus(true) both focuses the ComboBox and opens its menu. */}
<PrimaryButton
text="Open ComboBox"
style={{ display: 'block', marginTop: '10px' }}
onClick={() => {
if (this._basicComboBox.current) {
this._basicComboBox.current.focus(true);
}
}}
/>
</div>
<ComboBox
multiSelect
defaultSelectedKey={['C', 'E']}
label="Multi-select ComboBox (uncontrolled)"
allowFreeform
autoComplete="on"
options={INITIAL_OPTIONS}
/>
<ComboBox
label="ComboBox with placeholder text"
placeholder="Select or type an option"
allowFreeform
autoComplete="on"
options={INITIAL_OPTIONS}
/>
<ComboBox
label="ComboBox with persisted menu"
defaultSelectedKey="B"
allowFreeform
autoComplete="on"
persistMenu={true}
options={INITIAL_OPTIONS}
/>
<ComboBox
label="ComboBox with error message"
defaultSelectedKey="B"
errorMessage="Oh no! This ComboBox has an error!"
options={INITIAL_OPTIONS}
/>
<ComboBox disabled label="Disabled ComboBox" defaultSelectedKey="D" options={INITIAL_OPTIONS} />
</Fabric>
);
}
}
|
import React from "react"
const Footer = () => (
<div className="flex flex-row justify-between text-xs font-monospace opacity-75">
<span>© Cap Finance</span>
<div className="flex flex-row space-x-4 sm:space-x-12">
<a
href="https://twitter.com/CapDotFinance"
target="_blank"
className="hover:text-primary-100 transition duration-300"
>
TWITTER
</a>
<a
href="https://gov.cap.exchange"
target="_blank"
className="hover:text-primary-100 transition duration-300"
>
GOVERNANCE
</a>
<a
href="https://blog.cap.finance"
target="_blank"
className="hover:text-primary-100 transition duration-300"
>
BLOG
</a>
</div>
</div>
)
export default Footer
|
df = pd.DataFrame({'Name': names, 'Score': scores}) |
module ProviderInterface
  # ViewComponent previews for the rejection-feedback component, covering a
  # direct rejection and a rejection by default.
  class FeedbackPreviewComponentPreview < ViewComponent::Preview
    def feedback_flow_for_direct_rejection
      find_application_choice(rejected_by_default: false)
      render_component
    end

    def feedback_flow_for_rejected_by_default
      find_application_choice(rejected_by_default: true)
      render_component
    end

  private

    # Picks a random application choice matching the flag; may leave
    # @application_choice nil when no matching record exists.
    def find_application_choice(rejected_by_default:)
      @application_choice = ApplicationChoice.order('RANDOM()').find_by(rejected_by_default: rejected_by_default)
    end

    # Renders the preview, or the missing-test-data page when no choice was found.
    def render_component
      if @application_choice.nil?
        render template: 'support_interface/docs/missing_test_data'
      else
        render ProviderInterface::FeedbackPreviewComponent.new(
          application_choice: @application_choice,
          rejection_reason: Faker::Lorem.paragraph_by_chars(number: 200),
        )
      end
    end
  end
end
|
<gh_stars>10-100
/* eslint-disable no-await-in-loop */
import * as fs from 'fs-extra'
import * as path from 'path'
import * as os from 'os'
import * as chalk from 'chalk'
import * as glob from 'glob'
import * as deepmerge from 'deepmerge'
import * as Listr from 'listr'
import { prompt } from 'inquirer'
import findRoot from '../utils/findRoot'
import cmd from '../utils/cmd'
import cleanString from '../utils/cleanString'
import getDirectories from '../utils/getDirectories'
import logger from '../utils/logger'
async function getNameForApp(appName, currentApps) {
appName = (await prompt({
type: 'input',
name: 'input',
message: `Application name '${appName}' is already taken, please choose a different name:`,
})).input
appName = cleanString(appName)
if (currentApps[appName]) return getNameForApp(appName, currentApps)
return appName
}
/**
 * Add a firelayer template (by name or git repo URL) to the current project:
 * clones the template at its latest tag, copies apps/config/rules/migrations
 * into the workspace, merges package.json scripts, firebase.json and
 * .firebaserc, and returns the template's optional `firelayer.js` hook.
 *
 * @param name    template name (resolved to firelayer/<name>-template) or a .git URL
 * @param options silent: suppress console output; dependenciesPrompt: offer to install deps
 * @returns the template's exported setup function (a no-op when absent)
 */
export default async (name = '', options = { silent: true, dependenciesPrompt: false }) => {
const root = await findRoot()
if (!name) {
name = (await prompt({
type: 'input',
name: 'input',
message: 'What\'s the name or git repository of the template ? (ex: starter)',
})).input
}
// A full .git URL is used verbatim; a plain name resolves to the official template repo.
const isGitRepo = name.indexOf('.git') !== -1
let gitRepo = ''
if (isGitRepo) {
gitRepo = name
// Derive a clean template name from the repo URL (strip ref fragment, path, .git suffix).
name = cleanString(name
.replace(/((?:.git)?#.*)/, '')
.split('/')
.slice(-1)[0]
.replace(/[:#]/g, '')
.replace('.git', ''))
} else {
gitRepo = `https://github.com/firelayer/${name}-template.git`
}
// choose latest tag version that suits cli version
let stdout = ''
try {
stdout = (await cmd(`git ls-remote --tags ${gitRepo}`, {}, {
'GIT_TERMINAL_PROMPT': '0',
})) as string
} catch (error) {
const message = `Template not found. Repository: '${gitRepo}' not found.\n`
logger('addTemplate', message)
console.log(chalk.bold.red('\nError: ') + message)
process.exit(1)
}
// Each ls-remote line ends in 'refs/tags/<tag>'; the last listed tag is taken as latest.
const versions = stdout.split(/\r?\n/).map((line) => {
const match = line.match(/tags\/(.*)/)
return match ? match[1] : ''
})
let latest = versions.length > 0 ? versions[versions.length - 1] : ''
if (!latest) {
if (!options.silent) {
const message = `Can't find latest version for ${name}-template, using 'master' branch..`
logger('addTemplate', message)
console.log(chalk.bold(message))
}
latest = 'master'
}
if (!options.silent) console.log(chalk.bold(`\nGetting template from '${gitRepo}'..`))
const tempPath = path.join(os.tmpdir(), 'firelayer-templates', name)
// delete clone
fs.removeSync(tempPath)
await cmd(`git clone --branch ${latest} --depth 1 ${gitRepo} ${tempPath}`, {}, {
'GIT_TERMINAL_PROMPT': '0',
})
// check if i already have a app with same dir name on my current project
const newApps = {}
const currentApps = {}
getDirectories(`${tempPath}/apps`).forEach((app) => { app = path.basename(app); newApps[app] = app })
getDirectories('./apps').forEach((app) => { app = path.basename(app); currentApps[app] = app })
let overwritingFunctions = false
// Resolve name clashes: 'functions' may be overwritten (one allowed), other
// apps get a fallback name, prompting the user when that is taken too.
for (const app in newApps) {
if (currentApps[app]) {
if (app === 'functions') {
const quiz = await prompt({
type: 'confirm',
name: 'confirm',
default: false,
message: 'You can only have one \'functions\' app, do you want to overwrite the current one ?',
})
overwritingFunctions = quiz.confirm
if (!overwritingFunctions) delete newApps['functions']
} else {
// NOTE(review): the fallback is derived from the *template* name, not the
// app's own directory name, so multiple clashing apps from one template all
// start from 'app-<template>' — confirm this is intended.
const newName = `app-${name}`
if (currentApps[newName]) {
const appName = await getNameForApp(newName, currentApps)
newApps[app] = appName
} else {
newApps[app] = newName
}
}
}
}
const packageJSON = JSON.parse(fs.readFileSync('./package.json', 'utf8'))
// copy new apps into current project
for (const app in newApps) {
const appName = newApps[app]
// copy new app
fs.ensureDirSync('./apps')
fs.copySync(`${tempPath}/apps/${app}`, `./apps/${appName}`, { overwrite: true })
// copy/create configs for that app
fs.ensureDirSync(`./config/${appName}`)
fs.copySync(`${tempPath}/config/${app}/env.dist.json`, `./config/${appName}/env.dist.json`, { overwrite: true })
fs.copySync(`${tempPath}/config/${app}/env.dist.json`, `./config/${appName}/env.json`, { overwrite: true })
// Add dev/build/deploy scripts for the app without clobbering existing ones.
if (!packageJSON.scripts[`dev:${appName}`]) packageJSON.scripts[`dev:${appName}`] = `firelayer run "cd apps/${appName} && npm run dev"`
if (!packageJSON.scripts[`build:${appName}`]) packageJSON.scripts[`build:${appName}`] = `firelayer run "cd apps/${appName} && npm run build"`
if (appName === 'functions') {
if (!packageJSON.scripts['deploy:functions']) packageJSON.scripts['deploy:functions'] = 'npm run build:functions && firebase deploy --only functions'
} else {
if (!packageJSON.scripts[`deploy:${appName}`]) packageJSON.scripts[`deploy:${appName}`] = `npm run build:${appName} && firebase deploy --only hosting:${appName}`
}
// Keep the scripts map alphabetically sorted for readable diffs.
const scripts = {}
Object.keys(packageJSON.scripts)
.sort()
.forEach((v) => {
scripts[v] = packageJSON.scripts[v]
})
packageJSON.scripts = scripts
}
fs.writeFileSync('./package.json', JSON.stringify(packageJSON, null, 2))
// merge app.dist.json with template one
if (fs.existsSync(`${tempPath}/config/app.dist.json`)) {
if (fs.existsSync('./config/app.dist.json')) {
// Existing project values win over template defaults on conflict.
const currentJSON = JSON.parse(fs.readFileSync('./config/app.dist.json', 'utf8'))
const newJSON = JSON.parse(fs.readFileSync(`${tempPath}/config/app.dist.json`, 'utf8'))
const merged = deepmerge(newJSON, currentJSON)
fs.writeFileSync('./config/app.dist.json', JSON.stringify(merged, null, 2))
} else {
fs.copySync(`${tempPath}/config/app.dist.json`, './config/app.dist.json')
}
}
const newTargets = []
// merge firebase.json
if (fs.existsSync(`${tempPath}/firebase.json`)) {
if (fs.existsSync('./firebase.json')) {
const currentJSON = JSON.parse(fs.readFileSync('./firebase.json', 'utf8'))
const newJSON = JSON.parse(fs.readFileSync(`${tempPath}/firebase.json`, 'utf8'))
// add functions
if (newApps['functions']) {
currentJSON.functions = newJSON.functions
}
// assign new app names to hosting on newJSON
const newHostings = newJSON.hosting
if (newHostings && newHostings.length > 0) {
newHostings.map((hosting) => {
try {
// Rewrite each hosting entry's public dir to the (possibly renamed) app dir.
const appName = hosting.public.match(/[/](\w+)/i)[0].replace('/', '')
if (newApps[appName]) {
hosting.public = hosting.public.replace(appName, newApps[appName])
}
} catch (error) {
logger('addTemplate', error)
}
newTargets.push(hosting.target)
return hosting
})
const currentHostings = currentJSON.hosting
if (currentHostings && currentHostings.length > 0) {
// check if hosting target already exists
currentHostings.forEach((hosting) => {
let exists = -1
newHostings.forEach((newHosting, index) => {
if (JSON.stringify(hosting) === JSON.stringify(newHosting)) exists = index
})
if (exists >= 0) newHostings.splice(exists, 1)
})
currentJSON.hosting = currentHostings.concat(newHostings)
} else (
currentJSON.hosting = newHostings
)
}
fs.writeFileSync('./firebase.json', JSON.stringify(currentJSON, null, 2))
} else {
fs.copySync(`${tempPath}/firebase.json`, './firebase.json')
}
}
// merge .firebaserc
if (newTargets.length > 0) {
if (fs.existsSync('./.firebaserc')) {
// Point every new hosting target at the project's default Firebase project.
const currentJSON = JSON.parse(fs.readFileSync('./.firebaserc', 'utf8'))
const defaultName = currentJSON.projects.default
const newDefaultHostingTargets = {
targets: {},
}
newDefaultHostingTargets.targets[defaultName] = { hosting: {} }
newTargets.forEach((target) => {
newDefaultHostingTargets.targets[defaultName].hosting[target] = [defaultName]
})
const newFirebaseRC = deepmerge(currentJSON, newDefaultHostingTargets, {
arrayMerge: (destinationArray, sourceArray, options) => sourceArray,
})
fs.writeFileSync('./.firebaserc', JSON.stringify(newFirebaseRC, null, 2))
}
}
// copy migrations
fs.ensureDirSync('./database/migrations')
glob.sync(`${tempPath}/database/migrations/*`).forEach((file) => {
fs.copyFileSync(file, path.join('./database/migrations', path.basename(file)))
})
// copy rules
if (fs.existsSync(`${tempPath}/rules`)) {
if (fs.existsSync('./rules')) {
const { confirm } = await prompt({
type: 'confirm',
name: 'confirm',
default: false,
message: 'Do you want to overwrite current rules ? \n (if not, a folder inside rules \'TO_MERGE\' will be made for manual merge)',
})
if (confirm) {
fs.copySync(`${tempPath}/rules`, './rules')
} else {
fs.copySync(`${tempPath}/rules`, './rules/TO_MERGE')
}
} else {
fs.copySync(`${tempPath}/rules`, './rules')
}
}
// get firelayer.js
let templateFn = () => {}
if (fs.existsSync(`${tempPath}/firelayer.js`)) {
try {
templateFn = require(`${tempPath}/firelayer.js`)
} catch (error) {
logger('addTemplate', error)
}
}
// delete clone
fs.removeSync(tempPath)
if (!options.silent) console.log(`\nAdded template ${chalk.cyan(name)}.\n`)
if (options.dependenciesPrompt) {
try {
const { confirm } = await prompt({
type: 'confirm',
name: 'confirm',
default: true,
message: 'Install dependencies?',
})
const tasksDependencies = new Listr([{
title: 'Installing dependencies',
skip: () => !confirm,
task: () => cmd('npm run bootstrap'),
}])
await tasksDependencies.run()
} catch (e) {
logger('addTemplate', e)
throw new Error(e)
}
}
if (!options.silent) console.log(chalk.bold.cyan('\nDon\'t forget to verify hosting properties in \'firebase.json\' and targets on \'.firebaserc\'\n'))
return templateFn
}
|
package com.badlogic.gdx.scenes.scene2d.ui;
import com.badlogic.gdx.scenes.scene2d.Group;
/**
 * Empty marker subclass of scene2d's {@link Group}. Adds no behavior of its
 * own; presumably an extension point for layout logic — confirm intended use.
 */
public class LayoutGroup extends Group {
}
|
<reponame>albertov05/DevExtreme
const { test } = QUnit;
import "ui/file_manager";
import ArrayFileProvider from "ui/file_manager/file_provider/array";
import { FileManagerRootItem } from "ui/file_manager/file_provider/file_provider";
import { ErrorCode } from "ui/file_manager/ui.file_manager.common";
// Shared fixture: a small virtual file tree (F1 with three nested folders —
// one containing a file, one containing a sub-folder — plus an empty sibling
// F2) backing a fresh ArrayFileProvider before each test.
const moduleConfig = {
beforeEach: function() {
this.options = {
data: [
{
name: "F1",
isDirectory: true,
items: [
{
name: "F1.1",
isDirectory: true
},
{
name: "F1.2",
isDirectory: true,
items: [
{
name: "File1.2.txt"
}
]
},
{
name: "F1.3",
isDirectory: true,
items: [
{
name: "F1.3.1",
isDirectory: true
}
]
}
]
},
{
name: "F2",
isDirectory: true
}
]
};
this.provider = new ArrayFileProvider(this.options);
},
};
QUnit.module("Array File Provider", moduleConfig, () => {
// Verifies directory listing at the root and at nested path levels.
test("get directory file items", function(assert) {
let items = this.provider.getItems();
assert.equal(items.length, 2);
assert.equal(items[0].name, "F1");
assert.ok(items[0].hasSubDirs);
assert.equal(items[1].name, "F2");
assert.notOk(items[1].hasSubDirs);
let pathInfo = [ { key: "F1", name: "F1" } ];
items = this.provider.getItems(pathInfo);
// Fix: QUnit's assert.equal signature is (actual, expected); the arguments
// were swapped here, which would produce a misleading failure message.
assert.equal(items.length, 3);
assert.equal(items[0].name, "F1.1");
assert.notOk(items[0].hasSubDirs);
assert.equal(items[1].name, "F1.2");
assert.notOk(items[1].hasSubDirs);
assert.equal(items[2].name, "F1.3");
assert.ok(items[2].hasSubDirs);
pathInfo = [
{ key: "F1", name: "F1" },
{ key: "F1/F1.2", name: "F1.2" }
];
items = this.provider.getItems(pathInfo);
assert.equal(items.length, 1);
assert.equal(items[0].name, "File1.2.txt");
});
// Moving F1 into F2 leaves a single root folder that now has sub-directories.
test("move directory", function(assert) {
let items = this.provider.getItems();
this.provider.moveItems([ items[0] ], items[1]);
items = this.provider.getItems();
assert.equal(items.length, 1);
assert.ok(items[0].hasSubDirs);
});
// Moving a folder into itself must raise ErrorCode.Other, both at the root
// level and for nested folders; the tree must remain unchanged.
test("throw error when try moving folder with incorrect parameters", function(assert) {
let errorCount = 0;
let lastErrorId = -1;
let items = this.provider.getItems();
try {
this.provider.moveItems([ items[0] ], items[0]);
} catch(e) {
errorCount++;
lastErrorId = e.errorId;
}
assert.equal(items[0].name, "F1");
assert.equal(errorCount, 1);
assert.equal(lastErrorId, ErrorCode.Other);
const pathInfo = [ { key: "F1", name: "F1" } ];
let subFolders = this.provider.getItems(pathInfo);
try {
this.provider.moveItems([ subFolders[0] ], subFolders[0]);
} catch(e) {
errorCount++;
lastErrorId = e.errorId;
}
assert.equal(subFolders[0].name, "F1.1");
assert.equal(errorCount, 2);
assert.equal(lastErrorId, ErrorCode.Other);
});
test("throw error when try copying folder with incorrect parameters", function(assert) {
let errorCount = 0;
let lastErrorId = -1;
let folders = this.provider.getItems();
try {
this.provider.copyItems([ folders[0] ], folders[0]);
} catch(e) {
errorCount++;
lastErrorId = e.errorId;
}
assert.equal(folders[0].name, "F1");
assert.equal(errorCount, 1);
assert.equal(lastErrorId, ErrorCode.Other);
const pathInfo = [ { key: "F1", name: "F1" } ];
let subFolders = this.provider.getItems(pathInfo);
try {
this.provider.copyItems([ subFolders[0] ], subFolders[0]);
} catch(e) {
errorCount++;
lastErrorId = e.errorId;
}
assert.equal(subFolders[0].name, "F1.1");
assert.equal(errorCount, 2);
assert.equal(lastErrorId, ErrorCode.Other);
});
test("create new folder with existing name", function(assert) {
this.provider.createFolder(new FileManagerRootItem(), "F1");
const dirs = this.provider.getItems();
assert.equal(dirs[0].name, "F1");
assert.equal(dirs[0].key, "F1");
assert.equal(dirs[1].name, "F2");
assert.equal(dirs[1].key, "F2");
assert.equal(dirs[2].name, "F1");
assert.notEqual(dirs[2].key, "F1");
assert.ok(dirs[2].key.length > 1);
});
test("throw error on creating new directory in unexisting directory", function(assert) {
let errorCount = 0;
let errorId = 0;
const f1Dir = this.provider.getItems()[0];
this.options.data.splice(0, this.options.data.length);
try {
this.provider.createFolder(f1Dir, "NewDir");
} catch(e) {
errorCount++;
errorId = e.errorId;
}
assert.equal(errorCount, 1);
assert.equal(errorId, ErrorCode.DirectoryNotFound);
});
test("rename file item with existing name", function(assert) {
const fileItems = this.provider.getItems();
this.provider.renameItem(fileItems[0], "F2");
assert.equal(fileItems[0].name, "F2");
assert.notEqual(fileItems[0].key, fileItems[1].key);
assert.equal(fileItems[1].name, "F2");
assert.equal(fileItems[1].key, "F2");
});
test("delete directory", function(assert) {
let fileItems = this.provider.getItems();
assert.equal("F1", fileItems[0].name);
assert.equal("F2", fileItems[1].name);
assert.equal(2, fileItems.length);
this.provider.deleteItems([ fileItems[0] ]);
fileItems = this.provider.getItems();
assert.equal("F2", fileItems[0].name);
assert.equal(1, fileItems.length);
});
test("throw exception if remove unexisting directory", function(assert) {
let errorCount = 0;
let errorId = 0;
const f1Dir = this.provider.getItems()[0];
this.options.data.splice(0, this.options.data.length);
try {
this.provider.deleteItems([ f1Dir ]);
} catch(e) {
errorCount++;
errorId = e.errorId;
}
assert.equal(errorCount, 1);
assert.equal(errorId, ErrorCode.DirectoryNotFound);
});
});
|
var lory = require('lory.js').lory;
// Initialize a lory slider on `el` and wire up clickable navigation dots.
// Returns the lory instance, or undefined when `el` is not an element.
module.exports = function slider(el) {
    // Guard: lory needs a real DOM element to attach to.
    if (!(el instanceof HTMLElement)) {
        console.warn('slider() expects an HTMLElement. Received', el, 'of type', typeof el);
        return;
    }
    // One dot per slide; dots are <li> clones appended to [data-js="dots"].
    var dot_count = el.querySelectorAll('[data-js="slide"]').length;
    var dot_container = el.querySelector('[data-js="dots"]');
    var dot_list_item = document.createElement('li');
    // Single handler dispatching on lory's lifecycle event types.
    function handleDotEvent(e) {
        // Before init: create the dots and mark the first one active.
        if (e.type === 'before.lory.init') {
            for (var i = 0, len = dot_count; i < len; i++) {
                var clone = dot_list_item.cloneNode();
                dot_container.appendChild(clone);
            }
            dot_container.childNodes[0].classList.add('active');
        }
        // After init: clicking a dot jumps to the slide at that dot's index.
        if (e.type === 'after.lory.init') {
            for (var i = 0, len = dot_count; i < len; i++) {
                dot_container.childNodes[i].addEventListener('click', function(e) {
                    dot_navigation_slider.slideTo(Array.prototype.indexOf.call(dot_container.childNodes, e.target));
                });
            }
        }
        // After each slide: move the 'active' class to the current dot.
        // NOTE(review): the -1 offset assumes lory reports a 1-based
        // currentSlide when infinite mode is on — confirm against lory docs.
        if (e.type === 'after.lory.slide') {
            for (var i = 0, len = dot_container.childNodes.length; i < len; i++) {
                dot_container.childNodes[i].classList.remove('active');
            }
            dot_container.childNodes[e.detail.currentSlide - 1].classList.add('active');
        }
        // On resize lory snaps back to the first slide; reset the dots too.
        if (e.type === 'on.lory.resize') {
            for (var i = 0, len = dot_container.childNodes.length; i < len; i++) {
                dot_container.childNodes[i].classList.remove('active');
            }
            dot_container.childNodes[0].classList.add('active');
        }
    }
    // Events must be registered before lory() is called so that
    // 'before.lory.init' is observed.
    el.addEventListener('before.lory.init', handleDotEvent);
    el.addEventListener('after.lory.init', handleDotEvent);
    el.addEventListener('after.lory.slide', handleDotEvent);
    el.addEventListener('on.lory.resize', handleDotEvent);
    var dot_navigation_slider = lory(el, {
        infinite: 1,
        enableMouseEvents: true
    });
    return dot_navigation_slider;
};
|
<reponame>davidyu62/egovframe-runtime
/*
* Copyright 2008-2009 MOPAS(Ministry of Public Administration and Security).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.egovframe.rte.itl.webservice.service.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map.Entry;
import org.egovframe.rte.itl.integration.type.RecordType;
import org.egovframe.rte.itl.integration.type.Type;
import org.egovframe.rte.itl.webservice.EgovWebServiceMessageHeader;
import org.egovframe.rte.itl.webservice.data.WebServiceServerDefinition;
import org.egovframe.rte.itl.webservice.service.ServiceEndpointInfo;
import org.egovframe.rte.itl.webservice.service.ServiceParamInfo;
import javax.jws.WebParam.Mode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.StringUtils;
/**
 * Web service {@code ServiceEndpoint} information implementation class.
 * <p>
 * <b>NOTE:</b> Immutable holder for the information that describes a web
 * service ServiceEndpoint: namespace, address, service/port/operation names,
 * and the return/parameter metadata.
 * </p>
 *
 * @author eGovFrame runtime team, Sim Sang-ho
 * @since 2009.06.01
 * @version 1.0
 * <pre>
 * Modification Information
 *
 *   Date         Author        Note
 *   ----------------------------------------------
 *   2009.06.01   Sim Sang-ho   Initial creation
 * </pre>
 */
public class ServiceEndpointInfoImpl implements ServiceEndpointInfo {

	private static final Logger LOGGER = LoggerFactory.getLogger(ServiceEndpointInfoImpl.class);

	// All fields are assigned exactly once in each constructor, so they are
	// declared final to make instances immutable.

	/** target namespace */
	private final String namespace;

	/** endpoint address */
	private final String address;

	/** service name */
	private final String serviceName;

	/** port name */
	private final String portName;

	/** operation name */
	private final String operationName;

	/** return-value info ({@code null} when the operation has no return value) */
	private final ServiceParamInfo returnInfo;

	/** parameter info */
	private final Collection<ServiceParamInfo> paramInfos;

	/**
	 * Constructor.
	 *
	 * @param namespace
	 *            target namespace (must contain text)
	 * @param address
	 *            endpoint address (must contain text)
	 * @param serviceName
	 *            service name (must contain text)
	 * @param portName
	 *            port name (must contain text)
	 * @param operationName
	 *            operation name (must contain text)
	 * @param returnInfo
	 *            return-value info (may be {@code null})
	 * @param paramInfos
	 *            parameter info (must not be {@code null})
	 * @throws IllegalArgumentException
	 *             when a required argument is blank or {@code null}
	 */
	public ServiceEndpointInfoImpl(String namespace, String address,
			String serviceName, String portName, String operationName,
			ServiceParamInfo returnInfo, Collection<ServiceParamInfo> paramInfos) {
		super();
		// Fail fast on invalid arguments ('!x' instead of 'x == false').
		if (!StringUtils.hasText(namespace)) {
			LOGGER.error("Argument 'namespace' has no text ({})", namespace);
			throw new IllegalArgumentException();
		} else if (!StringUtils.hasText(address)) {
			LOGGER.error("Argument 'address' has no text ({})", address);
			throw new IllegalArgumentException();
		} else if (!StringUtils.hasText(serviceName)) {
			LOGGER.error("Argument 'serviceName' has no text ({})", serviceName);
			throw new IllegalArgumentException();
		} else if (!StringUtils.hasText(portName)) {
			LOGGER.error("Argument 'portName' has no text ({})", portName);
			throw new IllegalArgumentException();
		} else if (!StringUtils.hasText(operationName)) {
			LOGGER.error("Argument 'operationName' has no text ({})", operationName);
			throw new IllegalArgumentException();
		} else if (paramInfos == null) {
			LOGGER.error("Argument 'paramInfos' is null");
			throw new IllegalArgumentException();
		}
		this.namespace = namespace;
		this.address = address;
		this.serviceName = serviceName;
		this.portName = portName;
		this.operationName = operationName;
		this.returnInfo = returnInfo;
		this.paramInfos = paramInfos;
	}

	/**
	 * Constructor building endpoint info from a server definition and the
	 * request/response record types.
	 *
	 * @param webServiceServerDefinition
	 *            WebServiceServerDefinition (must be non-null and valid)
	 * @param requestType
	 *            request message RecordType (must not be {@code null})
	 * @param responseType
	 *            response message RecordType (must not be {@code null})
	 * @throws IllegalArgumentException
	 *             when an argument is {@code null} or invalid
	 */
	public ServiceEndpointInfoImpl(
			final WebServiceServerDefinition webServiceServerDefinition,
			final RecordType requestType, final RecordType responseType) {
		super();
		if (webServiceServerDefinition == null) {
			LOGGER.error("Argument 'webServiceServerDefinition' is null");
			throw new IllegalArgumentException();
		} else if (!webServiceServerDefinition.isValid()) {
			LOGGER.error("Argument 'webServiceServerDefinition' is invalid");
			throw new IllegalArgumentException();
		} else if (requestType == null) {
			LOGGER.error("Argument 'requestType' is null");
			throw new IllegalArgumentException();
		} else if (responseType == null) {
			LOGGER.error("Argument 'responseType' is null");
			throw new IllegalArgumentException();
		}
		this.namespace = webServiceServerDefinition.getNamespace();
		this.address = webServiceServerDefinition.getAddress();
		this.serviceName = webServiceServerDefinition.getServiceName();
		this.portName = webServiceServerDefinition.getPortName();
		this.operationName = webServiceServerDefinition.getOperationName();
		this.returnInfo = null;
		this.paramInfos = new ArrayList<ServiceParamInfo>();
		// message header is carried both in and out of every call
		this.paramInfos.add(new ServiceParamInfoImpl("header", EgovWebServiceMessageHeader.TYPE, Mode.INOUT, true));
		// request body fields become IN parameters
		for (Entry<String, Type> entry : requestType.getFieldTypes().entrySet()) {
			this.paramInfos.add(new ServiceParamInfoImpl(entry.getKey(), entry.getValue(), Mode.IN, false));
		}
		// response body fields become OUT parameters
		for (Entry<String, Type> entry : responseType.getFieldTypes().entrySet()) {
			this.paramInfos.add(new ServiceParamInfoImpl(entry.getKey(), entry.getValue(), Mode.OUT, false));
		}
	}

	/**
	 * @return the target namespace
	 */
	public String getNamespace() {
		return namespace;
	}

	/**
	 * @return the endpoint address
	 */
	public String getAddress() {
		return address;
	}

	/**
	 * @return the service name
	 */
	public String getServiceName() {
		return serviceName;
	}

	/**
	 * @return the port name
	 */
	public String getPortName() {
		return portName;
	}

	/**
	 * @return the operation name
	 */
	public String getOperationName() {
		return operationName;
	}

	/**
	 * @return the return-value info, or {@code null} if none
	 */
	public ServiceParamInfo getReturnInfo() {
		return returnInfo;
	}

	/**
	 * @return the parameter info collection
	 */
	public Collection<ServiceParamInfo> getParamInfos() {
		return paramInfos;
	}

	/**
	 * WSDL address is not tracked by this implementation.
	 *
	 * @return always {@code null}
	 */
	public String getWsdlAddress() {
		return null;
	}
}
|
<reponame>RobertPHeller/RPi-RRCircuits
// -!- c++ -!- //////////////////////////////////////////////////////////////
//
// System :
// Module :
// Object Name : $RCSfile$
// Revision : $Revision$
// Date : $Date$
// Author : $Author$
// Created By : <NAME>
// Created : Wed Mar 24 09:44:24 2021
// Last Modified : <210425.1323>
//
// Description
//
// Notes
//
// History
//
/////////////////////////////////////////////////////////////////////////////
//
// Copyright (C) 2021 Robert Heller D/B/A Deepwoods Software
// 			51 Locke Hill Road
// 			Wendell, MA 01379-9728
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
//
//
//
//////////////////////////////////////////////////////////////////////////////
#ifndef __BBRAILCOMDRIVER_HXX
#define __BBRAILCOMDRIVER_HXX
#include <dcc/RailCom.hxx>
#include <dcc/RailcomHub.hxx>
#include <stdint.h>
#include "ExtendedRingBuffer.hxx"
#include <freertos_drivers/common/RailcomDriver.hxx>
#include <os/Gpio.hxx>
#include <signal.h>
#include <time.h>
/// BeagleBone RailCom driver.
///
/// The template parameter HW supplies the board-specific pieces: GPIO lines
/// (HB_ENABLE, HB_BRAKE, RC_ENABLE), UART helpers (openport, flush,
/// data_avail, readbuff) and the RailCom timing constants.
///
/// Cutout phases are sequenced with a POSIX timer whose expirations are
/// delivered as SIGEV_THREAD callbacks (railcom_timer_tick), not from an ISR.
template <class HW>
class BBRailComDriver : public RailcomDriver
{
public:
    /// @param queue_size number of RailcomHubData packets the feedback
    ///        ring buffer can hold.
    BBRailComDriver(size_t queue_size)
        : railComFeedbackBuffer_(ExtendedRingBuffer<dcc::RailcomHubData>::create(queue_size))
    {
    }
    /// One-time hardware setup: init GPIO, open the RailCom UART and create
    /// the phase-sequencing timer. Exits the process if the timer cannot be
    /// created.
    /// NOTE(review): `sev` is only partially initialized —
    /// sigev_notify_attributes is left indeterminate; consider
    /// zero-initializing the struct before use.
    void hw_init(dcc::RailcomHubFlow *hubFlow)
    {
        struct sigevent sev; // Timer event
        railComHubFlow_ = hubFlow;
        HW::hw_init();
        uart_fd_ = HW::openport();
        sev.sigev_notify = SIGEV_THREAD;
        sev.sigev_value.sival_ptr = (void *) this;
        sev.sigev_notify_function = BBRailComDriver<HW>::railcom_timer_tick;
        if (timer_create(CLOCK_REALTIME, &sev, &timerid_) == -1) {
            LOG(FATAL, "BBRailComDriver: failed to create timer (%d)", errno);
            exit(errno);
        }
    }
    /// Brake the H-bridge, then (after a short settle) remember the current
    /// enable state and disable the output. The saved state is restored by
    /// enable_output(); the brake/enable ordering matters.
    void disable_output()
    {
        HW::HB_BRAKE::set(true);
        usleep(1);
        enabled_ = HW::HB_ENABLE::get();
        HW::HB_ENABLE::set(false);
    }
    /// Restore the enable state saved by disable_output(), then release the
    /// brake (reverse order of disable_output()).
    void enable_output()
    {
        HW::HB_ENABLE::set(enabled_);
        usleep(1);
        HW::HB_BRAKE::set(false);
    }
    /// Begin the RailCom cutout: kill track power, flush stale UART bytes,
    /// enable the RailCom receiver and arm a one-shot timer for the end of
    /// channel-1 (phase 1). timer_tick() continues the sequence.
    void start_cutout() override
    {
        struct itimerspec its;
        disable_output();
        usleep(HW::RAILCOM_TRIGGER_DELAY_USEC);
        HW::flush(uart_fd_);
        HW::RC_ENABLE::set(true);
        railcomPhase_ = RailComPhase::CUTOUT_PHASE1;
        its.it_value.tv_sec = 0;
        its.it_value.tv_nsec = HW::RAILCOM_MAX_READ_DELAY_CH_1*1000; // usec -> nsec
        its.it_interval.tv_sec = 0;  // one-shot, no reload
        its.it_interval.tv_nsec = 0;
        if (timer_settime(timerid_, 0, &its, NULL) == -1) {
            LOG(FATAL, "BBRailComDriver: failed to start timer (%d)", errno);
            exit(errno);
        }
    }
    /// Drain up to max_len bytes currently pending on the RailCom UART into
    /// buf. Returns the number of bytes actually read (0 if none pending).
    size_t rx_to_buf(uint8_t *buf, size_t max_len)
    {
        size_t rx_bytes = 0;
        size_t avail = HW::data_avail(uart_fd_);
        if (avail == 0) return avail;
        if (avail > max_len) avail = max_len;
        rx_bytes = HW::readbuff(uart_fd_,buf,avail);
        return rx_bytes;
    }
    /// End of channel 1: read whatever arrived (channel 1 is at most 2 bytes
    /// on the wire; 6 is a safe upper bound) into the current feedback packet.
    /// The packet is not advanced yet — channel 2 data is appended first.
    void middle_cutout() override
    {
        dcc::RailcomHubData *fb = railcom_buffer();
        uint8_t rx_buf[6] = {0, 0, 0, 0, 0, 0};
        size_t rx_bytes = rx_to_buf(rx_buf, 6);
        if (fb)
        {
            for (size_t idx = 0; idx < rx_bytes; idx++)
            {
                fb->add_ch1_data(rx_buf[idx]);
            }
        }
    }
    /// End of channel 2: append the received bytes, commit the feedback
    /// packet to the ring buffer, and turn the RailCom receiver off.
    void end_cutout() override
    {
        dcc::RailcomHubData *fb = railcom_buffer();
        uint8_t rx_buf[6] = {0, 0, 0, 0, 0, 0};
        size_t rx_bytes = rx_to_buf(rx_buf, 6);
        if (fb)
        {
            for (size_t idx = 0; idx < rx_bytes; idx++)
            {
                fb->add_ch2_data(rx_buf[idx]);
            }
            advance_railcom_buffer();
        }
        HW::RC_ENABLE::set(false);
    }
    /// Called when no cutout is generated: just keep the receiver disabled.
    void no_cutout() override
    {
        HW::RC_ENABLE::set(false);
    }
    /// Tag subsequent feedback packets with the given key (associates
    /// RailCom replies with the DCC packet that triggered them).
    void set_feedback_key(uint32_t key) override
    {
        railcomFeedbackKey_ = key;
    }
    /// No-op: this driver has no sampling hook.
    void feedback_sample() override
    {
    }
    /// State machine for the cutout sequence.
    typedef enum : uint8_t
    {
        PRE_CUTOUT,      // idle, waiting for the next cutout
        CUTOUT_PHASE1,   // channel-1 window in progress
        CUTOUT_PHASE2    // channel-2 window in progress
    } RailComPhase;
    /// @return the current cutout phase.
    RailComPhase railcom_phase()
    {
        return railcomPhase_;
    }
    /// @return a pointer to the ring buffer's current write slot, reset with
    ///         the current feedback key, or nullptr if the buffer is full.
    dcc::RailcomHubData *railcom_buffer()
    {
        dcc::RailcomHubData *data = nullptr;
        if (railComFeedbackBuffer_->data_write_pointer(&data) > 0)
        {
            data->reset(railcomFeedbackKey_);
        }
        return data;
    }
    /// Commit the current write slot (one packet) to the ring buffer.
    void advance_railcom_buffer()
    {
        railComFeedbackBuffer_->advance(1);
    }
    /// Timer callback body: advance PHASE1 -> PHASE2 (re-arming the timer for
    /// the channel-2 window) and PHASE2 -> PRE_CUTOUT (restoring track power).
    void timer_tick()
    {
        if (railcomPhase_ == RailComPhase::CUTOUT_PHASE1)
        {
            struct itimerspec its;
            middle_cutout();
            railcomPhase_ = RailComPhase::CUTOUT_PHASE2;
            its.it_value.tv_sec = 0;
            its.it_value.tv_nsec = HW::RAILCOM_MAX_READ_DELAY_CH_2*1000; // usec -> nsec
            its.it_interval.tv_sec = 0;  // one-shot
            its.it_interval.tv_nsec = 0;
            if (timer_settime(timerid_, 0, &its, NULL) == -1) {
                LOG(FATAL, "BBRailComDriver: failed to start timer (%d)", errno);
                exit(errno);
            }
        } else if (railcomPhase_ == RailComPhase::CUTOUT_PHASE2)
        {
            end_cutout();
            railcomPhase_ = RailComPhase::PRE_CUTOUT;
            enable_output();
        }
    }
private:
    int uart_fd_;                                   // RailCom UART file descriptor
    uintptr_t railcomFeedbackKey_{0};               // key tagging feedback packets
    dcc::RailcomHubFlow *railComHubFlow_;           // set in hw_init(); not read here
    ExtendedRingBuffer<dcc::RailcomHubData> *railComFeedbackBuffer_; // feedback queue
    RailComPhase railcomPhase_{RailComPhase::PRE_CUTOUT}; // cutout state machine
    bool enabled_{false};                           // HB_ENABLE state saved across a cutout
    timer_t timerid_;                               // phase-sequencing POSIX timer
    /// SIGEV_THREAD trampoline: recover `this` from the sigval and dispatch.
    static void railcom_timer_tick(union sigval sv)
    {
        BBRailComDriver<HW> * driver = reinterpret_cast<BBRailComDriver<HW> *> (sv.sival_ptr);
        driver->timer_tick();
    }
};
#endif // __BBRAILCOMDRIVER_HXX
|
#----------------------------------------------------------------------
# Initialize environment and alias
#----------------------------------------------------------------------
# Colorized ls/grep plus common listing shortcuts.
alias ls='ls --color'
alias ll='ls -lh'
alias la='ls -lAh'
alias grep='grep --color=tty'
# Launch vim with a flag telling the vim config which frontend started it.
alias nvim='/usr/local/opt/bin/vim --cmd "let g:vim_startup=\"nvim\""'
alias mvim='/usr/local/opt/bin/vim --cmd "let g:vim_startup=\"mvim\""'
alias tmux='tmux -2'
alias lld='lsd -l'

# default editor
export EDITOR=vim

# export TERM=xterm-256color

# disable ^s and ^q
# stty -ixon 2> /dev/null

# setup for go if it exists
if [ -d "$HOME/.local/go" ]; then
	export GOPATH="$HOME/.local/go"
	if [ -d "$HOME/.local/go/bin" ]; then
		export PATH="$HOME/.local/go/bin:$PATH"
	fi
fi

# setup for alternative go path
# NOTE(review): overrides the GOPATH set above when both dirs exist.
if [ -d "$HOME/go" ]; then
	export GOPATH="$HOME/go"
	if [ -d "$HOME/go/bin" ]; then
		export PATH="$HOME/go/bin:$PATH"
	fi
fi

# setup for /usr/local/app/bin if it exists
if [ -d /usr/local/app/bin ]; then
	export PATH="/usr/local/app/bin:$PATH"
fi

# setup for a go toolchain installed under /usr/local/app
if [ -d /usr/local/app/go ]; then
	export GOROOT="/usr/local/app/go"
	export PATH="/usr/local/app/go/bin:$PATH"
fi

# setup for nodejs
if [ -d /usr/local/app/node ]; then
	export PATH="/usr/local/app/node/bin:$PATH"
fi

# setup for own dotfiles
if [ -d "$HOME/.vim/vim/tools/utils" ]; then
	export PATH="$HOME/.vim/vim/tools/utils:$PATH"
fi
# setup for local rust toolchain
# BUG FIX: PATH entries are colon-separated; the previous ';' silently
# truncated PATH for every lookup after ~/.cargo/bin.
if [ -d "$HOME/.cargo/bin" ]; then
	export PATH="$HOME/.cargo/bin:$PATH"
fi
# setup for the `cheat` tool: point it at the cheatsheets in the vim repo
if [ -d "$HOME/.vim/vim/cheat" ]; then
	export CHEAT_USER_DIR=~/.vim/vim/cheat
fi

# prepend personal ~/bin to PATH
if [ -d "$HOME/bin" ]; then
	export PATH="$HOME/bin:$PATH"
fi
#----------------------------------------------------------------------
# detect vim folder
#----------------------------------------------------------------------
# Validate any pre-set VIM_CONFIG; discard it if it lacks an etc/ dir.
if [ -n "$VIM_CONFIG" ]; then
	[ ! -d "$VIM_CONFIG/etc" ] && VIM_CONFIG=""
fi
# Probe known checkout locations. Each branch tests for <root>/etc and sets
# VIM_CONFIG to the repo *root*.
if [ -z "$VIM_CONFIG" ]; then
	if [ -d "$HOME/.vim/vim/etc" ]; then
		VIM_CONFIG="$HOME/.vim/vim"
	elif [ -d "/mnt/d/ACM/github/vim/etc" ]; then
		VIM_CONFIG="/mnt/d/ACM/github/vim"
	elif [ -d "/d/ACM/github/vim/etc" ]; then
		# BUG FIX: this branch used to set the .../vim/etc subdir, unlike its
		# siblings, which broke the $VIM_CONFIG/etc and $VIM_CONFIG/cheat
		# lookups below.
		VIM_CONFIG="/d/ACM/github/vim"
	elif [ -d "/cygdrive/d/ACM/github/vim/etc" ]; then
		VIM_CONFIG="/cygdrive/d/ACM/github/vim"
	fi
fi
# Fall back to the default location even if it does not exist.
[ -z "$VIM_CONFIG" ] && VIM_CONFIG="$HOME/.vim/vim"
export VIM_CONFIG

# cheat integration rooted at the detected repo
[ -d "$VIM_CONFIG/cheat" ] && export CHEAT_PATH="$VIM_CONFIG/cheat"
export CHEAT_COLORS=true

# Python interactive-startup script (tab completion etc.)
if [ -f "$HOME/.local/lib/python/compinit.py" ]; then
	export PYTHONSTARTUP="$HOME/.local/lib/python/compinit.py"
fi
#----------------------------------------------------------------------
# exit if not bash/zsh, or not in an interactive shell
#----------------------------------------------------------------------
# Everything below is for interactive use only; `return` stops sourcing.
[ -z "$BASH_VERSION" ] && [ -z "$ZSH_VERSION" ] && return
[[ $- != *i* ]] && return

#----------------------------------------------------------------------
# keymap
#----------------------------------------------------------------------
# bash: Alt-h/j/k/l move like vi; Alt-H/L word-wise; Alt-J/K to line
# start/end; Alt-; runs 'll'.
if [[ -n "$BASH_VERSION" ]]; then
	bind '"\eh":"\C-b"'
	bind '"\el":"\C-f"'
	bind '"\ej":"\C-n"'
	bind '"\ek":"\C-p"'
	bind '"\eH":"\eb"'
	bind '"\eL":"\ef"'
	bind '"\eJ":"\C-a"'
	bind '"\eK":"\C-e"'
	bind '"\e;":"ll\n"'
# zsh: only the shortcut keys; cursor motion uses zsh defaults.
elif [[ -n "$ZSH_VERSION" ]]; then
	bindkey -s '\e;' 'll\n'
	bindkey -s '\eu' 'ranger_cd\n'
fi
#----------------------------------------------------------------------
# https://github.com/rupa/z
#----------------------------------------------------------------------
# Directory-jumping: prefer z.lua (when a lua interpreter $INIT_LUA and the
# script are available), falling back to the original z.sh.
if [[ -z "$DISABLE_Z_PLUGIN" ]]; then
	if [[ ! -d "$HOME/.local/share/zlua" ]]; then
		mkdir -p -m 700 "$HOME/.local/share/zlua" 2> /dev/null
	fi
	export _ZL_DATA="$HOME/.local/share/zlua/zlua.txt"
	export _Z_DATA="$HOME/.local/share/zlua/z.txt"
	export _ZL_USE_LFS=1
	if [[ -x "$INIT_LUA" ]] && [[ -f "$HOME/.local/etc/z.lua" ]]; then
		# Shell-specific init ("once" mode updates the database per prompt).
		if [[ -n "$BASH_VERSION" ]]; then
			eval "$($INIT_LUA $HOME/.local/etc/z.lua --init bash once enhanced fzf)"
		elif [[ -n "$ZSH_VERSION" ]]; then
			eval "$($INIT_LUA $HOME/.local/etc/z.lua --init zsh once enhanced)"
		else
			eval "$($INIT_LUA $HOME/.local/etc/z.lua --init auto once enhanced)"
		fi
		alias zi='z -i'
		alias zb='z -b'
		alias zf='z -I'
		alias zh='z -I -t .'
		alias zd='z -I .'
		alias zbi='z -b -i'
		alias zbf='z -b -I'
		_ZL_ECHO=1
	else
		[[ -f "$HOME/.local/etc/z.sh" ]] && . "$HOME/.local/etc/z.sh"
		alias zz='z'
	fi
fi
# NOTE(review): this overrides the zz='z' alias set in the fallback branch.
alias zz='z -c'
alias zzc='zz -c'
#----------------------------------------------------------------------
# commacd.sh
#----------------------------------------------------------------------
COMMACD_CD="cd"
[[ -e "$HOME/.local/etc/commacd.sh" ]] && . "$HOME/.local/etc/commacd.sh"

#----------------------------------------------------------------------
# m.sh - bookmark
#----------------------------------------------------------------------
[[ -e "$HOME/.local/etc/m.sh" ]] && . "$HOME/.local/etc/m.sh"

#----------------------------------------------------------------------
# other interactive shell settings
#----------------------------------------------------------------------
export GCC_COLORS=1
# skip Windows DLLs when completing executable names
export EXECIGNORE="*.dll"
|
require('sinatra')
require('sinatra/reloader')
require('./lib/coin_combo')
also_reload('./lib/**/*.rb')
# Landing page: renders the coin-change input form.
get ('/') do
  erb(:index)
end
# Result page: converts the requested change amount into coin counts.
get('/result') do
  bank_100s = params.fetch('bank_100s').to_i

  # Convert dollars to cents. BUG FIX: use round instead of to_i — binary
  # float error turns inputs like "1.13" into 112.999..., and to_i
  # truncated that to 112, silently dropping a penny.
  cents = (params.fetch('change').to_f * 100).round

  # coin_combo is mixed into Integer by lib/coin_combo; presumably it returns
  # a hash keyed by denomination in cents whose values hold
  # [count, plural-text] — TODO confirm against lib/coin_combo.
  change = cents.coin_combo(bank_100s)

  @sacagaweas   = change.fetch(100)[0]
  @half_dollars = change.fetch(50)[0]
  @quarters     = change.fetch(25)[0]
  @dimes        = change.fetch(10)[0]
  @nickles      = change.fetch(5)[0]
  @pennies      = change.fetch(1)[0]

  @sacagawea_plural   = change.fetch(100)[1]
  @half_dollar_plural = change.fetch(50)[1]
  @quarter_plural     = change.fetch(25)[1]
  @dime_plural        = change.fetch(10)[1]
  @nickle_plural      = change.fetch(5)[1]
  @penny_plural       = change.fetch(1)[1]

  erb(:result)
end
|
<reponame>getkuby/kube-dsl<filename>lib/kube-dsl/dsl/rbac/v1/role.rb<gh_stars>10-100
module KubeDSL::DSL::Rbac::V1
  # DSL object representing a Kubernetes RBAC Role
  # (apiVersion rbac.authorization.k8s.io/v1).
  class Role < ::KubeDSL::DSLObject
    # Standard Kubernetes object metadata (name, namespace, labels, ...).
    object_field(:metadata) { KubeDSL::DSL::Meta::V1::ObjectMeta.new }
    # Declared singular; the accessor used below is the plural `rules` —
    # NOTE(review): relies on array_field's pluralization behavior.
    array_field(:rule) { KubeDSL::DSL::Rbac::V1::PolicyRule.new }

    validates :metadata, object: { kind_of: KubeDSL::DSL::Meta::V1::ObjectMeta }
    validates :rules, array: { kind_of: KubeDSL::DSL::Rbac::V1::PolicyRule }, presence: false

    # Serialize to a Kubernetes-manifest-shaped hash.
    def serialize
      {}.tap do |result|
        result[:apiVersion] = "rbac.authorization.k8s.io/v1"
        result[:kind] = "Role"
        result[:metadata] = metadata.serialize
        result[:rules] = rules.map(&:serialize)
      end
    end

    # Symbolic resource kind used by the DSL registry.
    def kind_sym
      :role
    end
  end
end
|
def find_larger(a, b):
    """Return the larger of two comparable values (``b`` wins ties)."""
    return a if a > b else b


print(find_larger(5, 10))  # Output: 10
<gh_stars>1-10
def capText(string_To_Cap):
    """Return a copy of the string with each word title-cased."""
    return str.title(string_To_Cap)
|
import Cookies from 'js-cookie'
const TokenKey = 'Admin-Token'
const CityKey = 'citys'
// Read the auth token from the 'Admin-Token' cookie (undefined when absent).
export function getToken() {
  return Cookies.get(TokenKey)
}
// Read the stored city value from the 'citys' cookie (undefined when absent).
export function getCity() {
  return Cookies.get(CityKey)
}
// Persist the selected city value in the 'citys' cookie.
// Parameter renamed from the copy-pasted 'token' to 'city' for clarity
// (positional argument — callers are unaffected).
export function setCity(city) {
  return Cookies.set(CityKey, city)
}
// Persist the auth token in the 'Admin-Token' cookie.
export function setToken(token) {
  return Cookies.set(TokenKey, token)
}
// Delete the 'Admin-Token' cookie (logout). NOTE(review): there is no
// matching removeCity() helper — confirm whether one is needed.
export function removeToken() {
  return Cookies.remove(TokenKey)
}
|
// "dependents" collects the base dependencies so plots can be used on demand.
import GestureController from '@antv/g2/lib/chart/controller/gesture';
import { registerComponentController } from '@antv/g2';
export { GestureController };
// Register the gesture controller globally as a side effect of importing
// this module.
registerComponentController('gesture', GestureController);
// G (rendering engine)
export { Event as GraphicEvent } from '@antv/g-base';
export { Canvas } from '@antv/g-canvas';
export { Canvas as SVG } from '@antv/g-svg';
// G-Gesture
export { GM, Wheel } from '@antv/g-gesture';
// G2
export { View, registerAnimation, registerGeometry, Geometry, Interaction, InteractionAction, registerInteraction, registerAction, registerShape, getTheme, Util, getShapeFactory, ComponentController, registerComponentController, } from '@antv/g2';
export { VIEW_LIFE_CIRCLE, COMPONENT_TYPE, FIELD_ORIGIN } from '@antv/g2/lib/constant';
export { default as TooltipController } from '@antv/g2/lib/chart/controller/tooltip';
export { MarkerSymbols } from '@antv/g2/lib/util/marker';
import GrammarInteraction from '@antv/g2/lib/interaction/grammar-interaction';
export { GrammarInteraction };
import * as InteractionUtils from '@antv/g2/lib/interaction/action/util';
export { InteractionUtils };
export { DEFAULT_ANIMATE_CFG, getDefaultAnimateCfg, doAnimate } from '@antv/g2/lib/animate';
export { default as Element } from '@antv/g2/lib/geometry/element';
// Component (tooltip pieces re-exported individually)
import HtmlTooltip from '@antv/component/lib/tooltip/html';
import HtmlTooltipTheme from '@antv/component/lib/tooltip/html-theme';
import * as TooltipCssConst from '@antv/component/lib/tooltip/css-const';
export { HtmlTooltip, HtmlTooltipTheme, TooltipCssConst };
export { GroupComponent, Axis, Legend, Tooltip, Slider, Scrollbar } from '@antv/component';
// Coordinate
export { Coordinate } from '@antv/coord';
// Common field-name constants used by plot layers
export var ORIGIN = 'origin';
export var _ORIGIN = '_origin';
//# sourceMappingURL=dependents.js.map
//# sourceMappingURL=dependents.js.map |
#!/bin/sh
# Copyright 1998-2019 Lawrence Livermore National Security, LLC and other
# HYPRE Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# Inject the current release version and date into the user-manual (Sphinx)
# and reference-manual (Doxygen) configuration files.

usrconf="usr-manual/conf.py"
refconf="ref-manual/conf.doxygen"

version=$(../utilities/version -number)
reldate=$(../utilities/version -date)
# Quote $reldate so a date string containing spaces is not word-split.
usrdate=$(date --date="$reldate" +'%B %d, %Y')

# User manual: rewrite the version/release/today settings.
# \x27 is a single quote (GNU sed escape), so the sed program can stay
# single-quoted while emitting quoted Python strings.
sed -e 's/version = .*/version = \x27'"$version"'\x27/' "$usrconf" |
sed -e 's/release = .*/release = \x27'"$version"'\x27/' |
sed -e 's#today = .*#today = \x27'"$usrdate"'\x27#' > "$usrconf.tmp"
mv "$usrconf.tmp" "$usrconf"

# Reference manual: rewrite the Doxygen PROJECT_NUMBER setting.
sed -e 's/PROJECT_NUMBER .*=.*/PROJECT_NUMBER = '"$version"'/' "$refconf" > "$refconf.tmp"
mv "$refconf.tmp" "$refconf"
|
# Source this script from "build" script to define
# VTK_DISABLE_MODULES variable
#
# The variable accumulates -DVTK_MODULE_ENABLE_VTK_<name>=<value> CMake flags:
# a small set of modules is force-enabled (YES) and every other module is
# explicitly opted out (DONT_WANT). The loops below produce exactly the same
# flag string, in the same order, as the previous long list of assignments.

VTK_DISABLE_MODULES=""

# Modules the application needs: keep these enabled.
for vtk_mod in \
    CommonCore GUISupportQt ViewsQt RenderingFreeType
do
  VTK_DISABLE_MODULES="$VTK_DISABLE_MODULES -DVTK_MODULE_ENABLE_VTK_${vtk_mod}=YES"
done

# Everything else is unwanted; listing each module keeps the build minimal.
for vtk_mod in \
    AcceleratorsVTKm ChartsCore CommonArchive CommonColor \
    CommonComputationalGeometry CommonDataModel CommonExecutionModel \
    CommonMath CommonMisc CommonSystem CommonTransforms DICOMParser \
    DomainsChemistry DomainsChemistryOpenGL2 DomainsMicroscopy \
    DomainsParallelChemistry FiltersAMR FiltersCore FiltersExtraction \
    FiltersFlowPaths FiltersGeneral FiltersGeneric FiltersGeometry \
    FiltersHybrid FiltersHyperTree FiltersImaging FiltersModeling \
    FiltersOpenTURNS FiltersParallel FiltersParallelDIY2 \
    FiltersParallelFlowPaths FiltersParallelGeometry FiltersParallelImaging \
    FiltersParallelMPI FiltersParallelStatistics FiltersParallelVerdict \
    FiltersPoints FiltersProgrammable FiltersReebGraph FiltersSMP \
    FiltersSelection FiltersSources FiltersStatistics FiltersTexture \
    FiltersTopology FiltersVerdict GUISupportMFC GUISupportQtSQL \
    GeovisCore GeovisGDAL IOADIOS2 IOAMR IOAsynchronous IOCityGML IOCore \
    IOEnSight IOExodus IOExport IOExportGL2PS IOExportPDF IOFFMPEG IOGDAL \
    IOGeoJSON IOGeometry IOH5part IOImage IOImport IOInfovis IOLAS \
    IOLSDyna IOLegacy IOMINC IOMPIImage IOMotionFX IOMovie IOMySQL \
    IONetCDF IOODBC IOOggTheora IOPDAL IOPIO IOPLY IOParallel \
    IOParallelExodus IOParallelLSDyna IOParallelNetCDF IOParallelXML \
    IOParallelXdmf3 IOPostgreSQL IOSQL IOSegY IOTRUCHAS IOTecplotTable \
    IOVPIC IOVeraOut IOVideo IOXML IOXMLParser IOXdmf2 IOXdmf3 \
    ImagingColor ImagingCore ImagingFourier ImagingGeneral ImagingHybrid \
    ImagingMath ImagingMorphological ImagingOpenGL2 ImagingSources \
    ImagingStatistics ImagingStencil InfovisBoost \
    InfovisBoostGraphAlgorithms InfovisCore InfovisLayout InteractionImage \
    InteractionStyle InteractionWidgets MomentInvariants ParallelCore \
    ParallelDIY ParallelMPI PoissonReconstruction Powercrust \
    PythonInterpreter RenderingAnnotation RenderingContext2D \
    RenderingContextOpenGL2 RenderingCore RenderingExternal \
    RenderingFreeTypeFontConfig RenderingGL2PSOpenGL2 RenderingImage \
    RenderingLICOpenGL2 RenderingLOD RenderingLabel RenderingMatplotlib \
    RenderingOpenGL2 RenderingOpenVR RenderingParallel RenderingParallelLIC \
    RenderingQt RenderingRayTracing RenderingSceneGraph RenderingUI \
    RenderingVolume RenderingVolumeAMR RenderingVolumeOpenGL2 \
    RenderingVtkJS SignedTensor SplineDrivenImageSlicer TestingCore \
    TestingGenericBridge TestingIOSQL TestingRendering \
    UtilitiesBenchmarks ViewsContext2D ViewsCore ViewsInfovis WebCore \
    WebGLExporter WrappingPythonCore WrappingTools diy2 doubleconversion \
    eigen exodusII expat freetype gl2ps glew h5part hdf5 jpeg jsoncpp \
    kissfft kwiml libharu libproj libxml2 loguru lz4 lzma metaio netcdf \
    octree ogg opengl pegtl png pugixml sqlite theora tiff utf8 verdict \
    vpic vtkDICOM vtkm vtksys xdmf2 xdmf3 zfp zlib
do
  VTK_DISABLE_MODULES="$VTK_DISABLE_MODULES -DVTK_MODULE_ENABLE_VTK_${vtk_mod}=DONT_WANT"
done
# This script is sourced; do not leak the loop variable into the caller.
unset vtk_mod
|
#!/bin/bash
# Normalise tab/space usage in every Makefile under a directory tree
# (skipping env/riscos), in three passes, editing each file in place.

# Check if the directory path is provided as a command-line argument
if [ $# -ne 1 ]; then
    echo "Usage: $0 <directory_path>"
    exit 1
fi

directory_path=$1

# Run the given expand/unexpand command over every Makefile, writing to a
# private mktemp file instead of a fixed /tmp/e path so concurrent runs
# cannot clobber each other's intermediate output.
reformat_makefiles() {
    find "$directory_path" -path "$directory_path/env/riscos" -prune -o -name "Makefile" \
        -exec bash -c 'tmp=$(mktemp) && '"$1"' "$0" > "$tmp" && mv "$tmp" "$0"' {} \;
}

# Pass 1: replace initial tab characters with spaces (tab stop = 2).
reformat_makefiles 'expand --initial --tabs=2'
# Pass 2: replace remaining tab characters with single spaces.
reformat_makefiles 'expand --tabs=1'
# Pass 3: convert leading 2-space runs back into tabs (first-only).
reformat_makefiles 'unexpand --first-only --tabs=2'
#!/bin/sh
#
# Runs the spectrum audio visualisation.
# Abort if the bannervis directory is missing rather than executing
# ./spectrum from the wrong working directory.
cd bannervis || exit 1
./spectrum /dev/shm/squeezelite-b8:27:eb:f5:ed:87 30
|
# Evaluate the 512+0+512-LPMI model on wikitext-103 validation data with the
# shuffle-sentences/nouns-only augmentation and last-quarter eval function.
python transformers/examples/language-modeling/run_language_modeling.py \
    --model_name_or_path train-outputs/512+0+512-LPMI/model \
    --tokenizer_name model-configs/1024-config \
    --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw \
    --output_dir eval-outputs/512+0+512-LPMI/512+0+512-SS-N-256 \
    --do_eval \
    --per_device_eval_batch_size 1 \
    --dataloader_drop_last \
    --augmented \
    --augmentation_function shuffle_sentences_remove_all_but_nouns_first_half_quarter \
    --eval_function last_quarter_eval
<filename>api/src/main/java/brooklyn/location/MachineProvisioningLocation.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.location;
import java.util.Collection;
import java.util.Map;
/**
 * A location that is able to provision new machines within its location.
 *
 * This interface extends {@link Location} to add the ability to provision {@link MachineLocation}s in this location.
 */
public interface MachineProvisioningLocation<T extends MachineLocation> extends ProvisioningLocation<T> {
    /**
     * Obtain a machine in this location.
     *
     * @param flags Details of the desired machine (e.g. image, size, open ports, etc; some flag support is limited to selected providers).
     *        "callerContext" can be specified to have custom logging and error messages (useful if starting machines in parallel)
     * @return a machine that is a child of this location.
     * @throws NoMachinesAvailableException if there are no machines available in this location (or impls may return null, but that is discouraged)
     */
    @Override
    T obtain(Map<?,?> flags) throws NoMachinesAvailableException;

    /**
     * Creates a new location of the same type, but with additional creation instructions in the form of flags,
     * e.g. for specifying subnets, security groups, etc.
     * <p>
     * Implementers who wish to subclass this provisioning location for additional functionality
     * in a specific cloud can use the relevant implementation of this method as a guide.
     *
     * @param newFlags additional creation instructions for the sub-location
     * @return a new {@link MachineProvisioningLocation} of the same type, configured with the given flags
     */
    MachineProvisioningLocation<T> newSubLocation(Map<?,?> newFlags);

    /**
     * Release a previously-obtained machine.
     *
     * @param machine a {@link MachineLocation} previously obtained from a call to {@link #obtain(Map)}
     * @throws IllegalStateException if the machine did not come from a call to {@link #obtain(Map)} or it has already been released.
     */
    @Override
    void release(T machine);

    /**
     * Gets flags, suitable as an argument to {@link #obtain(Map)}. The tags provided give
     * hints about the machine required. The provisioning-location could be configured to
     * understand those tags.
     *
     * For example, an AWS-location could be configured to understand that a particular entity
     * type (e.g. "TomcatServer") requires a particular AMI in that region, so would return the
     * required image id.
     *
     * @param tags hints describing the kind of machine required
     * @return provisioning flags derived from the tags, suitable for passing to {@link #obtain(Map)}
     */
    Map<String,Object> getProvisioningFlags(Collection<String> tags);
}
|
#include <pic.h>
#include <xc.h>
#include <stdio.h>
#include "LCD.h"
#include "main.h"
#include "config.h"
#include "timer.h"
#include "SPI.h"
#include "SD.h"
#include "DAC.h"
#include "wave.h"
#include "error.h"
#include "buttons.h"
/* ---- Shared state between the audio ISR, superloop tasks and SD streaming ---- */
unsigned char sdata_lo;        // low byte of the 16-bit sample last read from SD
unsigned char sdata_hi;        // high byte of the 16-bit sample last read from SD
bool check_buttons = 0;        // set at each 512-byte block boundary; tells task_playing to grade the next sample's button byte
unsigned char first_byte = 0;  // first data byte of a block; bit 0 == 1 means a note is expected here
short number_of_errors = 0;    // count of wrongly-fingered notes, reported in task_analysis
short total_presses = 0;       // count of graded notes
bool previous_pause;           // previous sampled PAUSEBUTTON state (for edge detection)
bool current_pause;            // current sampled PAUSEBUTTON state
bool wasPaused = false;        // set by task_paused after resuming playback
//FATFS filesys;
/*
 * Record a fault code in the module-level `global_error` so other code can
 * inspect it later. The trailing nop is a convenient breakpoint target.
 */
void error(Error e) {
    // tell someone something went wrong
    global_error = e;
    __nop();
}
/*
 * One-time hardware bring-up: system clock, GPIO directions, SPI-attached
 * peripherals (LCD, DAC, SD) and the card-detect interrupt-on-change.
 * Leaves global interrupts enabled. Called once from main().
 */
void init() {
    // Set the system clock speed to 32MHz and wait for the ready flag.
    OSCCON = 0xF4;
    while(OSCSTATbits.HFIOFR == 0); // wait for clock to settle
    ANSB1 = 0;   // RB1 as digital I/O (reset button is read on RB1 elsewhere)
    TRISB4 = 0; //GREEN LED
    TRISB5 = 0; //RED LED
    TRISC0 = 0;
    TRISC1 = 0;
    TRISB0 = 0;
    TRISC6 = 0;
    TRISC7 = 0;
    TRISB0 = 1;  // NOTE(review): RB0 is set to output above, then input here -- confirm which is intended
    // LATCbits.LATC6 = 1;
    //Initialize all required peripherals.
    LCD_DESELECT();
    SD_DESELECT();
    SPI_Init();
    BrassButtons_Init();
    LCD_Init();
    DAC_Init();
    TRISA7 = 1; //CD interrupt pin
    IOCAN7 = 1; //detect a falling edge on RA7
    IOCIF = 0;   // clear interrupt-on-change flags before enabling
    IOCAF = 0;
    IOCIE = 1;   // enable interrupt-on-change (card-detect)
    INTCONbits.GIE = 1;  // global interrupt enable
    SD_Init(); // TODO: change to fatfs stuff
    // while(pf_mount(&filesys) != FR_OK);
}
/*
 * Interrupt service routine with two sources:
 *  - IOCAF7 (card-detect pin changed): treated as card removal; disables
 *    interrupts and calls card_removed(), which ends in RESET().
 *  - otherwise Timer2 (audio tick): pops one sample from lbuffer and loads
 *    the DAC. Consumer side of the ring buffer (modifies buffer_read_index).
 */
void __interrupt() isr(void) { // modifies buffer_read_index
    if (IOCAF7) {
        // CD INTERRUPT
        INTCONbits.GIE = 0; // disable all further interrupts
        card_removed();
    } else {
        // AUDIO INTERRUPT
        TMR2IF = 0;  // acknowledge the Timer2 interrupt
        unsigned short level = lbuffer[buffer_read_index++];
        DAC5REFH = (level & 0xff00) >> 8;  // top 8 bits of the sample
        // NOTE(review): (level & 0x00C0) << 8 shifts the masked bits above
        // bit 7, so an 8-bit SFR always receives 0 here. Was a right shift
        // (e.g. >> 6) intended to load the low DAC bits? Confirm against the
        // DAC register layout before changing.
        DAC5REFL = (level & 0x00C0) << 8;
        DAC5LD = 1;  // latch the new DAC output value
        if (buffer_read_index >= BUFFER_SIZE) buffer_read_index = 0;  // wrap ring buffer
    }
}
/*
 * Entry point: initialise hardware, show the start screen, open the file at
 * block address 0, start the sample timer, then run whatever task function
 * `task` points to forever (cooperative superloop).
 */
void main(void) {
    INTCONbits.GIE = 0;  // keep interrupts off during setup
    init();
    address = 0;         // first SD block address of the file
    TRISA6 = 0;          // RA6 as output (toggled in task_playing)
    // TRISA7 = 0;
    LCD_SELECT();
    LCD_DATA_MODE();
    task_startScreen();  // blocks until the user presses start
    LCD_Cmd(LCD_CLS);
    LCD_DATA_MODE();
    LCD_Print("Playing!");
    LCD_DESELECT();
    SD_SELECT();
    openFile(address);   // presumably parses the file header, setting channels/sampRate/dataLength used below -- confirm in wave.c
    if(channels != 2) samplePending = false;
    timer_Init(sampRate);  // Timer2 drives the DAC sample interrupt at sampRate
    task = task_playing;
    while(1) { // main super loop, will execute the function that is pointed to
    // by task
        task();
    }
}
//============================================================================//
//============================SUPERLOOP TASKS=================================//
/*
 * Splash-screen task: prompt the user, then block until the first valve
 * button (start) is pressed, followed by a short settling delay.
 */
void task_startScreen()
{
    LCD_Print("Brass2Go! Press\nstart to play");
    for (;;) {
        if (VALVE1 != 0) {
            break;       // start button pressed
        }
    }
    delay(500);          // let the press settle before playback begins
}
/*
 * Main playback task. Streams one 512-byte SD block at a time into the ring
 * buffer consumed by the audio ISR, grades the player's valve buttons once
 * per block, and watches for the pause button and end of data.
 *
 * Returns only by switching `task` to task_analysis (end of data) or
 * task_paused (pause-button rising edge).
 */
void task_playing()
{
    while(1)
    {
        if (blockIndex >= 512) { // end of block condition
            DAC_INT(0);  // hold off audio interrupts while touching the SD stream
            // Check for end of the file
            if (byteCounter >= dataLength) {
                PIE1bits.TMR2IE = 0;  // stop the sample timer for good
                SD_CloseStream();
                SD_DESELECT();
                task = task_analysis;
                return;
            }
            // Read 4 CRC bytes at the end of the block
            SPI_POKE();
            SPI_POKE();
            SPI_POKE();
            SPI_POKE();
            DAC_INT(1);
            check_buttons = true;  // grade the first sample of the next block
            ++address;
            blockIndex = 0;
            // Check for 0 -> 1 transition of the pause button
            current_pause = PAUSEBUTTON;
            if(current_pause && !previous_pause) {
                task = task_paused;
                return;
            }
            previous_pause = current_pause;
        } else {
            DAC_INT(0); // disable timer interrupts while accessing buffer_read_index
            if (buffer_write_index != buffer_read_index) { // read into the buffer if there's space
                DAC_INT(1);
                if (channels == 1) {
                    LATA6 = 1;
                    // Read 16 bit sample into sdata_lo/hi
                    SPI_READ(sdata_lo);
                    SPI_READ(sdata_hi);
                    //Write to the buffer (0x8000 subtracted from the assembled 16-bit value)
                    lbuffer[ buffer_write_index ] = ((sdata_hi << 8) | sdata_lo) - 0x8000;
                    // LATA6 = 0;
                    byteCounter += 2;
                    blockIndex += 2;
                    //CHECK IF THE CORRECT BUTTONS ARE PRESSED AND
                    //ADD TO THE NUMBER OF ERRORS IF IT IS WRONG
                    if(check_buttons) {
                        check_buttons = false;
                        first_byte = sdata_lo;
                        if(first_byte % 2 == 1){ //IF BIT 0 == 1
                            // note is to be played here, so check which buttons are depressed
                            ++total_presses;
                            if(!Check_Buttons(first_byte))
                            {
                                // The buttons were wrong
                                ONRED
                                OFFGREEN
                                ++number_of_errors;
                            }else
                            {
                                OFFRED
                                ONGREEN
                            }
                        }
                    }
                } else { // channels != 1 e.g. file is stereo
                    // Stereo files are unsupported: show an error, wait for reset.
                    DAC_INT(0);
                    SD_DESELECT();
                    LCD_Cmd(LCD_CLS);
                    LCD_Print("File is not mono");
                    while(1)
                    {
                        // RESET
                        if(PORTBbits.RB1 == 1)
                        {
                            delay(250);
                            LCD_Cmd(LCD_CLS);
                            RESET();
                        }
                    }
                }// end if(channels == 1) -- else
                if (++buffer_write_index >= BUFFER_SIZE) buffer_write_index = 0;  // wrap producer index
            } else DAC_INT(1);  // buffer full: re-enable audio interrupts and retry
        }
    }
}
/*
 * Pause task: close the SD stream, show "Paused", and wait for the next
 * rising edge of the pause button. On resume, re-open the stream at the
 * current block address, reset the ring-buffer indices and hand control
 * back to task_playing. Pressing VALVE1 while paused resets the device.
 */
void task_paused()
{
    //Disable SD Card SPI interface and enable LCD
    DAC_INT(0);
    SD_CloseStream();
    SD_DESELECT();
    LCD_Cmd(LCD_CLS);
    LCD_Print("Paused");
    do {
        previous_pause = current_pause;
        current_pause = PAUSEBUTTON;
        //RESET
        if(VALVE1)
        {
            delay(250);
            LCD_Cmd(LCD_CLS);
            RESET();
        }
    } while(!(!previous_pause && current_pause));  // spin until a 0 -> 1 edge
    previous_pause = true; // prevent a new pause from being triggered
    LCD_Cmd(LCD_CLS); //Clear display
    LCD_Print("Playing!");
    LCD_DESELECT();
    SD_SELECT();
    // Re-open SD card at the last address
    SD_OpenStream(address);
    //Reset buffer
    buffer_read_index = 0;
    buffer_write_index = 1;
    task = task_playing;
    // delay(750);// Delay playback so user can get ready to play the next note
    wasPaused = true;
}
/*
 * End-of-song task: display how many graded notes were fingered wrongly and
 * the percentage correct, then wait for the reset button (RB1) and restart.
 */
void task_analysis()
{
    LCD_Cmd(LCD_CLS);
    char message[34];
    // Guard against a file that graded no notes: the old code computed 0/0,
    // printing nan/inf through %f.
    float percent_correct = (total_presses > 0)
        ? 100 * (float)(total_presses - number_of_errors) / total_presses
        : 0.0f;
    // snprintf instead of sprintf so large counts can never overrun message[].
    snprintf(message, sizeof(message), "%d/%d wrong\n%.2f%% correct",
             number_of_errors, total_presses, percent_correct);
    LCD_Print(message);
    while(!PORTBbits.RB1);  // wait for the reset button
    // Reset
    delay(250);             // debounce before restarting
    LCD_Cmd(LCD_CLS);
    RESET();
}
/*
 * Handle SD-card removal (called from the ISR): silence the DAC, prompt the
 * user to re-insert a card, then reset once the card-detect pin reports a
 * card again. Never returns.
 */
void card_removed() {
    INTCONbits.GIE = 0; // disable all interrupts
    DAC5REFH = 0; //set the DAC to zero to avoid a weird whine
    DAC5REFL = 0;
    DAC5LD = 1;   // latch the zero value into the DAC
    delay(5);
    SD_DESELECT();
    LCD_Cmd(LCD_CLS);
    LCD_Print("NO CARD DETECTED\nPlease insert SD");
    delay(500);
    while(!SD_CD_PIN);  // wait until card-detect reports a card present
    RESET();
}
|
<filename>Modules/Visualization/MonteverdiGui/src/mvdProjectionBarWidget.cxx
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "mvdProjectionBarWidget.h"
#include "ui_mvdProjectionBarWidget.h"
namespace mvd
{
/*
 * Construct the projection bar widget and set up its generated (Designer) UI.
 */
ProjectionBarWidget::ProjectionBarWidget( QWidget* p, Qt::WindowFlags flags ) :
  QWidget( p, flags )
  , m_UI( new mvd::Ui::ProjectionBarWidget() )
{
  m_UI->setupUi( this );
}
/*
 * Destroy the widget and release the generated UI object.
 */
ProjectionBarWidget::~ProjectionBarWidget()
{
  delete m_UI;
  m_UI = NULL;  // defensive: avoid a dangling pointer after deletion
}
/*
 * Display the current projection scale in the line-edit as a ratio string:
 * magnification (scale_x > 1) renders as "S:1", reduction (scale_x < 1) as
 * "1:S", and exactly 1.0 as "1:1". The second (y-scale) argument is unused.
 */
void ProjectionBarWidget::SetProjectionScale(double scale_x, double )
{
  QString text;

  if( scale_x > 1.0 )
    {
    text = QString( "%1:1" ).arg( scale_x );
    }
  else if( scale_x < 1.0 )
    {
    text = QString( "1:%1" ).arg( 1.0 / scale_x );
    }
  else
    {
    text = "1:1";
    }

  m_UI->projectionScaleLineEdit->setText( text );
}
/*
 * Qt auto-connected slot: Enter was pressed in the scale line-edit.
 * Parse and apply the typed scale.
 */
void ProjectionBarWidget::on_projectionScaleLineEdit_returnPressed()
{
  ChangeScale();
}
/*
 * Qt auto-connected slot: the scale line-edit lost focus.
 * Only re-apply the scale when the user actually modified the text, so
 * programmatic updates via setText() do not retrigger a change.
 */
void ProjectionBarWidget::on_projectionScaleLineEdit_editingFinished()
{
  if( !m_UI->projectionScaleLineEdit->isModified() )
    return;

  ChangeScale();
}
/*
 * Parse the "N", "N:1" or "N:D" text in the scale line-edit and emit
 * ProjectionScaleChanged( N / D ).
 *
 * Silently ignores invalid input: empty text, more than one ':', a
 * non-numeric or zero numerator, and (fixed here) a non-numeric or zero
 * denominator — "1:0" previously caused a division by zero and emitted an
 * infinite scale.
 */
void ProjectionBarWidget::ChangeScale()
{
  // Cancel if scale text is empty.
  if( m_UI->projectionScaleLineEdit->text().isEmpty() )
    return;

  // Split scale text.
  QStringList scale( m_UI->projectionScaleLineEdit->text().split( ':' ) );

  if( scale.size()!=1 && scale.size()!=2 )
    return;

  // Convert scale numerator.
  bool isOk = true;
  double numerator = scale.front().toDouble( &isOk );

  if( !isOk || numerator==0.0 )
    return;

  // Convert scale denominator.
  double denominator = 1.0;

  if( scale.size()>1 )
    {
    denominator = scale.back().toDouble( &isOk );

    // Reject a malformed or zero denominator to avoid division by zero.
    if( !isOk || denominator==0.0 )
      return;
    }

  // Emit scale changed.
  emit ProjectionScaleChanged( numerator / denominator );
}
}
|
-- Schema migration for PIM_NOTE: add CLIENT_X/CLIENT_Y (presumably the note's
-- client-side screen position -- confirm against the UI code) and a STATUS
-- text column.
ALTER TABLE PIM_NOTE ADD COLUMN CLIENT_X INT;
ALTER TABLE PIM_NOTE ADD COLUMN CLIENT_Y INT;
ALTER TABLE PIM_NOTE ADD COLUMN STATUS VARCHAR(50);
|
#!/usr/bin/env zsh
# Hard-sync a git checkout with its remote: fetch everything, then reset the
# current branch to origin/<branch>, discarding local changes.
main() {
  local target=$1
  [[ ! $target ]] && target='.'

  # Current branch name. git's own plumbing is more robust than parsing
  # `git branch` output for the '*' marker.
  local br
  br=$(git -C "$target" rev-parse --abbrev-ref HEAD)

  git -C "$target" fetch --all
  git -C "$target" reset --hard "origin/${br}"
}

# "$@" preserves argument word boundaries (the old `main $*` split paths
# containing spaces).
main "$@"
|
package config
import (
"io/ioutil"
"os"
"strconv"
"gopkg.in/yaml.v2"
)
// Config is the main Configuration struct
type Config struct {
// MeshName is the name of the mesh to form or to join
MeshName string `yaml:"mesh-name"`
// NodeName is the name of the current node. If not set it
// will be formed from the mesh ip assigned
NodeName string `yaml:"node-name"`
// Bootstrap is the config part for bootstrap mode
Bootstrap *BootstrapConfig `yaml:"bootstrap,omitempty"`
// Join is the config part for join mode
Join *JoinConfig `yaml:"join,omitempty"`
// Wireguard is the configuration part for wireguard-related settings
Wireguard *WireguardConfig `yaml:"wireguard,omitempty"`
// Agent contains optional agent configuration
Agent *AgentConfig `yaml:"agent,omitempty"`
// UI contains web user interface configuration
UI *UIConfig `yaml:"ui,omitempty"`
// MemberlistFile is an optional setting. If set, node information is written
// here periodically
MemberlistFile string `yaml:"memberlist-file"`
}
// BootstrapConfig contains condfiguration parts for bootstrap mode
type BootstrapConfig struct {
// MeshCIDRRange is the CIDR (e.g. 10.232.0.0/16) to be used for the mesh
// when assigning new mesh-internal ip addresses
MeshCIDRRange string `yaml:"mesh-cidr-range"`
// MeshIPAMCIDRRange is an optional setting where this is a subnet of
// MeshCIDRRange and IP addresses are assigned only from this range
MeshIPAMCIDRRange string `yaml:"mesh-ipam-cidr-range"`
// NodeIP sets the internal mesh ip of this node (e.g. .1 for a given subnet)
NodeIP string `yaml:"node-ip"`
// GRPCBindAddr is the ip address where bootstrap node expose their
// gRPC intnerface and listen for join requests
GRPCBindAddr string `yaml:"grpc-bind-addr"`
// GRPCBindPort is the port number where bootstrap node expose their
// gRPC intnerface and listen for join requests
GRPCBindPort int `yaml:"grpc-bind-port"`
// GRPCTLSConfig is the optional TLS settings struct for the gRPC interface
GRPCTLSConfig *BootstrapGRPCTLSConfig `yaml:"grpc-tls,omitempty"`
// MeshEncryptionKey is an optional key for symmetric encryption of internal mesh traffic.
// Must be 32 Bytes base64-ed.
MeshEncryptionKey string `yaml:"mesh-encryption-key"`
// SerfModeLAN activates LAN mode or cluster communication. Default is false (=WAN mode).
SerfModeLAN bool `yaml:"serf-mode-lan"`
}
// JoinConfig contains condfiguration parts for join mode
type JoinConfig struct {
// BootstrapEndpoint is the IP:Port of remote mesh bootstrap node.
BootstrapEndpoint string `yaml:"bootstrap-endpoint"`
// ClientKey points to PEM-encoded private key to be used by the joining client when dialing the bootstrap node.
ClientKey string `yaml:"client-key"`
// ClientCert points to PEM-encoded certificate be used by the joining client when dialing the bootstrap node.
ClientCert string `yaml:"client-cert"`
// ClientCaCert points to PEM-encoded CA certificate.
ClientCaCert string `yaml:"ca-cert"`
}
// BootstrapGRPCTLSConfig contains settings necessary for configuration TLS for the bootstrap node
type BootstrapGRPCTLSConfig struct {
// GRPCServerKey points to PEM-encoded private key to be used by grpc server.
GRPCServerKey string `yaml:"grpc-server-key"`
// GRPCServerCert points to PEM-encoded certificate be used by grpc server.
GRPCServerCert string `yaml:"grpc-server-cert"`
// GRPCCaCert points to PEM-encoded CA certificate.
GRPCCaCert string `yaml:"grpc-ca-cert"`
// GRPCCaPath points to a directory containing PEM-encoded CA certificates.
GRPCCaPath string `yaml:"grpc-ca-path"`
}
// WireguardConfig contains wireguard-related settings
type WireguardConfig struct {
// ListenAddr is the ip address where wireguard should listen for packets
ListenAddr string `yaml:"listen-addr"`
// ListenPort is the (external) wireguard listen port
ListenPort int `yaml:"listen-port"`
}
// AgentConfig contains settings for the gRPC-based local agent
type AgentConfig struct {
// GRPCBindSocket is the local socket file to bind grpc agent to.
GRPCBindSocket string `yaml:"agent-grpc-bind-socket"`
// GRPCBindSocketIDs of the form <uid:gid> to change bind socket to.
GRPCBindSocketIDs string `yaml:"agent-grpc-bind-socket-id"`
// GRPCSocket is the local socket file, used by agent clients.
GRPCSocket string `yaml:"agent-grpc-socket"`
}
// UIConfig contains config entries for the web user interface
type UIConfig struct {
HTTPBindAddr string `yaml:"http-bind-addr"`
HTTPBindPort int `yaml:"http-bind-port"`
}
// LoadConfigFromFile reads yaml config file from given path
func (cfg *Config) LoadConfigFromFile(path string) error {
b, err := ioutil.ReadFile(path)
if err != nil {
return err
}
if err = yaml.Unmarshal(b, cfg); err != nil {
return err
}
return nil
}
// NewDefaultConfig creates a default configuration with valid presets.
// These presets can be used with `-dev` mode.
func NewDefaultConfig() Config {
return Config{
MeshName: envStrWithDefault("WGMESH_MESH_NAME", ""),
NodeName: envStrWithDefault("WGMESH_NODE_NAME", ""),
Bootstrap: &BootstrapConfig{
MeshCIDRRange: envStrWithDefault("WGMESH_CIDR_RANGE", "10.232.0.0/16"),
MeshIPAMCIDRRange: envStrWithDefault("WGMESH_CIDR_RANGE_IPAM", ""),
NodeIP: envStrWithDefault("WGMESH_MESH_IP", "10.232.1.1"),
GRPCBindAddr: envStrWithDefault("WGMESH_GRPC_BIND_ADDR", "0.0.0.0"),
GRPCBindPort: envIntWithDefault("WGMESH_GRPC_BIND_PORT", 5000),
GRPCTLSConfig: &BootstrapGRPCTLSConfig{
GRPCServerKey: envStrWithDefault("WGMESH_SERVER_KEY", ""),
GRPCServerCert: envStrWithDefault("WGMESH_SERVER_CERT", ""),
GRPCCaCert: envStrWithDefault("WGMESH_CA_CERT", ""),
GRPCCaPath: envStrWithDefault("WGMESH_CA_PATH", ""),
},
MeshEncryptionKey: envStrWithDefault("WGMESH_ENCRYPTION_KEY", ""),
SerfModeLAN: envBoolWithDefault("WGMESH_SERF_MODE_LAN", false),
},
Join: &JoinConfig{
BootstrapEndpoint: envStrWithDefault("WGMESH_BOOTSTRAP_ADDR", ""),
ClientKey: envStrWithDefault("WGMESH_CLIENT_KEY", ""),
ClientCert: envStrWithDefault("WGMESH_CLIENT_CERT", ""),
ClientCaCert: envStrWithDefault("WGMESH_CA_CERT", ""),
},
Wireguard: &WireguardConfig{
ListenAddr: envStrWithDefault("WGMESH_WIREGUARD_LISTEN_ADDR", ""),
ListenPort: envIntWithDefault("WGMESH_WIREGUARD_LISTEN_PORT", 54540),
},
Agent: &AgentConfig{
GRPCBindSocket: envStrWithDefault("WGMESH_AGENT_BIND_SOCKET", "/var/run/wgmesh.sock"),
GRPCBindSocketIDs: envStrWithDefault("WGMESH_AGENT_BIND_SOCKET_ID", ""),
GRPCSocket: envStrWithDefault("WGMESH_AGENT_SOCKET", "/var/run/wgmesh.sock"),
},
UI: &UIConfig{
HTTPBindAddr: envStrWithDefault("WGMESH_HTTP_BIND_ADDR", "127.0.0.1"),
HTTPBindPort: envIntWithDefault("WGMESH_HTTP_BIND_PORT", 9095),
},
MemberlistFile: envStrWithDefault("WGMESH_MEMBERLIST_FILE", ""),
}
}
func envStrWithDefault(key string, defaultValue string) string {
res := os.Getenv(key)
if res == "" {
return defaultValue
}
return res
}
func envBoolWithDefault(key string, defaultValue bool) bool {
res := os.Getenv(key)
if res == "" {
return defaultValue
}
if res == "1" || res == "true" || res == "on" {
return true
}
return false
}
func envIntWithDefault(key string, defaultValue int) int {
res := os.Getenv(key)
if res == "" {
return defaultValue
}
v, err := strconv.Atoi(res)
if err != nil {
return -1
}
return v
}
|
/*
* #%L
* Common package for I/O and related utilities
* %%
* Copyright (C) 2005 - 2016 Open Microscopy Environment:
* - Board of Regents of the University of Wisconsin-Madison
* - Glencoe Software, Inc.
* - University of Dundee
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package loci.common;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
/**
* Provides random access to URLs using the IRandomAccess interface.
* Instances of URLHandle are read-only.
*
* @see IRandomAccess
* @see StreamHandle
* @see java.net.URLConnection
*
* @author <NAME> <EMAIL>
*/
public class URLHandle extends StreamHandle {
// -- Fields --
/** URL of open socket */
private String url;
/** Socket underlying this stream */
private URLConnection conn;
// -- Constructors --
/**
* Constructs a new URLHandle using the given URL.
*
* @param url the fully qualified URL path
* @throws IOException if the URL is invalid or unreadable
*/
public URLHandle(String url) throws IOException {
if (!url.startsWith("http") && !url.startsWith("file:")) {
url = "http://" + url;
}
this.url = url;
resetStream();
}
// -- IRandomAccess API methods --
/* @see IRandomAccess#seek(long) */
@Override
public void seek(long pos) throws IOException {
if (pos < fp && pos >= mark) {
stream.reset();
fp = mark;
skip(pos - fp);
}
else super.seek(pos);
}
// -- StreamHandle API methods --
/* @see StreamHandle#resetStream() */
@Override
protected void resetStream() throws IOException {
conn = (new URL(url)).openConnection();
stream = new DataInputStream(new BufferedInputStream(
conn.getInputStream(), RandomAccessInputStream.MAX_OVERHEAD));
fp = 0;
mark = 0;
length = conn.getContentLength();
if (stream != null) stream.mark(RandomAccessInputStream.MAX_OVERHEAD);
}
// -- Helper methods --
/** Skip over the given number of bytes. */
private void skip(long bytes) throws IOException {
while (bytes >= Integer.MAX_VALUE) {
bytes -= skipBytes(Integer.MAX_VALUE);
}
int skipped = skipBytes((int) bytes);
while (skipped < bytes) {
int n = skipBytes((int) (bytes - skipped));
if (n == 0) break;
skipped += n;
}
}
}
|
<form action="contact-us.php" method="post">
<div>
<label for="name">Name:</label>
<input type="text" name="name" id="name" />
</div>
<div>
<label for="email">Email:</label>
<input type="email" name="email" id="email" />
</div>
<div>
<label for="message">Message:</label>
<textarea name="message" id="message" cols="30" rows="10"></textarea>
</div>
<div>
<input type="submit" value="Submit" />
</div>
</form> |
import datetime
import requests
import numpy as np
import yaml
def process_and_save_data():
try:
response = requests.get("https://api.example.com/data")
response.raise_for_status() # Raise an error for 4xx or 5xx status codes
data = response.json()
except requests.RequestException as e:
raise ConnectionError("Failed to retrieve data from the API") from e
if "values" not in data:
raise ValueError("Invalid JSON format: missing 'values' field")
mean_value = np.mean(data["values"])
processed_data = {"mean_value": mean_value, "timestamp": str(datetime.datetime.now())}
try:
with open("processed_data.yaml", "w") as file:
yaml.dump(processed_data, file)
except Exception as e:
print("Failed to save processed data:", e)
# Example usage
process_and_save_data() |
import React, { useEffect, useState } from "react";
import { useNavigate } from "react-router-dom";
import axios from "axios";
import { useAuth } from "../contexts/authContext";
import Navigation from "./Navigate";
import { Button } from "@material-ui/core";
import { DetailsSharp } from "@material-ui/icons";
import "./SellerDashBoard.scss";
export const SellerDashboard = () => {
const [shops, setShops] = useState([]);
const { token } = useAuth();
const [refresh, setRefresh] = useState(false);
const [tempList, setTempList] = useState([]);
const [searchTxt, setSearchTxt] = useState("");
const [seller, setSeller] = useState();
const searching = (txt) => {
setSearchTxt(txt);
var result = tempList.filter((item) => {
return item.buyer[0].shopName.toLowerCase().includes(txt.toLowerCase());
});
setShops(result);
};
const handelmyorder = () => {
navigate("/mystock");
};
const Details = (id) => {
console.log("clicked");
navigate(`/sellerOrderDetails/${id}`);
};
const handelproducts = () => {
navigate("/home");
};
const navigate = useNavigate();
useEffect(() => {
(async () => {
try {
const response = await axios.get("http://localhost:5000/seller/orders", {
headers: { "x-access-token": token },
});
console.log(response.data);
setShops(response.data.reverse());
setTempList(response.data);
} catch (error) {}
})();
const getSellerProfile = async () => {
const response = await axios.get("http://localhost:5000/seller/profile", {
headers: { "x-access-token": token },
});
const data = await response.data;
console.log("seller profile", data);
setSeller(data);
};
getSellerProfile();
}, [token]);
const [state, setState] = useState("");
const [sstate, ssetState] = useState("");
const handleClick1 = (id) => {
setState({
button: !state.button,
});
navigate("/mystock", id);
};
const handleClick2 = () => {
setState({
button: !state.button,
});
navigate("/home");
};
return (
<>
<Navigation />
<div>
{seller && <h3 className="greeting">Hi, {seller.fullname}</h3>}
<p className="interacting">Here are your orders </p>
</div>
<div>
<input
placeholder="search for Buyers Shops"
onChange={(event) => searching(event.target.value)}
className="searchBar"
type="search"
name=""
id=""
/>
</div>
<div className="home-bottom">
{/* <div className="buttons"> */}
{/* <div className="container">
<button style={{width:400,marginLeft:30,height:20}} className={state.button ? "buttonTrue": "buttonFalse"} onClick={()=>handleClick1()}>Orders</button>
<button className={state.button ? "buttonTrue": "buttonFalse"} onClick={()=>handleClick1()}> <i className="far fa-smile"></i>See My Stocks</button>
</div> */}
<button
style={{ width: "50%", height: 40 }}
className={state.button ? "buttonFalse" : "buttonTrue"}
onClick={() => handleClick2()}
>
Orders
</button>
<button
style={{ width: "50%", height: 40 }}
className={state.button ? "buttonTrue" : "buttonFalse"}
onClick={() => handleClick1()}
>
See My Stocks
</button>
{/* </div> */}
<div className="home-cards">
{shops.map((shop) => {
return (
<div key={shop._id} className="home-card">
<div
style={{
height: "70px",
backgroundColor: "",
display: "flex",
flexDirection: "column",
}}
>
<div
style={{
width: "50px",
height: "70px",
display: "flex",
flexDirection: "column",
}}
>
<div style={{ zIndex: "10" }}>
<img src={shop.products[0].img} alt="img" width="50px" height="70px" />
</div>
<div
style={{
// marginTop: 40,
marginLeft: 4,
marginBottom: 0,
display: "flex",
flexDirection: "row",
}}
>
<div style={{ marginTop: 3, fontSize: "smaller" }}>
<b>Status:</b>
<br></br> <b>{shop.status}</b>
</div>
<div
className="whatsapp detailsOver"
// style={{
// marginLeft: "40rem",
// cursor: "pointer",
// zIndex: 10,
// width: 92,
// height: 40,
// }}
onClick={() => {
Details(shop._id);
}}
>
{/* <img src={require("../images/whatsappicon.png").default} alt="" /> */}
<span className="whatsapp" style={{ color: "white" }}>
ViewDetails
</span>
</div>
</div>
</div>
</div>
<div
className="home-card--content"
style={{ marginBottom: "3rem", marginLeft: "1rem" }}
>
{/* <button
onClick={() => {
console.log(shop.orderDetails.orderId);
}}
>
click
</button> */}
<p>#{shop.orderDetails[0].OrderId}</p>
<h2>Order By: {shop.buyer[0].shopName} </h2>
<h3>Bill: INR {shop.bill} </h3>
<h3>Payment Mode: {shop.orderDetails[0]["Payment Mode"]}</h3>
Items: {shop.products.length} Quantity:{shop.quantity}
</div>
</div>
);
})}
</div>
</div>
</>
);
};
export default SellerDashboard;
|
<filename>api/scheduler.go
package main
import (
"time"
"github.com/kansuke231/go-with-vue/api/database"
"github.com/kansuke231/go-with-vue/api/models"
)
// schedule spawns a go routine that updates BestNews every delay specified.
func schedule(db *database.DB, bestNews *models.BestNews, update func(db *database.DB, s *models.BestNews), delay time.Duration) {
go func() {
for {
// Every delay (e.g. 5 minutes), update() gets executed.
select {
case <-time.After(delay):
update(db, bestNews)
}
}
}()
}
func updateBestNews(db *database.DB, bestNews *models.BestNews) {
bestNews.TopRated = db.GetBestNews()
bestNews.Created = time.Now().String()
}
func generateBestNews(db *database.DB) *models.BestNews {
return &models.BestNews{TopRated: db.GetBestNews(), Created: time.Now().String()}
}
|
# ------------------------------------
# Docker alias and function
# ------------------------------------
# Get latest container ID
alias dl="docker ps -l -q"
# Get container process
alias dps="docker ps"
# Get process included stop container
alias dpa="docker ps -a"
# Get images
alias di="docker images"
# Get container IP
alias dip="docker inspect --format '{{ .NetworkSettings.IPAddress }}'"
# Run deamonized container, e.g., $dkd base /bin/echo hello
alias dkd="docker run -d -P"
# Run interactive container, e.g., $dki base /bin/bash
alias dki="docker run -i -t -P"
# Execute interactive container, e.g., $dex base /bin/bash
alias dex="docker exec -i -t"
# Stop all containers
dstop() { docker stop $(docker ps -a -q); }
# Remove all containers
drm() { docker rm $(docker ps -a -q); }
# Stop and Remove all containers
alias drmf='docker stop $(docker ps -a -q) && docker rm $(docker ps -a -q)'
# Remove all images
dri() { docker rmi $(docker images -q); }
# Dockerfile build, e.g., $dbu tcnksm/test
dbu() { docker build -t=$1 .; }
# Show all alias related docker
dalias() { alias | grep 'docker' | sed "s/^\([^=]*\)=\(.*\)/\1 => \2/"| sed "s/['|\']//g" | sort; }
# Bash into running container
dbash() { docker exec -it $(docker ps -aqf "name=$1") bash; } |
import asyncio
from tornado.websocket import websocket_connect
class Client(object):
async def reg_dts(self):
self.conn = await websocket_connect(
"ws://127.0.0.1:8888/instance/dts/")
await self.conn.write_message("node")
while True:
await self.conn.write_message("ok")
msg = await self.conn.read_message()
if msg is None:
break
print(msg)
asyncio.run(main())
|
def classify(item):
item_types = {
'Apple': 'fruit',
'Ball': 'toy',
'Cat' : 'pet',
'Dog' : 'pet'
}
return item_types.get(item, 'unknown') |
<reponame>saiki/google-photos-uploader
package jp.saiki;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.util.store.FileDataStoreFactory;
import com.google.api.gax.rpc.ApiException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.photos.library.v1.PhotosLibraryClient;
import com.google.photos.library.v1.proto.BatchCreateMediaItemsResponse;
import com.google.photos.library.v1.proto.NewMediaItem;
import com.google.photos.library.v1.proto.NewMediaItemResult;
import com.google.photos.library.v1.upload.UploadMediaItemRequest;
import com.google.photos.library.v1.upload.UploadMediaItemResponse;
import com.google.photos.library.v1.upload.UploadMediaItemResponse.Error;
import com.google.photos.library.v1.util.NewMediaItemFactory;
import com.google.photos.types.proto.MediaItem;
import com.google.rpc.Code;
import com.google.rpc.Status;
import jp.saiki.factories.PhotosLibraryClientFactory;
/**
* Hello world!
*
*/
public class App {
private static final List<String> REQUIRED_SCOPES = ImmutableList.of(
"https://www.googleapis.com/auth/photoslibrary.readonly",
"https://www.googleapis.com/auth/photoslibrary.appendonly");
private static final int BATCH_CREATION_LIMIT = 50;
private static final java.io.File DATA_STORE_DIR =
new java.io.File(System.getProperty("user.home"), ".store/google_photos_uploader");
public static void main(final String... args) {
var credential = Paths.get(System.getProperty("credential"));
var recursive = Boolean.getBoolean("recursive");
var depth = recursive ? Integer.MAX_VALUE : 1;
if (args.length < 2) {
throw new IllegalArgumentException();
}
var user = args[0];
var root = Paths.get(args[1]);
try {
HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
FileDataStoreFactory dataStoreFactory = new FileDataStoreFactory(DATA_STORE_DIR);
try (PhotosLibraryClient client = PhotosLibraryClientFactory.createClient(user, credential, REQUIRED_SCOPES, httpTransport, dataStoreFactory)) {
List<String> uploadTokens = Files.walk(root, depth).filter( path -> Files.isRegularFile(path) ).map( f -> {
try {
return upload(client, f);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
return null;
}).collect(Collectors.toList());
List<MediaItem> created = createMediaItem(client, uploadTokens);
System.out.println(created);
for (MediaItem item : created) {
System.out.println(item);
System.out.println(item.getFilename());
System.out.println(item.getProductUrl());
}
} catch (IOException | GeneralSecurityException e) {
e.printStackTrace();
}
} catch(GeneralSecurityException | IOException e) {
e.printStackTrace();;
}
}
public static String upload(PhotosLibraryClient client, Path imagePath) throws FileNotFoundException {
// Create a new upload request
// Specify the filename that will be shown to the user in Google Photos
// and the path to the file that will be uploaded
UploadMediaItemRequest uploadRequest =
UploadMediaItemRequest.newBuilder()
//filename of the media item along with the file extension
.setFileName(imagePath.toFile().getName())
.setDataFile(new RandomAccessFile(imagePath.toFile(), "r"))
.build();
// Upload and capture the response
UploadMediaItemResponse uploadResponse = client.uploadMediaItem(uploadRequest);
if (uploadResponse.getError().isPresent()) {
// If the upload results in an error, handle it
Error error = uploadResponse.getError().get();
throw (ApiException) error.getCause();
}
return uploadResponse.getUploadToken().get();
}
public static List<MediaItem> createMediaItem(PhotosLibraryClient client, List<String> uploadTokens) {
List<NewMediaItem> newItems = uploadTokens.stream().map(token -> NewMediaItemFactory.createNewMediaItem(token)).collect(Collectors.toList());
List<List<NewMediaItem>> partitionNewItems = Lists.partition(newItems, BATCH_CREATION_LIMIT);
List<MediaItem> result = new ArrayList<>(newItems.size());
for (List<NewMediaItem> items : partitionNewItems) {
BatchCreateMediaItemsResponse response = client.batchCreateMediaItems(items);
for (NewMediaItemResult itemsResponse : response.getNewMediaItemResultsList()) {
Status status = itemsResponse.getStatus();
if (status.getCode() == Code.OK_VALUE) {
// The item is successfully created in the user's library
result.add(itemsResponse.getMediaItem());
} else {
// The item could not be created. Check the status and try again
}
}
}
return result;
}
}
|
package entity
import (
"errors"
"fmt"
"regexp"
)
const (
userNameMinLength = 1
userNameMaxLength = 64
)
/** UserID**/
func ParseUserID(value string) (UserID, error) {
return ParseID(value)
}
type UserID interface {
IsEmpty() bool
Validate() error
String() string
}
/** Name **/
type UserName interface {
Validate() error
String() string
}
func ParseUserName(value string) (UserName, error) {
name := userName(value)
if err := name.Validate(); err != nil {
return userName(""), err
}
return name, nil
}
type userName string
func (n userName) Validate() error {
if length := len(n); length < userNameMinLength || length > userNameMaxLength {
return errors.New("invalid user name")
}
return nil
}
func (n userName) String() string {
return string(n)
}
/** Email **/
type Email interface {
Validate() error
String() string
}
func ParseEmail(value string) (email, error) {
parsed := email(value)
if err := parsed.Validate(); err != nil {
return "", err
}
return parsed, nil
}
type email string
func (em email) String() string {
return string(em)
}
func (em email) Validate() error {
const pattern = "[a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\\.[a-zA-Z0-9-]+)*$"
if len(em) == 0 {
return errors.New("email is empty")
}
if regex := regexp.MustCompile(pattern); !regex.Match([]byte(em.String())) {
return fmt.Errorf("email format is invalid: \"%v\"", em)
}
return nil
}
/** User **/
type User interface {
UserID() UserID
Name() UserName
Email() Email
}
type user struct {
id UserID
name UserName
email Email
}
func NewUser(
id UserID,
name UserName,
email Email,
) User {
return &user{
id: id,
name: name,
email: email,
}
}
func (u *user) UserID() UserID {
return u.id
}
func (u *user) Name() UserName {
return u.name
}
func (u *user) Email() Email {
return u.email
}
/** Users **/
type Users []User
func (u Users) IsEmpty() bool {
return len(u) == 0
}
|
package edu.uw.tacoma.piggy.view.panel.gantt;
import java.util.List;
import edu.uw.tacoma.piggy.model.dao.TaskDAO;
import edu.uw.tacoma.piggy.model.entity.ProjectEntity;
import edu.uw.tacoma.piggy.model.entity.TaskEntity;
/**
* .... Java doc please
* @author <NAME>, <NAME>
*/
public class TaskListData
{
ProjectEntity project;
private List<TaskEntity> tasks;
/**
* @author Cuong_Tran
*
*/
public TaskListData(int projectID)
{
tasks = TaskDAO.listTask("where ProjectID = " + projectID);
}
/**
* Add new task into the list.
* @author <NAME>
* @param theTask the task to add
*/
public void add(final TaskEntity task)
{
if (tasks.add(task))
{
TaskDAO.insert(task); //Cuong test insert to Data base
}
}
/**
* update the task in the list
* @author <NAME>
* @param task the task to update
*/
public void update(final TaskEntity task)
{
for (int index = 0; index < tasks.size() - 1; index++)
{
TaskEntity current = tasks.get(index);
if (current.getTaskID() == task.getTaskID())
{
current.setDescription(task.getDescription());
current.setStartDate(task.getStartDate());
current.setDuration(task.getDuration());
current.setParentTask(task.getParentTask());
}
}
TaskDAO.update(task);
}
/**
* delete the task in the list
* @author <NAME>
* @param task the task needs to be deleted
*/
public void delete(final TaskEntity task)
{
if (tasks.remove(task))
{
TaskDAO.delete(task);
}
}
/**
* The method return the task
* @param index the index of the task
* @return the task at the given index
*/
public TaskEntity get(int index)
{
if (index < tasks.size() - 1)
return tasks.get(index);
return null;
}
public void clear()
{
tasks.clear();
}
/**
* The method returns the list of tasks
* @return
*/
public List<TaskEntity> get()
{
return tasks;
}
/**
* This returns the size of the list task.
* @author Cuong_Tran
* @return integer value of size.
*/
public int size()
{
return tasks.size();
}
/**
* @author Cuong_Tran
* @return return true if the list of task is empty.
*/
public boolean isEmpty()
{
return tasks.isEmpty();
}
/**
* @author Cuong_Tran
* Print out element in the list data to debug.
*/
// JUST FOR DEBUG
public String toString()
{
StringBuilder builder = new StringBuilder();
builder.append("List: \n");
for (TaskEntity task: tasks)
builder.append(task).append("\n");
return builder.toString();
}
}
|
package io.github.rcarlosdasilva.weixin.model.request.media;
import io.github.rcarlosdasilva.weixin.common.ApiAddress;
import io.github.rcarlosdasilva.weixin.model.request.base.BasicWeixinRequest;
/**
* 上传图文消息内的图片获取URL请求模型
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public class MediaAddMassImageRequest extends BasicWeixinRequest {
public MediaAddMassImageRequest() {
this.path = ApiAddress.URL_MEDIA_MASS_ADD_IMAGE;
}
}
|
package org.cnt.nots.asm;
/**
* @author lixinjie
* @since 2019-07-22
*/
public interface Z {
}
|
#!/usr/bin/env bash
set -e
GROUP_NAME=$1
if [ -z "$GROUP_NAME" ];
then
echo "Group name must be specified"
exit 1
fi
echo "Extracting values from group $GROUP_NAME"
OUTFILE=$GROUP_NAME.outfile.txt
rm -rf certs config
mkdir certs
mkdir config
jq -r '.["certs/core.crt"]' $OUTFILE > certs/core.crt
jq -r '.["certs/core.key"]' $OUTFILE > certs/core.key
jq -r '.["certs/root.ca.pem"]' $OUTFILE > certs/root.ca.pem
jq -r '.["config/config.json"]' $OUTFILE > config/config.json
tar cjvf $GROUP_NAME.tar.bz2 certs config
rm -rf certs config
|
#!/usr/bin/env bash
#
# A script that creates a sources.nix skeleton by looking for the latest
# commit in the specified branch.
#
# Usage: sources.sh sources-master.lst
#
# The format of each line of the input file is
# Attribute Owner Repos Branch
echo "# DO NOT EDIT"
echo "# This file has been generated by"
echo "# $ $0 $@"
echo
echo "{pkgs}:"
echo "{"
while read -r ATTRIBUTE OWNER REPOS BRANCH; do
tmpfile=$(mktemp)
tmpdir=$(mktemp -d)
echo " # Head of branch $BRANCH of repository github.com/$OWNER/$REPOS at $(date +'%F %T')"
COMMITID=$(curl --silent https://api.github.com/repos/$OWNER/$REPOS/branches/$BRANCH | jq -r '.commit.sha')
curl --silent -L https://github.com/$OWNER/$REPOS/archive/$COMMITID.tar.gz > $tmpfile
tar -C $tmpdir/ -xf $tmpfile
SHA256=$(find $tmpdir/ -maxdepth 1 -mindepth 1 -exec nix-hash --type sha256 --base32 '{}' \;)
rm -f $tmpfile
rm -rf $tmpdir
echo " $ATTRIBUTE = pkgs.fetchFromGitHub {"
echo " name = \"$ATTRIBUTE\";";
echo " owner = \"$OWNER\";";
echo " repo = \"$REPOS\";"
echo " rev = \"$COMMITID\";"
echo " sha256 = \"$SHA256\";"
echo " };"
done < $1
echo "}"
|
## VARIÁVEIS DE AMBIENTE ##
# definir a arquitetura
export ARCH=arm
export CROSS_COMPILE=arm-linux-gnueabihf-
export CC=arm-linux-gnueabihf-gcc
export RANLIB=arm-linux-gnueabihf-ranlib
export STRIP=arm-linux-gnueabihf-strip
# definir o caminho pasta de saída
export RAIZ="/home/felipe/texasSDK/board-support/glibc_build"
## INÍCIO ##
# abrir a pasta de saída
cd $RAIZ
# configurando a compilação com ./configure
/home/felipe/texasSDK/board-support/glibc-2.32-RASCUNHO/./configure \
--prefix= \
--host=arm-linux-gnueabihf
# limpar compilações anteriores
make distclean
# compilar
make -j9
# instalar
make -j9 install DESTDIR=$RAIZ
|
from Layers.layers import DenseEmbeddingNet, QNet, CNNEmbeddingNet, PolicyNet, ValueNet
class NeuralNetworkManager:
def __init__(self):
self.layers = []
def add_layer(self, layer_type, *args, **kwargs):
if layer_type == 'DenseEmbeddingNet':
layer = DenseEmbeddingNet(*args, **kwargs)
elif layer_type == 'QNet':
layer = QNet(*args, **kwargs)
elif layer_type == 'CNNEmbeddingNet':
layer = CNNEmbeddingNet(*args, **kwargs)
elif layer_type == 'PolicyNet':
layer = PolicyNet(*args, **kwargs)
elif layer_type == 'ValueNet':
layer = ValueNet(*args, **kwargs)
else:
raise ValueError("Invalid layer type")
self.layers.append(layer)
def get_layers_by_type(self, layer_type):
return [layer for layer in self.layers if isinstance(layer, globals()[layer_type])]
def get_all_layers(self):
return self.layers |
ck run program extra_env="export XOPENME_DUMP_MEMORY_INPUT_RGB=0 ; export XOPENME_DUMP_MEMORY_DEPTHRENDER=0 ; export XOPENME_DUMP_MEMORY_TRACKRENDER=0 ; export XOPENME_DUMP_MEMORY_VOLUMERENDER=0"
|
#!/bin/bash -eu
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
PATH=$PATH:$GOPATH/bin
protodir=../../pb
protoc --go_out=plugins=grpc:genproto -I $protodir $protodir/demo.proto
|
module.exports = function(user) {
localStorage.setItem("auth", JSON.stringify(user));
};
|
def two_sum(li, target):
    """Return True if any two *distinct* elements of ``li`` sum to ``target``.

    The previous version tested membership against a set of the whole list,
    so a single element could be paired with itself (e.g. ``two_sum([3], 6)``
    wrongly returned True). Scanning with a "seen so far" set fixes that
    while keeping O(n) time.

    Args:
        li: iterable of numbers (may be empty).
        target: the desired pair sum.

    Returns:
        bool: True if two elements at different positions sum to target.
    """
    seen = set()
    for num in li:
        # Only elements strictly before the current one can be its partner.
        if target - num in seen:
            return True
        seen.add(num)
    return False
#!/bin/sh
# One-shot setup helper: substitute the default Zigbee lamp ID in the
# Node-RED flow file with the friendly name passed as $1, then remove
# this script so it cannot run twice.
if [ -z "$1" ]
then
        echo "Keinen Friendlyname übergeben. Pflegen Sie die Lampe manuell im Nodered Interface ein!"
else
        # Replace every occurrence of the hard-coded device ID in place.
        sed -i 's/0x00158d000520ac5e/'$1'/g' /home/pi/apollo/node-red/data/flows_4532881a94df.json
fi
# Self-delete: assumes the script is run from its own directory.
rm lampupdate.sh
|
import stripe
# NOTE(review): placeholder test key -- in production load keys from an
# environment variable or secrets manager; never hard-code them in source.
stripe.api_key = 'sk_test_YOUR_KEY'
def process_payment(card_info):
    """Validate card details with Stripe and charge the given amount.

    Args:
        card_info: dict with 'number', 'exp_month', 'exp_year', 'cvc',
            'amount' (in cents) and 'token' (a pre-created Stripe source).

    Returns:
        bool: True if both the validation and the charge succeeded,
        False on any Stripe API failure.

    Note:
        The previous version tested ``if not stripe.Token.create(...)``,
        but the Stripe client signals failure by raising exceptions, never
        by returning a falsy value, so that branch was dead and charge
        errors propagated uncaught.
    """
    try:
        # Tokenizing the card acts as validation; Stripe raises a
        # StripeError (e.g. CardError) for invalid card data.
        stripe.Token.create(
            card={
                'number': card_info['number'],
                'exp_month': card_info['exp_month'],
                'exp_year': card_info['exp_year'],
                'cvc': card_info['cvc'],
            },
        )
        # NOTE(review): the charge uses a pre-existing token supplied by
        # the caller, not the token created above -- confirm this is the
        # intended flow.
        stripe.Charge.create(
            amount=card_info['amount'],
            currency='usd',
            source=card_info['token'],
            description='Example charge',
        )
    except stripe.error.StripeError:
        # Card declined, invalid data, network failure, etc. -- report
        # the payment as unsuccessful instead of crashing the caller.
        return False
    return True
import sys
from setuptools import setup, find_packages
# The README doubles as the long description shown on PyPI.
with open("README.rst") as fp:
    long_description = fp.read()
# Core runtime dependencies.
install_requires = [
    "requests>=2.12",
    "PyYAML",
    "six>=1.10.0",
    "tzlocal",
]
# Python 2 lacks the stdlib ipaddress module, so install the backport there.
if sys.version_info < (3,):
    install_requires.extend([
        "ipaddress",
    ])
setup(
    name="pykube",
    version="0.16a1",
    description="Python client library for Kubernetes",
    long_description=long_description,
    author="<NAME>.",
    author_email="<EMAIL>",
    license="Apache",
    url="https://github.com/kelproject/pykube",
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
    ],
    zip_safe=False,
    packages=find_packages(),
    # Optional integration: registers a transport plugin with HTTPie.
    entry_points={
        "httpie.plugins.transport.v1": [
            "httpie_pykube = pykube.contrib.httpie_plugin:PyKubeTransportPlugin"
        ],
    },
    install_requires=install_requires,
    # Extras for Google Cloud auth support: pip install pykube[gcp]
    extras_require={
        "gcp": [
            "google-auth",
            "jsonpath-ng",
        ]
    },
)
|
<gh_stars>100-1000
import { Controller } from "cx/ui";
// Fetch the current BTC price in USD from the CoinDesk API.
// Resolves with the float exchange rate; rejects when the HTTP
// response is not OK or the request itself fails.
const getBtcPrice = async () => {
  const response = await fetch("https://api.coindesk.com/v1/bpi/currentprice.json");
  if (!response.ok) throw new Error("Failed to fetch BTC price from CoinDesk.");
  const body = await response.json();
  return body.bpi.USD.rate_float;
};
// Controller that polls the BTC/USD price once a minute and publishes
// it to the store under "btcPrice".
export default class extends Controller {
    onInit() {
        // Refresh immediately, then on a 60-second interval.
        // The non-standard stage-1 bind operator (::this.fetchPrice) was
        // replaced with an arrow function: identical behavior without
        // depending on a Babel proposal plugin.
        this.timer = setInterval(() => this.fetchPrice(), 60 * 1000);
        this.fetchPrice();
    }

    onDestroy() {
        // Stop polling when the controller is torn down.
        clearInterval(this.timer);
    }

    fetchPrice() {
        // Fetch the latest price and write it to the store; fetch
        // failures are left unhandled here, matching prior behavior.
        getBtcPrice().then(p => {
            this.store.set("btcPrice", p);
        });
    }
}
|
<gh_stars>1-10
#ifndef TEXTURE_H
#define TEXTURE_H
#include "../stdfx.h"
#include <FreeImagePlus.h>
#include <boost/filesystem/path.hpp>
namespace Zephyr
{
    namespace Common
    {
        // Image resource loaded from disk via FreeImagePlus.
        // Default-constructed instances are invalid until loaded from a path.
        class ZEPHYR_COMMON_API Texture
        {
            public:
                Texture();
                // Constructs and immediately loads the image at 'path';
                // check isValid() afterwards to see whether loading worked.
                Texture(const boost::filesystem::path& path);
                virtual ~Texture();
                // True once an image has been successfully loaded.
                bool isValid() const;
                // Path the image was loaded from (empty if none).
                boost::filesystem::path getPath() const;
                // Returns the decoded pixel data.
                // NOTE(review): returns fipImage by value -- copies the whole
                // bitmap on every call; confirm a reference is not intended.
                fipImage getRawData();
            protected:
                // Loads 'path' into mFreeImageData; returns success.
                bool loadFromFile(const boost::filesystem::path& path);
            private:
                bool bValid;
                boost::filesystem::path mPath;
                fipImage mFreeImageData;
        };
    }
}
#endif
|
-- List every address registered to more than one distinct customer,
-- together with the number of customers at that address.
SELECT address, COUNT (DISTINCT customer_id)
FROM customer
GROUP BY address
HAVING COUNT (DISTINCT customer_id) > 1;
import csv
import os
def calculate_csv_column_sum(file_path: str, column_index: int) -> float:
    """Sum the numeric values found in one column of a CSV file.

    The first row is treated as a header and skipped. Cells that are not
    parseable as floats, and rows shorter than ``column_index + 1``, are
    silently ignored (deliberate best-effort behavior).

    Args:
        file_path: path to the CSV file.
        column_index: zero-based index of the column to sum.

    Returns:
        The sum of the numeric values, 0.0 if none (including empty files).
    """
    column_sum = 0.0
    # newline='' is the csv-module-recommended way to open CSV files so
    # embedded newlines inside quoted fields are handled correctly.
    with open(file_path, 'r', newline='') as csv_file:
        csv_reader = csv.reader(csv_file)
        # Skip header if present; the default of None makes this safe on
        # an empty file (bare next() raised StopIteration before).
        next(csv_reader, None)
        for row in csv_reader:
            try:
                column_sum += float(row[column_index])
            except (ValueError, IndexError):
                pass  # Ignore non-numeric or out-of-bounds values
    return column_sum
# --- Demonstration: sum the Salary column of a small temporary CSV. ---
import tempfile  # was missing: NamedTemporaryFile below raised NameError

# Create a temporary CSV file for testing
with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file:
    temp_file.write("Name,Age,Salary\n")
    temp_file.write("John,25,50000\n")
    temp_file.write("Alice,30,60000\n")
    temp_file.write("Bob,28,55000\n")
    temp_file_path = temp_file.name
column_index = 2  # Index of the 'Salary' column
# Test the function with the temporary CSV file
result = calculate_csv_column_sum(temp_file_path, column_index)
print(f"The sum of the values in column {column_index} is: {result}")
# Clean up the temporary file
os.remove(temp_file_path)
# Run the project's test runner (vendor/bin/tester, presumably Nette Tester)
# over tests/ using the UNIX-specific php.ini configuration.
./vendor/bin/tester -c tests/php-unix.ini tests
|
/** Simple arithmetic helper offering addition and multiplication. */
public class MyClass {
    /** Returns the sum of the two operands. */
    public int sum(int left, int right) {
        return left + right;
    }

    /** Returns the product of the two operands. */
    public int product(int left, int right) {
        return right * left;
    }
}
#!/bin/bash
cities=(Warszawa Krakow Gdansk)
# START OMIT
# Iterate the array element-by-element. "${cities[@]}" expands each
# element as its own word; the previous "${cities[*]}" joined all
# elements into a single word, so the loop ran once and printed
# "Warszawa Krakow Gdansk" on one line.
for city in "${cities[@]}"; do
	echo "$city"
done
# END OMIT
<gh_stars>0
package com.epul.oeuvre.controller;
import com.epul.oeuvre.domains.EntityAdherent;
import com.epul.oeuvre.domains.EntityOeuvrepret;
import com.epul.oeuvre.domains.EntityOeuvrevente;
import com.epul.oeuvre.domains.EntityProprietaire;
import com.epul.oeuvre.meserreurs.InsufficientFundsException;
import com.epul.oeuvre.meserreurs.InsufficientRightsException;
import com.epul.oeuvre.meserreurs.WrongAdherentException;
import com.epul.oeuvre.persistence.service.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.util.List;
// REST controller for artworks for sale ("oeuvres en vente"): listing,
// reservation, purchase, creation, update and deletion. Admin-only
// actions check the session attribute "isAdmin"; the buyer identity
// comes from the session attribute "idAdh".
@RequestMapping("/oeuvrevente")
@RestController
@CrossOrigin
public class ControllerOeuvrevente{
    // NOTE(review): combining @Autowired with "new ..." defeats dependency
    // injection -- prefer constructor injection without manual instantiation.
    @Autowired
    private ServiceOeuvrevente serviceOeuvrevente = new ServiceOeuvrevente();
    @Autowired
    private ServiceUtilisateur serviceUtilisateur = new ServiceUtilisateur();
    @Autowired
    private ServiceReservation serviceReservation = new ServiceReservation();
    @Autowired
    private ServiceProprietaire serviceProprietaire = new ServiceProprietaire();
    @Autowired
    private ServiceAdherent serviceAdherent = new ServiceAdherent();
    // Lists all artworks grouped by state (free / reserved / sold) and
    // renders the listing view; any failure renders the error view.
    @RequestMapping(method = RequestMethod.GET, value = "/getOeuvres")
    public ModelAndView getOeuvresvente(HttpServletRequest request,
                                        HttpServletResponse response) throws Exception {
        String destinationPage="";
        List<EntityOeuvrevente> mesOeuvresLibres = null;
        List<EntityOeuvrevente> mesOeuvresReservees = null;
        List<EntityOeuvrevente> mesOeuvresVendues = null;
        try {
            mesOeuvresLibres = serviceOeuvrevente.getOeuvresLibres();
            mesOeuvresReservees = serviceOeuvrevente.getOeuvresReservees();
            mesOeuvresVendues = serviceOeuvrevente.getOeuvresVendues();
            request.setAttribute("mesOeuvresLibres", mesOeuvresLibres);
            request.setAttribute("mesOeuvresReservees", mesOeuvresReservees);
            request.setAttribute("mesOeuvresVendues", mesOeuvresVendues);
            destinationPage = "vues/listerOeuvrevente";
        } catch (Exception e) {
            request.setAttribute("MesErreurs", e.getMessage());
            destinationPage = "vues/Erreur";
        }
        return new ModelAndView(destinationPage);
    }
    // Deletes an artwork by id; admin only.
    // NOTE(review): on success this sends a redirect AND returns a
    // ModelAndView with an empty view name -- confirm Spring tolerates
    // the committed response here.
    @RequestMapping(method = RequestMethod.POST,value = "/deleteOeuvre")
    public ModelAndView deleteOeuvrevente(HttpServletRequest request,
                                          HttpServletResponse response,
                                          @RequestParam(value = "id") int id) throws Exception {
        String destinationPage = "";
        try {
            HttpSession session = request.getSession();
            if(Integer.parseInt(session.getAttribute("isAdmin").toString()) == 1) {
                serviceOeuvrevente.deleteOeuvreById(id);
                response.sendRedirect("/oeuvrevente/getOeuvres");
            }else {
                throw new InsufficientRightsException();
            }
        } catch (Exception e) {
            request.setAttribute("MesErreurs", e.getMessage());
            destinationPage = "/vues/Erreur";
        }
        return new ModelAndView(destinationPage);
    }
    // Reserves an artwork for the logged-in member: records the
    // reservation and flips the artwork state to "R" (reserved).
    @RequestMapping(method = RequestMethod.POST,value = "/reserverOeuvre")
    public ModelAndView reserverOeuvrevente(HttpServletRequest request,
                                            HttpServletResponse response,
                                            @RequestParam(value = "id") int id) throws Exception {
        String destinationPage = "";
        try {
            HttpSession session = request.getSession();
            Integer idAdh = Integer.parseInt(session.getAttribute("idAdh").toString());
            EntityAdherent adh = serviceAdherent.getAdherent(idAdh);
            EntityOeuvrevente oeuvrevente = serviceOeuvrevente.getOeuvreById(id);
            EntityProprietaire prop = serviceProprietaire.getProprietaire(oeuvrevente.getProprietaireByIdProprietaire().getIdProprietaire());
            serviceReservation.addReservation(oeuvrevente, adh, prop);
            serviceOeuvrevente.updateEtatOeuvre(id, "R");
            response.sendRedirect("/oeuvrevente/getOeuvres");
        } catch (Exception e) {
            request.setAttribute("MesErreurs", e.getMessage());
            destinationPage = "/vues/Erreur";
        }
        return new ModelAndView(destinationPage);
    }
    // Purchase flow: checks the member's balance, creates an owner record
    // for the buyer if needed, debits the balance, marks the artwork sold
    // ("V"), transfers ownership and removes any reservation.
    @RequestMapping(method = RequestMethod.POST,value = "/buyOeuvre")
    public ModelAndView buyOeuvrevente(HttpServletRequest request,
                                       HttpServletResponse response,
                                       @RequestParam(value = "id") int id) throws Exception {
        String destinationPage = "";
        try {
            EntityOeuvrevente tempO = serviceOeuvrevente.getOeuvreById(id);
            HttpSession session = request.getSession();
            double solde = Double.parseDouble(session.getAttribute("solde").toString());
            if(solde >= tempO.getPrixOeuvrevente()) {
                Integer idAdh = Integer.parseInt(session.getAttribute("idAdh").toString());
                EntityAdherent tempA = serviceAdherent.getAdherent(idAdh);
                EntityProprietaire tempP = serviceProprietaire.getProprietaire(tempA.getIdAdherent());
                // First purchase by this member: promote them to owner.
                if(tempP == null){
                    serviceProprietaire.addProprietaire(tempA.getIdAdherent(), tempA.getNomAdherent(), tempA.getPrenomAdherent());
                    tempP = serviceProprietaire.getProprietaire(tempA.getIdAdherent());
                }
                // NOTE(review): money handled as double -- subject to
                // floating-point rounding; BigDecimal would be safer.
                double newSolde = solde - tempO.getPrixOeuvrevente();
                session.setAttribute("solde", newSolde);
                serviceUtilisateur.updateSolde(session.getAttribute("nom").toString(), newSolde);
                serviceOeuvrevente.updateEtatOeuvre(id, "V");
                serviceOeuvrevente.updateProprietaireOeuvre(id, tempP);
                serviceReservation.deleteReservation(tempA, tempO);
                response.sendRedirect("/oeuvrevente/getOeuvres");
            }else {
                throw new InsufficientFundsException();
            }
        } catch (Exception e) {
            request.setAttribute("MesErreurs", e.getMessage());
            destinationPage = "/vues/Erreur";
        }
        return new ModelAndView(destinationPage);
    }
    // Update: display the edit form for an artwork (admin only).
    // NOTE(review): only WrongAdherentException is caught, so the
    // InsufficientRightsException thrown above propagates out of the
    // controller instead of rendering vues/Erreur -- inconsistent with
    // deleteOeuvrevente; confirm which behavior is intended.
    @RequestMapping(method = RequestMethod.GET, value = "/updateViewOeuvrevente")
    public ModelAndView updateViewOeuvrevente(HttpServletRequest request, HttpServletResponse response,
                                              @RequestParam(value = "id") int id) throws Exception{
        String destinationPage ="";
        try {
            HttpSession session = request.getSession();
            if(Integer.parseInt(session.getAttribute("isAdmin").toString()) == 1) {
                EntityOeuvrevente oeuvre = serviceOeuvrevente.getOeuvreById(id);
                List<EntityProprietaire> allProp = serviceProprietaire.getAllProprietaire();
                request.setAttribute("oeuvre", oeuvre);
                request.setAttribute("allProp", allProp);
                destinationPage = "vues/modifierOeuvrevente";
            }else {
                throw new InsufficientRightsException();
            }
        } catch (WrongAdherentException wa){
            request.setAttribute("MesErreurs", wa.getMessage());
            destinationPage = "vues/Erreur";
        }
        return new ModelAndView(destinationPage);
    }
    // Update: apply the submitted changes (owner, title, price); admin only.
    // Same catch inconsistency as updateViewOeuvrevente (see note above it).
    @RequestMapping(method = RequestMethod.POST, value = "/updateOeuvrevente")
    public ModelAndView updateOeuvrevente(HttpServletRequest request, HttpServletResponse response) throws Exception{
        String destinationPage ="";
        try {
            HttpSession session = request.getSession();
            if(Integer.parseInt(session.getAttribute("isAdmin").toString()) == 1) {
                Integer idP = Integer.parseInt(request.getParameter("idProprietaire"));
                Integer idOeuvre = Integer.parseInt(request.getParameter("idOeuvre"));
                Double prix = Double.parseDouble(request.getParameter("prix"));
                String titre = request.getParameter("titre");
                EntityProprietaire tempP = serviceProprietaire.getProprietaire(idP);
                serviceOeuvrevente.updateOeuvrevente(idOeuvre, titre, prix, tempP);
                response.sendRedirect("/oeuvrevente/getOeuvres");
            } else {
                throw new InsufficientRightsException();
            }
        } catch (WrongAdherentException wa){
            request.setAttribute("MesErreurs", wa.getMessage());
            destinationPage = "vues/Erreur";
        }
        return new ModelAndView(destinationPage);
    }
    // Add: display the creation form (admin only).
    @RequestMapping(method = RequestMethod.GET, value = "/addViewOeuvrevente")
    public ModelAndView addViewOeuvrevente(HttpServletRequest request, HttpServletResponse response) throws Exception{
        String destinationPage ="";
        try {
            HttpSession session = request.getSession();
            if(Integer.parseInt(session.getAttribute("isAdmin").toString()) == 1) {
                List<EntityProprietaire> allProp = serviceProprietaire.getAllProprietaire();
                request.setAttribute("allProp", allProp);
                destinationPage = "vues/addOeuvrevente";
            }else {
                throw new InsufficientRightsException();
            }
        } catch (WrongAdherentException wa){
            request.setAttribute("MesErreurs", wa.getMessage());
            destinationPage = "vues/Erreur";
        }
        return new ModelAndView(destinationPage);
    }
    // Add: create the artwork from the submitted form data (admin only).
    @RequestMapping(method = RequestMethod.POST, value = "/addOeuvrevente")
    public ModelAndView addOeuvrevente(HttpServletRequest request, HttpServletResponse response) throws Exception{
        String destinationPage ="";
        try {
            HttpSession session = request.getSession();
            if(Integer.parseInt(session.getAttribute("isAdmin").toString()) == 1) {
                Integer idP = Integer.parseInt(request.getParameter("idProprietaire"));
                EntityProprietaire tempP = serviceProprietaire.getProprietaire(idP);
                String titre = request.getParameter("titre");
                Double prix = Double.parseDouble(request.getParameter("prix"));
                serviceOeuvrevente.addOeuvrevente(titre, prix, tempP);
                response.sendRedirect("/oeuvrevente/getOeuvres");
            } else {
                throw new InsufficientRightsException();
            }
        } catch (WrongAdherentException wa){
            request.setAttribute("MesErreurs", wa.getMessage());
            destinationPage = "vues/Erreur";
        }
        return new ModelAndView(destinationPage);
    }
}
|
package disgordbot

// Module is an empty marker interface: it declares no methods, so any
// value satisfies it. Presumably implemented by bot feature modules --
// confirm against the callers that register modules.
type Module interface{}
|
import React, { useEffect, useContext, useState } from 'react';
import { ToastContainer, toast } from 'react-toastify';
import { StoreContext } from "../Store/store.js";
import 'react-toastify/dist/ReactToastify.css';
// Toasty notifications
// Renders a global ToastContainer and fires a toast whenever the parent
// passes a new notification object via props.states.isNotify
// ({ msg, type?, txHash? }). Clicking the toast area opens the related
// transaction, when a hash was attached.
function Notify(props) {
    // Store states
    const { store } = useContext(StoreContext);
    // Transaction hash of the most recent notification; false = none.
    const [txHash, setTxHash] = useState(false)
    useEffect(() => {
        // Only act when a message is pending.
        if (props.states.isNotify["msg"]) {
            // "type" selects the toast flavor (info/success/error/...);
            // fall back to "info" when absent.
            toast[(props.states.isNotify["type"] ? props.states.isNotify["type"] : "info")](props.states.isNotify["msg"], {
                position: "bottom-right",
                autoClose: 5000,
                hideProgressBar: true,
                pauseOnHover: true,
                draggable: true,
                progress: undefined,
            });
            setTxHash(props.states.isNotify["txHash"])
            // Reset the parent's notification state so the toast fires once.
            props.states.setNotify({})
        }
    }, [props])
    // Open the associated transaction in the viewer, if one was recorded.
    const viewTx = () => {
        if (txHash) {
            store.madNetAdapter.viewTransaction(txHash, true);
        }
        return
    }
    return (
        <ToastContainer
            position="bottom-right"
            autoClose={5000}
            hideProgressBar
            newestOnTop
            rtl={false}
            pauseOnFocusLoss={false}
            draggable
            pauseOnHover
            onClick={() => viewTx()}
        />
    )
}
export default Notify;
class Stack:
    """A minimal LIFO stack backed by a Python list."""

    def __init__(self):
        # Internal storage; the end of the list is the top of the stack.
        self.items = []

    def is_empty(self):
        """Return True when the stack holds no items, False otherwise."""
        return not self.items
# Driver code
s = Stack()
print(s.is_empty())  # expected output: True (a freshly created stack is empty)
<?hh // strict
namespace Waffle\Log\Processor;
use type Waffle\Log\record;
// Log-record processor that delegates to an arbitrary record -> record
// function supplied at construction time.
class CallableProcessor implements ProcessorInterface
{
    public function __construct(
        protected (function(record): record) $callable
    ) {}

    // Apply the wrapped callable to transform the record.
    public function process(record $record): record
    {
        // Copy to a local first: "$this->callable($record)" would be
        // parsed as a method call on the object, not a property invoke.
        $fun = $this->callable;
        return $fun($record);
    }
}
|
class ValidationError(Exception):
    """Raised when an input fails validation checks."""
    pass
def count_unique_elements(input_list):
    """Return the number of distinct integers in ``input_list``.

    Raises:
        ValidationError: if the list is empty or contains a non-integer.
        (Note: bools pass isinstance(x, int) in Python and are accepted.)
    """
    # Guard first on emptiness, then on element types; short-circuit
    # order differs from the original but the outcome is identical.
    if len(input_list) == 0 or not all(isinstance(x, int) for x in input_list):
        raise ValidationError("Invalid input: list must contain only integers and cannot be empty")
    return len(set(input_list))
<filename>snippets/tests/test_forms.py
from django.test import TestCase
from snippets.models import Snippet
from snippets.forms import SnippetForm
class SnippetFormTests(TestCase):
    """Validation tests for SnippetForm."""

    def test_valid(self):
        # A payload with title, code and description passes validation.
        params = {
            'title': 'hello world',
            'code': 'print("Hello World")',
            'description': 'Just printing "Hello World"',
        }
        snippet = Snippet()
        form = SnippetForm(params, instance=snippet)
        self.assertTrue(form.is_valid())

    def test_should_specify_title(self):
        # Per the test name, an empty payload (no title) must be invalid.
        params = {}
        snippet = Snippet()
        form = SnippetForm(params, instance=snippet)
        self.assertFalse(form.is_valid())
|
package org.perm.testgenerator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.HashMap;
import java.util.InvalidPropertiesFormatException;
import org.perm.testgenerator.dataset.DataAndQueryGenerator;
/**
 * Generates JUnit test classes and suites from XML data/query files found
 * in a test directory, by filling in the templates under
 * resource/TestTemplates/. One full set of suites is produced per options
 * setting reported by OptionsManager.
 */
public class TestGenerator {
    // Current 1-based options-setting number; static because the suite
    // naming helpers below read it while a generation run is in progress.
    public static int settingNum;
    private static String PACKAGE_NAME = "org.perm.autotests";
    private static String packageDir = "";
    // Template for a single generated @Test method; NAME and NUM are
    // substituted per query.
    private static String testMethodString = "\n\n\t@Test\n"
            + "\tpublic void testNAME () throws SQLException, Exception {\n"
            + "\t\ttestSingleQuery(NUM);\n"
            + "\t}\n";
    // Suites created for the current setting, keyed by dotted suite name.
    private HashMap<String,PermSuite> suites;
    // Raw template texts loaded once in the constructor.
    private String TestCase;
    private String TestSuite;
    private String OptionsTestCase;
    private File testDir;
    private String packageName;
    // Root suite that aggregates everything for the current setting.
    private PermSuite allTests;
    /**
     * Loads the templates and points the connection/options machinery at
     * the given test directory. packageName becomes the Java package of
     * all generated classes.
     */
    public TestGenerator (File testDir, String packageName) throws IOException {
        this.testDir = testDir;
        ConnectionOptions.getInstance().setPath(testDir.getPath());
        OptionsManager.getInstance().reloadOptions();
        this.packageName = packageName;
        TestCase = readString ("resource/TestTemplates/TestCase.java");
        TestSuite = readString ("resource/TestTemplates/TestSuite.java");
        OptionsTestCase = readString ("resource/TestTemplates/TestCaseSetOptions.java");
        packageDir = packageName.replaceAll("\\.", "/");
    }
    // Generates tests and option suites for each known resource directory.
    public static void main (String[] args) throws InvalidPropertiesFormatException, FileNotFoundException, IOException {
        TestGenerator gen;
        File dir;
        dir = new File ("resource/sql/");
        gen = new TestGenerator (dir, PACKAGE_NAME);
        gen.generateTests();
        gen.generateOptionsSuites();
        dir = new File ("resource/tpchValidation/");
        gen = new TestGenerator (dir, "org.perm.autotests.tpch");
        gen.generateTests();
        gen.generateOptionsSuites();
        dir = new File ("resource/wherecs/");
        gen = new TestGenerator (dir, "org.perm.autotests.wherecs");
        gen.generateTests();
        gen.generateOptionsSuites();
        dir = new File ("resource/howcs/");
        gen = new TestGenerator (dir, "org.perm.autotests.howcs");
        gen.generateTests();
        gen.generateOptionsSuites();
    }
    // Runs one full generation pass per options setting, resetting the
    // suite registry and root suite each time.
    public void generateTests () throws InvalidPropertiesFormatException, FileNotFoundException, IOException {
        for (int i = 0; i < OptionsManager.getInstance().getNumSettings(); i++) {
            settingNum = i + 1;
            suites = new HashMap<String,PermSuite> ();
            allTests = new PermSuite ("AllTests_" + settingNum);
            suites.put("allTests_" + settingNum, allTests);
            //allTests.addChild(new PermSuite("SetOptions_" + (i + 1)));
            generateTestRun();
        }
    }
    // Walks the test directory and generates one test class per data XML
    // file (template.xml and settings.xml are skipped), then writes out
    // all suites built along the way.
    public void generateTestRun () throws InvalidPropertiesFormatException, FileNotFoundException, IOException {
        File[] files;
        DataAndQueryGenerator generator;
        String name;
        files = testDir.listFiles();
        for (int i = 0; i < files.length; i++) {
            name = files[i].getName();
            if (name.compareTo("template.xml") != 0 && name.compareTo("settings.xml") != 0 && name.endsWith(".xml")) {
                name = name.substring(0, name.length() - 4);
                generateSuitesFromFileName (name);
                System.out.println("create Generator for " + name);
                generator = new DataAndQueryGenerator (files[i].getAbsolutePath());
                generateTest (generator, name);
            }
        }
        finalizeSuites ();
    }
    // Builds the umbrella AllTestsOptions suite that pairs each setting's
    // SetOptions class with that setting's AllTests suite.
    public void generateOptionsSuites () throws InvalidPropertiesFormatException, FileNotFoundException, IOException {
        PermSuite optionSuite;
        String output;
        optionSuite = new PermSuite ("AllTestsOptions");
        for (int i = 0; i < OptionsManager.getInstance().getNumSettings(); i++) {
            generateSetOption (i + 1);
            optionSuite.addChildWithDupes(new PermSuite("AllTests_" + (i + 1)));
        }
        output = TestSuite;
        output = output.replace("PACKAGE", packageName);
        output = output.replace("NAME", optionSuite.getName());
        output = output.replace("CHILDREN", optionSuite.getClassText());
        writeFile(optionSuite.getName(), output);
    }
    // Emits the SetOptions_<n> class that switches the engine to setting n.
    private void generateSetOption (int optionNum) throws IOException {
        String output;
        output = OptionsTestCase;
        output = output.replace("PACKAGE", packageName);
        output = output.replaceAll("NAME", "SetOptions_" + optionNum);
        output = output.replace("SETTING", "" + optionNum);
        writeFile("SetOptions_" + optionNum, output);
    }
    // Appends the ReportPrinter child and writes every non-empty suite
    // class to disk.
    private void finalizeSuites () throws IOException {
        java.util.Iterator<String> iter;
        PermSuite suite;
        String output;
        allTests.addChild(new PermSuite("ReportPrinter"));
        iter = suites.keySet().iterator();
        while (iter.hasNext()) {
            suite = suites.get(iter.next());
            if (suite.getChildren().size() > 0) {
                output = TestSuite;
                output = output.replace("PACKAGE", packageName);
                output = output.replace("NAME", suite.getClassName());
                output = output.replace("CHILDREN", suite.getClassText());
                writeFile(suite.getClassName(), output);
            }
        }
    }
    // Fills the TestCase template with one @Test method per query in the
    // generator, skipping queries excluded for the current setting.
    private void generateTest (DataAndQueryGenerator generator, String name) throws IOException {
        String output;
        StringBuffer tests;
        PermSuite suite;
        String runName;
        tests = new StringBuffer ();
        runName = generateName (name);
        suite = suites.get(runName);
        output = TestCase;
        output = output.replace("$PACKAGE", packageName);
        output = output.replace("$NAME", suite.getClassName());
        output = output.replace("$FILE", suite.getFileName());
        output = output.replace("$SETTING", "" + settingNum);
        output = output.replace("$PATH", this.getTestDir().toString() + "/");
        for (int i = 1; i <= generator.getNumTest(); i++) {
            if (!generator.isInExcludes(settingNum, i))
                tests.append(testMethodString.replace("NAME", "Query_" + i).replace("NUM", i + ""));
        }
        output = output.replace("$TESTS", tests.toString());
        writeFile(suite.getClassName(), output);
    }
    // Suffixes every dot-separated part of the file name with the current
    // setting number, e.g. "a.b" -> "a_1.b_1" for setting 1.
    private String generateName (String name) {
        String[] parts;
        String result;
        parts = name.split("\\.");
        for (int i = 0; i < parts.length; i++) {
            parts[i] = parts[i] + "_" + settingNum;
        }
        result = parts[0];
        for (int i = 1; i < parts.length; i++) {
            result = result + "." + parts[i];
        }
        return result;
    }
    // Creates (or reuses) one suite per dotted-name prefix of the file
    // name and chains each level under its parent, rooted at allTests.
    private void generateSuitesFromFileName (String fileName) {
        String[] parts;
        String curName;
        PermSuite curSuite;
        PermSuite oldSuite;
        System.out.println("create Suites for " + fileName + " " + settingNum);
        parts = fileName.split("\\.");
        curName = "";
        oldSuite = allTests;
        for (int i = 0; i < parts.length; i++) {
            parts[i] = parts[i] + "_" + settingNum;
        }
        for (int i = 0; i < parts.length; i++) {
            if (curName.compareTo("") == 0) {
                curName = parts[i];
            }
            else {
                curName = curName + "." + parts[i];
            }
            if (suites.containsKey(curName)) {
                curSuite = suites.get(curName);
            }
            else {
                curSuite = new PermSuite (curName);
            }
            suites.put(curName, curSuite);
            oldSuite.addChild(curSuite);
            oldSuite = curSuite;
        }
    }
    // Writes a generated class under src/<packageDir>/.
    // NOTE(review): the writer is not closed in a finally block, so an
    // IOException during write() leaks the file handle.
    private void writeFile (String name, String content) throws IOException {
        File outFile;
        FileWriter writer;
        outFile = new File ("src/" + packageDir + "/" + name + ".java");
        outFile.createNewFile();
        writer = new FileWriter (outFile);
        writer.write(content);
        writer.close();
    }
    // Reads an entire text file, normalizing line endings to '\n'.
    // NOTE(review): the reader is never closed -- resource leak; wrap in
    // try-with-resources.
    private String readString (String fileName) throws IOException {
        File file;
        FileReader reader;
        BufferedReader bufRead;
        StringBuffer result;
        result = new StringBuffer ();
        file = new File (fileName);
        reader = new FileReader (file);
        bufRead = new BufferedReader (reader);
        while (bufRead.ready()) {
            result.append(bufRead.readLine() + "\n");
        }
        return result.toString();
    }
    public File getTestDir () {
        return testDir;
    }
    public void setTestDir (File testDir) {
        this.testDir = testDir;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.