text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# Run the snapshot_learners_states management command inside the selenium
# docker-compose stack, serving assets from the webpack dev server.
set -e -o pipefail

# cd to root of repo (two levels up from this script's directory)
cd "$( dirname "${BASH_SOURCE[0]}" )"/../../

# webpack-stats.json is written by the webpack dev server; without it the
# app cannot resolve bundles, so fail fast with a helpful message.
if [[ ! -e "webpack-stats.json" ]]
then
    echo "Please start the webpack dev server before running this script."
    exit 1
fi

source ./scripts/envs.sh
if [[ -z "$WEBPACK_SELENIUM_DEV_SERVER_HOST" ]]
then
    echo "WEBPACK_SELENIUM_DEV_SERVER_HOST is missing. Do you have docker-machine configured correctly?"
    exit 1
fi

# Start hub and chrome containers.
# YML_ARGS is intentionally unquoted below so it expands into separate
# -f arguments.
YML_ARGS="-f docker-compose.yml -f docker-compose.override.yml -f docker-compose.selenium.yml"
docker-compose ${YML_ARGS} up -d

# Run tests. "$@" (quoted) forwards any extra CLI arguments to the management
# command without re-splitting them on whitespace (the original bare $@ would
# break arguments containing spaces).
docker-compose ${YML_ARGS} run -v "$PWD:/src" \
    -e MICROMASTERS_USE_WEBPACK_DEV_SERVER=True \
    -e RUNNING_SELENIUM=true \
    -e FEATURE_OPEN_DISCUSSIONS_POST_UI=false \
    -e WEBPACK_DEV_SERVER_HOST="$WEBPACK_SELENIUM_DEV_SERVER_HOST" \
    --service-ports \
    selenium ./manage.py snapshot_learners_states "$@"
|
package main
import (
litmusLIB "github.com/litmuschaos/litmus-go/chaoslib/litmus/container-kill/lib"
pumbaLIB "github.com/litmuschaos/litmus-go/chaoslib/pumba/container-kill/lib"
clients "github.com/litmuschaos/litmus-go/pkg/clients"
"github.com/litmuschaos/litmus-go/pkg/events"
experimentEnv "github.com/litmuschaos/litmus-go/pkg/generic/container-kill/environment"
experimentTypes "github.com/litmuschaos/litmus-go/pkg/generic/container-kill/types"
"github.com/litmuschaos/litmus-go/pkg/log"
"github.com/litmuschaos/litmus-go/pkg/probe"
"github.com/litmuschaos/litmus-go/pkg/result"
"github.com/litmuschaos/litmus-go/pkg/status"
"github.com/litmuschaos/litmus-go/pkg/types"
"github.com/litmuschaos/litmus-go/pkg/utils/common"
"github.com/sirupsen/logrus"
)
func init() {
	// Configure logrus for human-readable output: full timestamps, field
	// order preserved as logged, and level names left untruncated.
	formatter := &logrus.TextFormatter{
		FullTimestamp:          true,
		DisableSorting:         true,
		DisableLevelTruncation: true,
	}
	logrus.SetFormatter(formatter)
}
// main drives the container-kill chaos experiment end to end: it builds the
// Kubernetes clients, loads experiment configuration from the environment,
// records the chaos result (SOT), optionally runs pre-chaos probes, injects
// chaos through the selected lib, verifies the application after chaos,
// optionally runs post-chaos probes, and finally publishes the verdict as
// events and an updated ChaosResult (EOT).
func main() {

	var err error
	experimentsDetails := experimentTypes.ExperimentDetails{}
	resultDetails := types.ResultDetails{}
	eventsDetails := types.EventDetails{}
	clients := clients.ClientSets{}
	chaosDetails := types.ChaosDetails{}

	// Getting kubeConfig and generate ClientSets
	if err := clients.GenerateClientSetFromKubeConfig(); err != nil {
		log.Fatalf("Unable to Get the kubeconfig, err: %v", err)
	}

	// Fetching all the ENV passed from the runner pod.
	// NOTE(review): ExperimentName is logged before GetENV populates it, so
	// this line prints an empty name — confirm whether that is intended.
	log.Infof("[PreReq]: Getting the ENV for the %v experiment", experimentsDetails.ExperimentName)
	experimentEnv.GetENV(&experimentsDetails)

	// Initialise the chaos attributes
	experimentEnv.InitialiseChaosVariables(&chaosDetails, &experimentsDetails)

	// Initialise chaos result parameters
	types.SetResultAttributes(&resultDetails, chaosDetails)

	// Initialise the probe details.
	// NOTE(review): any error from probe initialization is not checked here
	// — confirm that failures are surfaced elsewhere.
	probe.InitializeProbesInChaosResultDetails(&chaosDetails, clients, &resultDetails)

	// Updating the chaos result at the beginning of the experiment (SOT =
	// start of test).
	log.Infof("[PreReq]: Updating the chaos result of %v experiment (SOT)", experimentsDetails.ExperimentName)
	err = result.ChaosResult(&chaosDetails, clients, &resultDetails, "SOT")
	if err != nil {
		log.Errorf("Unable to Create the Chaos Result, err: %v", err)
		failStep := "Updating the chaos result of container-kill experiment (SOT)"
		result.RecordAfterFailure(&chaosDetails, &resultDetails, failStep, clients, &eventsDetails)
		return
	}

	// Set the chaos result uid
	result.SetResultUID(&resultDetails, clients, &chaosDetails)

	// generating the event in chaosresult to mark the verdict as awaited
	msg := "experiment: " + experimentsDetails.ExperimentName + ", Result: Awaited"
	types.SetResultEventAttributes(&eventsDetails, types.AwaitedVerdict, msg, "Normal", &resultDetails)
	events.GenerateEvents(&eventsDetails, clients, &chaosDetails, "ChaosResult")

	// DISPLAY THE APP INFORMATION
	log.InfoWithValues("The application information is as follows", logrus.Fields{
		"Namespace": experimentsDetails.AppNS,
		"Label":     experimentsDetails.AppLabel,
		"Ramp Time": experimentsDetails.RampTime,
	})

	// Calling AbortWatcher go routine, it will continuously watch for the abort signal for the entire chaos duration and generate the required events and result
	// It is being invoked here, as opposed to within the chaoslib, as these experiments do not need additional recovery/chaos revert steps like in case of network experiments
	go common.AbortWatcher(experimentsDetails.ExperimentName, clients, &resultDetails, &chaosDetails, &eventsDetails)

	// PRE-CHAOS APPLICATION STATUS CHECK
	log.Info("[Status]: Verify that the AUT (Application Under Test) is running (pre-chaos)")
	err = status.CheckApplicationStatus(experimentsDetails.AppNS, experimentsDetails.AppLabel, experimentsDetails.Timeout, experimentsDetails.Delay, clients)
	if err != nil {
		log.Errorf("Application status check failed, err: %v", err)
		failStep := "Verify that the AUT (Application Under Test) is running (pre-chaos)"
		result.RecordAfterFailure(&chaosDetails, &resultDetails, failStep, clients, &eventsDetails)
		return
	}

	// Pre-chaos probe run + ChaosEngine events are only relevant when the
	// experiment was launched through a ChaosEngine.
	if experimentsDetails.EngineName != "" {
		// marking AUT as running, as we already checked the status of application under test
		msg := "AUT: Running"

		// run the probes in the pre-chaos check
		if len(resultDetails.ProbeDetails) != 0 {
			err = probe.RunProbes(&chaosDetails, clients, &resultDetails, "PreChaos", &eventsDetails)
			if err != nil {
				log.Errorf("Probe failed, err: %v", err)
				// NOTE(review): message says "adding" but probes are being
				// run here — confirm the intended failStep wording.
				failStep := "Failed while adding probe"
				msg := "AUT: Running, Probes: Unsuccessful"
				types.SetEngineEventAttributes(&eventsDetails, types.PreChaosCheck, msg, "Warning", &chaosDetails)
				events.GenerateEvents(&eventsDetails, clients, &chaosDetails, "ChaosEngine")
				result.RecordAfterFailure(&chaosDetails, &resultDetails, failStep, clients, &eventsDetails)
				return
			}
			msg = "AUT: Running, Probes: Successful"
		}
		// generating the events for the pre-chaos check
		types.SetEngineEventAttributes(&eventsDetails, types.PreChaosCheck, msg, "Normal", &chaosDetails)
		events.GenerateEvents(&eventsDetails, clients, &chaosDetails, "ChaosEngine")
	}

	// Including the litmus lib for container-kill: the litmus lib handles
	// containerd/crio runtimes, the pumba lib handles docker; any other
	// lib/runtime combination is rejected.
	if experimentsDetails.ChaosLib == "litmus" && (experimentsDetails.ContainerRuntime == "containerd" || experimentsDetails.ContainerRuntime == "crio") {
		err = litmusLIB.PrepareContainerKill(&experimentsDetails, clients, &resultDetails, &eventsDetails, &chaosDetails)
		if err != nil {
			failStep := "failed in chaos injection phase"
			result.RecordAfterFailure(&chaosDetails, &resultDetails, failStep, clients, &eventsDetails)
			log.Fatalf("Chaos injection failed, err: %v", err)
		}
	} else if experimentsDetails.ChaosLib == "litmus" && experimentsDetails.ContainerRuntime == "docker" {
		err = pumbaLIB.PrepareContainerKill(&experimentsDetails, clients, &resultDetails, &eventsDetails, &chaosDetails)
		if err != nil {
			failStep := "failed in chaos injection phase"
			result.RecordAfterFailure(&chaosDetails, &resultDetails, failStep, clients, &eventsDetails)
			log.Fatalf("Chaos injection failed, err: %v", err)
		}
	} else {
		failStep := "lib and container-runtime combination not supported!"
		result.RecordAfterFailure(&chaosDetails, &resultDetails, failStep, clients, &eventsDetails)
		log.Fatal("lib and container-runtime combination not supported, provide the correct value of lib & container-runtime")
	}

	log.Infof("[Confirmation]: %v chaos has been injected successfully", experimentsDetails.ExperimentName)
	// Optimistically mark Pass; any later failure path returns before the
	// verdict is published, or flips it via RecordAfterFailure.
	resultDetails.Verdict = "Pass"

	// POST-CHAOS APPLICATION STATUS CHECK
	log.Info("[Status]: Verify that the AUT (Application Under Test) is running (post-chaos)")
	err = status.CheckApplicationStatus(experimentsDetails.AppNS, experimentsDetails.AppLabel, experimentsDetails.Timeout, experimentsDetails.Delay, clients)
	if err != nil {
		log.Errorf("Application status check failed, err: %v", err)
		failStep := "Verify that the AUT (Application Under Test) is running (post-chaos)"
		result.RecordAfterFailure(&chaosDetails, &resultDetails, failStep, clients, &eventsDetails)
		return
	}

	if experimentsDetails.EngineName != "" {
		// marking AUT as running, as we already checked the status of application under test
		msg := "AUT: Running"

		// run the probes in the post-chaos check
		if len(resultDetails.ProbeDetails) != 0 {
			err = probe.RunProbes(&chaosDetails, clients, &resultDetails, "PostChaos", &eventsDetails)
			if err != nil {
				// NOTE(review): log/failStep say "add" but probes are being
				// run — confirm the intended wording.
				log.Errorf("Unable to Add the probes, err: %v", err)
				failStep := "Failed while adding probe"
				msg := "AUT: Running, Probes: Unsuccessful"
				types.SetEngineEventAttributes(&eventsDetails, types.PostChaosCheck, msg, "Warning", &chaosDetails)
				events.GenerateEvents(&eventsDetails, clients, &chaosDetails, "ChaosEngine")
				result.RecordAfterFailure(&chaosDetails, &resultDetails, failStep, clients, &eventsDetails)
				return
			}
			msg = "AUT: Running, Probes: Successful"
		}
		// generating post chaos event
		types.SetEngineEventAttributes(&eventsDetails, types.PostChaosCheck, msg, "Normal", &chaosDetails)
		events.GenerateEvents(&eventsDetails, clients, &chaosDetails, "ChaosEngine")
	}

	// Updating the chaosResult at the end of the experiment (EOT = end of test)
	log.Infof("[The End]: Updating the chaos result of %v experiment (EOT)", experimentsDetails.ExperimentName)
	err = result.ChaosResult(&chaosDetails, clients, &resultDetails, "EOT")
	if err != nil {
		log.Fatalf("Unable to Update the Chaos Result, err: %v", err)
	}

	// generating the event in chaosresult to mark the verdict as pass/fail
	msg = "experiment: " + experimentsDetails.ExperimentName + ", Result: " + resultDetails.Verdict
	reason := types.PassVerdict
	eventType := "Normal"
	if resultDetails.Verdict != "Pass" {
		reason = types.FailVerdict
		eventType = "Warning"
	}
	types.SetResultEventAttributes(&eventsDetails, reason, msg, eventType, &resultDetails)
	events.GenerateEvents(&eventsDetails, clients, &chaosDetails, "ChaosResult")

	// Emit a summary event on the ChaosEngine when one is associated.
	if experimentsDetails.EngineName != "" {
		msg := experimentsDetails.ExperimentName + " experiment has been " + resultDetails.Verdict + "ed"
		types.SetEngineEventAttributes(&eventsDetails, types.Summary, msg, "Normal", &chaosDetails)
		events.GenerateEvents(&eventsDetails, clients, &chaosDetails, "ChaosEngine")
	}
}
|
/**
 * Compute the arithmetic mean of exactly three numbers.
 *
 * @param {number} num1 - first value
 * @param {number} num2 - second value
 * @param {number} num3 - third value
 * @returns {number} the mean (num1 + num2 + num3) / 3
 */
function averageThreeNumbers(num1, num2, num3){
  const sum = num1 + num2 + num3;
  return sum / 3;
}
<gh_stars>1-10
// /*
// Copyright 2020 Kaloom Inc.
// Copyright 2014 The Kubernetes Authors.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// */
package crioruntime
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/golang/glog"
"github.com/pkg/errors"
"google.golang.org/grpc"
pb "k8s.io/cri-api/pkg/apis/runtime/v1alpha2"
"k8s.io/kubernetes/pkg/kubelet/util"
)
const (
	// crioNetNSFmt is the path template for a named network namespace under
	// /var/run/netns; the %s is the namespace name extracted from the
	// sandbox's runtime spec.
	crioNetNSFmt = "/var/run/netns/%s"
)
// CrioRuntime runtime object. It wraps a CRI RuntimeService gRPC client and
// exposes sandbox/namespace lookup helpers.
type CrioRuntime struct {
	client pb.RuntimeServiceClient // gRPC client for the CRI runtime service
}
// PodStatusResponseInfo models the subset of the verbose PodSandboxStatus
// "info" JSON payload that this package decodes.
type PodStatusResponseInfo struct {
	SandboxId   string
	RunTimeSpec RuneTimeSpecInfo
}

// RuneTimeSpecInfo holds the runtime-spec portion of the sandbox status.
// NOTE(review): the name looks like a typo for "RunTimeSpecInfo"; it is
// exported, so renaming could break external callers — confirm before fixing.
type RuneTimeSpecInfo struct {
	Linux NamespacesInfo
}

// NamespacesInfo lists the Linux namespaces attached to the sandbox.
type NamespacesInfo struct {
	NameSpaces []NameSpaceInfo
}

// NameSpaceInfo is a single namespace entry: its type (e.g. "network") and
// filesystem path.
type NameSpaceInfo struct {
	Type string
	Path string
}
// GetNetNS returns the network namespace of the given containerID. The ID
// supplied is typically the ID of a pod sandbox. This getter doesn't try
// to map non-sandbox IDs to their respective sandboxes. Returns an empty
// path (and nil error) when the sandbox has no network namespace entry.
func (cr *CrioRuntime) GetNetNS(podSandboxID string) (string, error) {
	glog.V(4).Infof("GetNetNS:podSandboxID:%s", podSandboxID)
	if podSandboxID == "" {
		return "", fmt.Errorf("ID cannot be empty")
	}

	request := &pb.PodSandboxStatusRequest{
		PodSandboxId: podSandboxID,
		Verbose:      true, // TODO see with non verbose if all info is there
	}
	glog.V(5).Infof("PodSandboxStatusRequest: %v", request)
	resp, err := cr.client.PodSandboxStatus(context.Background(), request)
	glog.V(5).Infof("PodSandboxStatusResponse: %v", resp)
	if err != nil {
		return "", err
	}

	// The verbose response carries a JSON document under the "info" key;
	// decode only the fields modeled by PodStatusResponseInfo.
	infoMap := resp.GetInfo()
	glog.V(5).Infof("GetNetNS:GetInfo():%s", infoMap)
	rawInfo := infoMap["info"]
	glog.V(5).Infof("GetNetNS:info:%s", rawInfo)

	var podStatus PodStatusResponseInfo
	if err := json.Unmarshal([]byte(rawInfo), &podStatus); err != nil {
		glog.Errorf("GetNetNS:error decoding response: %v", err)
		if syntaxErr, ok := err.(*json.SyntaxError); ok {
			glog.Errorf("GetNetNS:syntax error at byte offset %d", syntaxErr.Offset)
		}
		return "", err
	}

	namespaces := podStatus.RunTimeSpec.Linux.NameSpaces
	glog.V(5).Infof("GetNetNS:RunTimeSpec.Linux.NameSpaces: %v", namespaces)
	for _, ns := range namespaces {
		if ns.Type != "network" {
			continue
		}
		// The namespace name is the last segment of its filesystem path.
		segments := strings.Split(ns.Path, "/")
		netNS := segments[len(segments)-1]
		glog.V(5).Infof("GetNetNS:NetNS:%s", netNS)
		return fmt.Sprintf(crioNetNSFmt, netNS), nil
	}

	// No "network" namespace entry: preserve the original contract of an
	// empty result with no error.
	return "", nil
}
// GetSandboxID returns kubernete's crio sandbox container ID for the given
// (non-sandbox) container ID, by listing containers filtered on that ID.
// Exactly one matching container is expected; zero or multiple matches are
// reported as errors.
func (cr *CrioRuntime) GetSandboxID(containerID string) (string, error) {
	glog.V(5).Infof("GetSandboxID:containerID:%s", containerID)
	if containerID == "" {
		return "", fmt.Errorf("ID cannot be empty")
	}
	filter := &pb.ContainerFilter{
		Id: containerID,
	}
	request := &pb.ListContainersRequest{
		Filter: filter,
	}
	glog.V(5).Infof("ListContainerRequest: %v", request)
	r, err := cr.client.ListContainers(context.Background(), request)
	glog.V(5).Infof("ListContainerResponse: %v", r)
	if err != nil {
		return "", err
	}
	containerslist := r.GetContainers()
	// Error strings lowercased per Go convention; also fixes the
	// "more then one" typo in the original message.
	if len(containerslist) == 0 {
		return "", fmt.Errorf("didn't find any container with containerID:%s", containerID)
	} else if len(containerslist) != 1 {
		return "", fmt.Errorf("found more than one container with containerID:%s", containerID)
	}
	sandboxID := containerslist[0].PodSandboxId
	glog.V(5).Infof("ContainerStatusResponse:SandboxId %s", sandboxID)
	return sandboxID, nil
}
// getConnection dials the first reachable gRPC endpoint in endPoints and
// returns the established connection. Endpoints are tried in order: a
// failure on any endpoint other than the last is logged and the next one is
// tried; the failure of the final endpoint is returned to the caller.
func getConnection(endPoints []string, timeOut time.Duration) (*grpc.ClientConn, error) {
	// len() of a nil slice is 0, so a single check covers both the nil and
	// empty cases (the original `endPoints == nil || len(endPoints) == 0`
	// was redundant).
	if len(endPoints) == 0 {
		return nil, fmt.Errorf("endpoint is not set")
	}
	endPointsLen := len(endPoints)
	var conn *grpc.ClientConn
	for indx, endPoint := range endPoints {
		glog.Infof("connect using endpoint '%s' with '%s' timeout", endPoint, timeOut)
		addr, dialer, err := util.GetAddressAndDialer(endPoint)
		if err != nil {
			if indx == endPointsLen-1 {
				return nil, err
			}
			glog.Error(err)
			continue
		}
		// WithBlock + WithTimeout make the dial synchronous, so connection
		// failures surface here instead of on the first RPC.
		conn, err = grpc.Dial(addr, grpc.WithInsecure(), grpc.WithBlock(), grpc.WithTimeout(timeOut), grpc.WithContextDialer(dialer))
		if err != nil {
			errMsg := errors.Wrapf(err, "connect endpoint '%s', make sure you are running as root and the endpoint has been started", endPoint)
			if indx == endPointsLen-1 {
				return nil, errMsg
			}
			glog.Error(errMsg)
		} else {
			glog.Infof("connected successfully using endpoint: %s", endPoint)
			break
		}
	}
	return conn, nil
}
// NewCrioRuntime instantiate a crio runtime object. It dials the given CRI
// endpoint (with the supplied timeout) and wraps the resulting connection in
// a RuntimeService client.
func NewCrioRuntime(endpoint string, timeOut time.Duration) (*CrioRuntime, error) {
	if endpoint == "" {
		return nil, fmt.Errorf("--runtime-endpoint is not set")
	}
	conn, err := getConnection([]string{endpoint}, timeOut)
	if err != nil {
		return nil, errors.Wrap(err, "connect")
	}
	return &CrioRuntime{
		client: pb.NewRuntimeServiceClient(conn),
	}, nil
}
|
package io.smallrye.mutiny.streams.stages;
import java.util.Objects;
import java.util.function.Predicate;
import org.eclipse.microprofile.reactive.streams.operators.spi.Stage;
import io.smallrye.mutiny.Multi;
import io.smallrye.mutiny.streams.Engine;
import io.smallrye.mutiny.streams.operators.ProcessingStage;
import io.smallrye.mutiny.streams.operators.ProcessingStageFactory;
import io.smallrye.mutiny.streams.utils.Casts;
/**
 * Implementation of the {@link Stage.DropWhile} stage: discards items from the
 * upstream while a predicate holds, then forwards everything that follows.
 *
 * @author <a href="http://escoffier.me">Clement Escoffier</a>
 */
public class DropWhileStageFactory implements ProcessingStageFactory<Stage.DropWhile> {

    @Override
    public <I, O> ProcessingStage<I, O> create(Engine engine, Stage.DropWhile stage) {
        Predicate<I> predicate = Casts.cast(stage.getPredicate());
        return Casts.cast(new SkipWhile<>(predicate));
    }

    /**
     * Drop-while operator: skips items while {@code predicate} evaluates to
     * {@code true}. Renamed from the misleading {@code TakeWhile} — the
     * behavior (bySkippingItemsWhile) has always been drop-while.
     */
    private static class SkipWhile<I> implements ProcessingStage<I, I> {
        private final Predicate<I> predicate;

        SkipWhile(Predicate<I> predicate) {
            this.predicate = Objects.requireNonNull(predicate);
        }

        @Override
        public Multi<I> apply(Multi<I> source) {
            // Delegates to Mutiny's skip-while operator.
            return source.transform().bySkippingItemsWhile(predicate);
        }
    }
}
|
import string
def translator(frm='', to='', delete='', keep=None):
    """Build and return a one-argument string-translation function.

    Ported from the Python 2 recipe (``string.maketrans`` and the two-argument
    ``str.translate`` no longer exist in Python 3); behavior is preserved.

    :param frm: characters to translate from (paired positionally with ``to``)
    :param to: replacement characters; a single character is repeated to
        match the length of ``frm``
    :param delete: characters to remove from the input
    :param keep: if given, every character in the 8-bit range (0-255, matching
        the historical ``string.maketrans('', '')`` universe) that is NOT in
        ``keep`` is deleted; characters listed in both ``keep`` and ``delete``
        are still deleted
    :returns: a function ``translate(s) -> str`` applying the prepared table
    :raises ValueError: if ``frm`` and ``to`` end up with different lengths
    """
    if len(to) == 1:
        # A single replacement character stands in for every char in `frm`.
        to = to * len(frm)
    if keep is not None:
        # `keep` wins over `delete`: delete the complement of (keep - delete),
        # exactly as the original allchars.translate(...) dance computed.
        kept = set(keep) - set(delete)
        delete = ''.join(chr(c) for c in range(256) if chr(c) not in kept)
    # str.maketrans maps frm[i] -> to[i] and maps every char of `delete`
    # to None (deletion overrides mapping, as in Python 2).
    trans = str.maketrans(frm, to, delete)

    def translate(s):
        """Apply the prepared translation table to ``s``."""
        return s.translate(trans)

    return translate
|
<gh_stars>0
/*******************************************************************************
* Copyright 2015 InfinitiesSoft Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package com.infinities.skyport.model.configuration.platform;
import java.io.Serializable;
import com.infinities.skyport.model.FunctionConfiguration;
/**
 * Per-operation configuration for a data-warehouse provider: one
 * {@link FunctionConfiguration} per supported operation/capability, each
 * defaulting to a fresh instance. Serializable and deeply cloneable.
 */
public class DataWarehouseConfiguration implements Serializable, Cloneable {

	private static final long serialVersionUID = 1L;

	// One FunctionConfiguration per data-warehouse operation. Field names
	// mirror the operation names; "supports*" entries cover capability
	// queries rather than actions.
	private FunctionConfiguration addSnapshotShare = new FunctionConfiguration();
	private FunctionConfiguration authorizeComputeFirewalls = new FunctionConfiguration();
	private FunctionConfiguration authorizeIPs = new FunctionConfiguration();
	private FunctionConfiguration createCluster = new FunctionConfiguration();
	private FunctionConfiguration createClusterFirewall = new FunctionConfiguration();
	private FunctionConfiguration createClusterParameterGroup = new FunctionConfiguration();
	private FunctionConfiguration createClusterSnapshot = new FunctionConfiguration();
	private FunctionConfiguration disableLogging = new FunctionConfiguration();
	private FunctionConfiguration enableLogging = new FunctionConfiguration();
	private FunctionConfiguration getCluster = new FunctionConfiguration();
	private FunctionConfiguration getClusterFirewall = new FunctionConfiguration();
	private FunctionConfiguration getClusterLoggingStatus = new FunctionConfiguration();
	private FunctionConfiguration getClusterParameterGroup = new FunctionConfiguration();
	private FunctionConfiguration getClusterProduct = new FunctionConfiguration();
	private FunctionConfiguration getClusterSnapshot = new FunctionConfiguration();
	private FunctionConfiguration getDataCenterConstraintRequirement = new FunctionConfiguration();
	private FunctionConfiguration listClusterFirewalls = new FunctionConfiguration();
	private FunctionConfiguration listClusterParameterGroups = new FunctionConfiguration();
	private FunctionConfiguration listClusterProducts = new FunctionConfiguration();
	private FunctionConfiguration listClusterSnapshots = new FunctionConfiguration();
	private FunctionConfiguration listClusterVersions = new FunctionConfiguration();
	private FunctionConfiguration listClusters = new FunctionConfiguration();
	private FunctionConfiguration rebootCluster = new FunctionConfiguration();
	private FunctionConfiguration removeAllSnapshotShares = new FunctionConfiguration();
	private FunctionConfiguration removeCluster = new FunctionConfiguration();
	private FunctionConfiguration removeClusterFirewall = new FunctionConfiguration();
	private FunctionConfiguration removeClusterParameterGroup = new FunctionConfiguration();
	private FunctionConfiguration removeClusterSnapshot = new FunctionConfiguration();
	private FunctionConfiguration removeSnapshotShare = new FunctionConfiguration();
	private FunctionConfiguration resizeCluster = new FunctionConfiguration();
	private FunctionConfiguration revokeComputeFirewalls = new FunctionConfiguration();
	private FunctionConfiguration revokeIPs = new FunctionConfiguration();
	private FunctionConfiguration rotateEncryptionKeys = new FunctionConfiguration();
	private FunctionConfiguration supportsAuthorizingComputeFirewalls = new FunctionConfiguration();
	private FunctionConfiguration supportsCloudStorageLogging = new FunctionConfiguration();
	private FunctionConfiguration supportsClusterFirewalls = new FunctionConfiguration();
	private FunctionConfiguration supportsClusterSnapshots = new FunctionConfiguration();
	private FunctionConfiguration supportsEncryptionAtRest = new FunctionConfiguration();
	private FunctionConfiguration updateParameters = new FunctionConfiguration();
	private FunctionConfiguration supportsSnapshotSharing = new FunctionConfiguration();
	private FunctionConfiguration updateClusterTags = new FunctionConfiguration();
	private FunctionConfiguration updateSnapshotTags = new FunctionConfiguration();
	// --- Accessors: one trivial getter/setter pair per operation field, in
	// the same order as the field declarations. Setters replace the stored
	// reference; no defensive copies are made. ---

	public FunctionConfiguration getAddSnapshotShare() {
		return addSnapshotShare;
	}

	public void setAddSnapshotShare(FunctionConfiguration addSnapshotShare) {
		this.addSnapshotShare = addSnapshotShare;
	}

	public FunctionConfiguration getAuthorizeComputeFirewalls() {
		return authorizeComputeFirewalls;
	}

	public void setAuthorizeComputeFirewalls(FunctionConfiguration authorizeComputeFirewalls) {
		this.authorizeComputeFirewalls = authorizeComputeFirewalls;
	}

	public FunctionConfiguration getAuthorizeIPs() {
		return authorizeIPs;
	}

	public void setAuthorizeIPs(FunctionConfiguration authorizeIPs) {
		this.authorizeIPs = authorizeIPs;
	}

	public FunctionConfiguration getCreateCluster() {
		return createCluster;
	}

	public void setCreateCluster(FunctionConfiguration createCluster) {
		this.createCluster = createCluster;
	}

	public FunctionConfiguration getCreateClusterFirewall() {
		return createClusterFirewall;
	}

	public void setCreateClusterFirewall(FunctionConfiguration createClusterFirewall) {
		this.createClusterFirewall = createClusterFirewall;
	}

	public FunctionConfiguration getCreateClusterParameterGroup() {
		return createClusterParameterGroup;
	}

	public void setCreateClusterParameterGroup(FunctionConfiguration createClusterParameterGroup) {
		this.createClusterParameterGroup = createClusterParameterGroup;
	}

	public FunctionConfiguration getCreateClusterSnapshot() {
		return createClusterSnapshot;
	}

	public void setCreateClusterSnapshot(FunctionConfiguration createClusterSnapshot) {
		this.createClusterSnapshot = createClusterSnapshot;
	}

	public FunctionConfiguration getDisableLogging() {
		return disableLogging;
	}

	public void setDisableLogging(FunctionConfiguration disableLogging) {
		this.disableLogging = disableLogging;
	}

	public FunctionConfiguration getEnableLogging() {
		return enableLogging;
	}

	public void setEnableLogging(FunctionConfiguration enableLogging) {
		this.enableLogging = enableLogging;
	}

	public FunctionConfiguration getGetCluster() {
		return getCluster;
	}

	public void setGetCluster(FunctionConfiguration getCluster) {
		this.getCluster = getCluster;
	}

	public FunctionConfiguration getGetClusterFirewall() {
		return getClusterFirewall;
	}

	public void setGetClusterFirewall(FunctionConfiguration getClusterFirewall) {
		this.getClusterFirewall = getClusterFirewall;
	}

	public FunctionConfiguration getGetClusterLoggingStatus() {
		return getClusterLoggingStatus;
	}

	public void setGetClusterLoggingStatus(FunctionConfiguration getClusterLoggingStatus) {
		this.getClusterLoggingStatus = getClusterLoggingStatus;
	}

	public FunctionConfiguration getGetClusterParameterGroup() {
		return getClusterParameterGroup;
	}

	public void setGetClusterParameterGroup(FunctionConfiguration getClusterParameterGroup) {
		this.getClusterParameterGroup = getClusterParameterGroup;
	}

	public FunctionConfiguration getGetClusterProduct() {
		return getClusterProduct;
	}

	public void setGetClusterProduct(FunctionConfiguration getClusterProduct) {
		this.getClusterProduct = getClusterProduct;
	}

	public FunctionConfiguration getGetClusterSnapshot() {
		return getClusterSnapshot;
	}

	public void setGetClusterSnapshot(FunctionConfiguration getClusterSnapshot) {
		this.getClusterSnapshot = getClusterSnapshot;
	}

	public FunctionConfiguration getGetDataCenterConstraintRequirement() {
		return getDataCenterConstraintRequirement;
	}

	public void setGetDataCenterConstraintRequirement(FunctionConfiguration getDataCenterConstraintRequirement) {
		this.getDataCenterConstraintRequirement = getDataCenterConstraintRequirement;
	}

	public FunctionConfiguration getListClusterFirewalls() {
		return listClusterFirewalls;
	}

	public void setListClusterFirewalls(FunctionConfiguration listClusterFirewalls) {
		this.listClusterFirewalls = listClusterFirewalls;
	}

	public FunctionConfiguration getListClusterParameterGroups() {
		return listClusterParameterGroups;
	}

	public void setListClusterParameterGroups(FunctionConfiguration listClusterParameterGroups) {
		this.listClusterParameterGroups = listClusterParameterGroups;
	}

	public FunctionConfiguration getListClusterProducts() {
		return listClusterProducts;
	}

	public void setListClusterProducts(FunctionConfiguration listClusterProducts) {
		this.listClusterProducts = listClusterProducts;
	}

	public FunctionConfiguration getListClusterSnapshots() {
		return listClusterSnapshots;
	}

	public void setListClusterSnapshots(FunctionConfiguration listClusterSnapshots) {
		this.listClusterSnapshots = listClusterSnapshots;
	}

	public FunctionConfiguration getListClusterVersions() {
		return listClusterVersions;
	}

	public void setListClusterVersions(FunctionConfiguration listClusterVersions) {
		this.listClusterVersions = listClusterVersions;
	}

	public FunctionConfiguration getListClusters() {
		return listClusters;
	}

	public void setListClusters(FunctionConfiguration listClusters) {
		this.listClusters = listClusters;
	}

	public FunctionConfiguration getRebootCluster() {
		return rebootCluster;
	}

	public void setRebootCluster(FunctionConfiguration rebootCluster) {
		this.rebootCluster = rebootCluster;
	}

	public FunctionConfiguration getRemoveAllSnapshotShares() {
		return removeAllSnapshotShares;
	}

	public void setRemoveAllSnapshotShares(FunctionConfiguration removeAllSnapshotShares) {
		this.removeAllSnapshotShares = removeAllSnapshotShares;
	}

	public FunctionConfiguration getRemoveCluster() {
		return removeCluster;
	}

	public void setRemoveCluster(FunctionConfiguration removeCluster) {
		this.removeCluster = removeCluster;
	}

	public FunctionConfiguration getRemoveClusterFirewall() {
		return removeClusterFirewall;
	}

	public void setRemoveClusterFirewall(FunctionConfiguration removeClusterFirewall) {
		this.removeClusterFirewall = removeClusterFirewall;
	}

	public FunctionConfiguration getRemoveClusterParameterGroup() {
		return removeClusterParameterGroup;
	}

	public void setRemoveClusterParameterGroup(FunctionConfiguration removeClusterParameterGroup) {
		this.removeClusterParameterGroup = removeClusterParameterGroup;
	}

	public FunctionConfiguration getRemoveClusterSnapshot() {
		return removeClusterSnapshot;
	}

	public void setRemoveClusterSnapshot(FunctionConfiguration removeClusterSnapshot) {
		this.removeClusterSnapshot = removeClusterSnapshot;
	}

	public FunctionConfiguration getRemoveSnapshotShare() {
		return removeSnapshotShare;
	}

	public void setRemoveSnapshotShare(FunctionConfiguration removeSnapshotShare) {
		this.removeSnapshotShare = removeSnapshotShare;
	}

	public FunctionConfiguration getResizeCluster() {
		return resizeCluster;
	}

	public void setResizeCluster(FunctionConfiguration resizeCluster) {
		this.resizeCluster = resizeCluster;
	}

	public FunctionConfiguration getRevokeComputeFirewalls() {
		return revokeComputeFirewalls;
	}

	public void setRevokeComputeFirewalls(FunctionConfiguration revokeComputeFirewalls) {
		this.revokeComputeFirewalls = revokeComputeFirewalls;
	}

	public FunctionConfiguration getRevokeIPs() {
		return revokeIPs;
	}

	public void setRevokeIPs(FunctionConfiguration revokeIPs) {
		this.revokeIPs = revokeIPs;
	}

	public FunctionConfiguration getRotateEncryptionKeys() {
		return rotateEncryptionKeys;
	}

	public void setRotateEncryptionKeys(FunctionConfiguration rotateEncryptionKeys) {
		this.rotateEncryptionKeys = rotateEncryptionKeys;
	}

	public FunctionConfiguration getSupportsAuthorizingComputeFirewalls() {
		return supportsAuthorizingComputeFirewalls;
	}

	public void setSupportsAuthorizingComputeFirewalls(FunctionConfiguration supportsAuthorizingComputeFirewalls) {
		this.supportsAuthorizingComputeFirewalls = supportsAuthorizingComputeFirewalls;
	}

	public FunctionConfiguration getSupportsCloudStorageLogging() {
		return supportsCloudStorageLogging;
	}

	public void setSupportsCloudStorageLogging(FunctionConfiguration supportsCloudStorageLogging) {
		this.supportsCloudStorageLogging = supportsCloudStorageLogging;
	}

	public FunctionConfiguration getSupportsClusterFirewalls() {
		return supportsClusterFirewalls;
	}

	public void setSupportsClusterFirewalls(FunctionConfiguration supportsClusterFirewalls) {
		this.supportsClusterFirewalls = supportsClusterFirewalls;
	}

	public FunctionConfiguration getSupportsClusterSnapshots() {
		return supportsClusterSnapshots;
	}

	public void setSupportsClusterSnapshots(FunctionConfiguration supportsClusterSnapshots) {
		this.supportsClusterSnapshots = supportsClusterSnapshots;
	}

	public FunctionConfiguration getSupportsEncryptionAtRest() {
		return supportsEncryptionAtRest;
	}

	public void setSupportsEncryptionAtRest(FunctionConfiguration supportsEncryptionAtRest) {
		this.supportsEncryptionAtRest = supportsEncryptionAtRest;
	}

	public FunctionConfiguration getUpdateParameters() {
		return updateParameters;
	}

	public void setUpdateParameters(FunctionConfiguration updateParameters) {
		this.updateParameters = updateParameters;
	}

	public FunctionConfiguration getSupportsSnapshotSharing() {
		return supportsSnapshotSharing;
	}

	public void setSupportsSnapshotSharing(FunctionConfiguration supportsSnapshotSharing) {
		this.supportsSnapshotSharing = supportsSnapshotSharing;
	}

	public FunctionConfiguration getUpdateClusterTags() {
		return updateClusterTags;
	}

	public void setUpdateClusterTags(FunctionConfiguration updateClusterTags) {
		this.updateClusterTags = updateClusterTags;
	}

	public FunctionConfiguration getUpdateSnapshotTags() {
		return updateSnapshotTags;
	}

	public void setUpdateSnapshotTags(FunctionConfiguration updateSnapshotTags) {
		this.updateSnapshotTags = updateSnapshotTags;
	}
	/**
	 * Returns a deep copy: every {@link FunctionConfiguration} field is
	 * cloned individually, so mutating the copy never affects this instance.
	 * NOTE(review): does not call {@code super.clone()}; a subclass would not
	 * have its own fields copied — confirm this is intended.
	 */
	@Override
	public DataWarehouseConfiguration clone() {
		DataWarehouseConfiguration clone = new DataWarehouseConfiguration();
		clone.addSnapshotShare = addSnapshotShare.clone();
		clone.authorizeComputeFirewalls = authorizeComputeFirewalls.clone();
		clone.authorizeIPs = authorizeIPs.clone();
		clone.createCluster = createCluster.clone();
		clone.createClusterFirewall = createClusterFirewall.clone();
		clone.createClusterParameterGroup = createClusterParameterGroup.clone();
		clone.createClusterSnapshot = createClusterSnapshot.clone();
		clone.disableLogging = disableLogging.clone();
		clone.enableLogging = enableLogging.clone();
		clone.getCluster = getCluster.clone();
		clone.getClusterFirewall = getClusterFirewall.clone();
		clone.getClusterLoggingStatus = getClusterLoggingStatus.clone();
		clone.getClusterParameterGroup = getClusterParameterGroup.clone();
		clone.getClusterProduct = getClusterProduct.clone();
		clone.getClusterSnapshot = getClusterSnapshot.clone();
		clone.getDataCenterConstraintRequirement = getDataCenterConstraintRequirement.clone();
		clone.listClusterFirewalls = listClusterFirewalls.clone();
		clone.listClusterParameterGroups = listClusterParameterGroups.clone();
		clone.listClusterProducts = listClusterProducts.clone();
		clone.listClusterSnapshots = listClusterSnapshots.clone();
		clone.listClusterVersions = listClusterVersions.clone();
		clone.listClusters = listClusters.clone();
		clone.rebootCluster = rebootCluster.clone();
		clone.removeAllSnapshotShares = removeAllSnapshotShares.clone();
		clone.removeCluster = removeCluster.clone();
		clone.removeClusterFirewall = removeClusterFirewall.clone();
		clone.removeClusterParameterGroup = removeClusterParameterGroup.clone();
		clone.removeClusterSnapshot = removeClusterSnapshot.clone();
		clone.removeSnapshotShare = removeSnapshotShare.clone();
		clone.resizeCluster = resizeCluster.clone();
		clone.revokeComputeFirewalls = revokeComputeFirewalls.clone();
		clone.revokeIPs = revokeIPs.clone();
		clone.rotateEncryptionKeys = rotateEncryptionKeys.clone();
		clone.supportsAuthorizingComputeFirewalls = supportsAuthorizingComputeFirewalls.clone();
		clone.supportsCloudStorageLogging = supportsCloudStorageLogging.clone();
		clone.supportsClusterFirewalls = supportsClusterFirewalls.clone();
		clone.supportsClusterSnapshots = supportsClusterSnapshots.clone();
		clone.supportsEncryptionAtRest = supportsEncryptionAtRest.clone();
		clone.updateParameters = updateParameters.clone();
		clone.supportsSnapshotSharing = supportsSnapshotSharing.clone();
		clone.updateClusterTags = updateClusterTags.clone();
		clone.updateSnapshotTags = updateSnapshotTags.clone();
		return clone;
	}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((addSnapshotShare == null) ? 0 : addSnapshotShare.hashCode());
result = prime * result + ((authorizeComputeFirewalls == null) ? 0 : authorizeComputeFirewalls.hashCode());
result = prime * result + ((authorizeIPs == null) ? 0 : authorizeIPs.hashCode());
result = prime * result + ((createCluster == null) ? 0 : createCluster.hashCode());
result = prime * result + ((createClusterFirewall == null) ? 0 : createClusterFirewall.hashCode());
result = prime * result + ((createClusterParameterGroup == null) ? 0 : createClusterParameterGroup.hashCode());
result = prime * result + ((createClusterSnapshot == null) ? 0 : createClusterSnapshot.hashCode());
result = prime * result + ((disableLogging == null) ? 0 : disableLogging.hashCode());
result = prime * result + ((enableLogging == null) ? 0 : enableLogging.hashCode());
result = prime * result + ((getCluster == null) ? 0 : getCluster.hashCode());
result = prime * result + ((getClusterFirewall == null) ? 0 : getClusterFirewall.hashCode());
result = prime * result + ((getClusterLoggingStatus == null) ? 0 : getClusterLoggingStatus.hashCode());
result = prime * result + ((getClusterParameterGroup == null) ? 0 : getClusterParameterGroup.hashCode());
result = prime * result + ((getClusterProduct == null) ? 0 : getClusterProduct.hashCode());
result = prime * result + ((getClusterSnapshot == null) ? 0 : getClusterSnapshot.hashCode());
result = prime * result
+ ((getDataCenterConstraintRequirement == null) ? 0 : getDataCenterConstraintRequirement.hashCode());
result = prime * result + ((listClusterFirewalls == null) ? 0 : listClusterFirewalls.hashCode());
result = prime * result + ((listClusterParameterGroups == null) ? 0 : listClusterParameterGroups.hashCode());
result = prime * result + ((listClusterProducts == null) ? 0 : listClusterProducts.hashCode());
result = prime * result + ((listClusterSnapshots == null) ? 0 : listClusterSnapshots.hashCode());
result = prime * result + ((listClusterVersions == null) ? 0 : listClusterVersions.hashCode());
result = prime * result + ((listClusters == null) ? 0 : listClusters.hashCode());
result = prime * result + ((rebootCluster == null) ? 0 : rebootCluster.hashCode());
result = prime * result + ((removeAllSnapshotShares == null) ? 0 : removeAllSnapshotShares.hashCode());
result = prime * result + ((removeCluster == null) ? 0 : removeCluster.hashCode());
result = prime * result + ((removeClusterFirewall == null) ? 0 : removeClusterFirewall.hashCode());
result = prime * result + ((removeClusterParameterGroup == null) ? 0 : removeClusterParameterGroup.hashCode());
result = prime * result + ((removeClusterSnapshot == null) ? 0 : removeClusterSnapshot.hashCode());
result = prime * result + ((removeSnapshotShare == null) ? 0 : removeSnapshotShare.hashCode());
result = prime * result + ((resizeCluster == null) ? 0 : resizeCluster.hashCode());
result = prime * result + ((revokeComputeFirewalls == null) ? 0 : revokeComputeFirewalls.hashCode());
result = prime * result + ((revokeIPs == null) ? 0 : revokeIPs.hashCode());
result = prime * result + ((rotateEncryptionKeys == null) ? 0 : rotateEncryptionKeys.hashCode());
result = prime * result
+ ((supportsAuthorizingComputeFirewalls == null) ? 0 : supportsAuthorizingComputeFirewalls.hashCode());
result = prime * result + ((supportsCloudStorageLogging == null) ? 0 : supportsCloudStorageLogging.hashCode());
result = prime * result + ((supportsClusterFirewalls == null) ? 0 : supportsClusterFirewalls.hashCode());
result = prime * result + ((supportsClusterSnapshots == null) ? 0 : supportsClusterSnapshots.hashCode());
result = prime * result + ((supportsEncryptionAtRest == null) ? 0 : supportsEncryptionAtRest.hashCode());
result = prime * result + ((supportsSnapshotSharing == null) ? 0 : supportsSnapshotSharing.hashCode());
result = prime * result + ((updateClusterTags == null) ? 0 : updateClusterTags.hashCode());
result = prime * result + ((updateParameters == null) ? 0 : updateParameters.hashCode());
result = prime * result + ((updateSnapshotTags == null) ? 0 : updateSnapshotTags.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
DataWarehouseConfiguration other = (DataWarehouseConfiguration) obj;
if (addSnapshotShare == null) {
if (other.addSnapshotShare != null)
return false;
} else if (!addSnapshotShare.equals(other.addSnapshotShare))
return false;
if (authorizeComputeFirewalls == null) {
if (other.authorizeComputeFirewalls != null)
return false;
} else if (!authorizeComputeFirewalls.equals(other.authorizeComputeFirewalls))
return false;
if (authorizeIPs == null) {
if (other.authorizeIPs != null)
return false;
} else if (!authorizeIPs.equals(other.authorizeIPs))
return false;
if (createCluster == null) {
if (other.createCluster != null)
return false;
} else if (!createCluster.equals(other.createCluster))
return false;
if (createClusterFirewall == null) {
if (other.createClusterFirewall != null)
return false;
} else if (!createClusterFirewall.equals(other.createClusterFirewall))
return false;
if (createClusterParameterGroup == null) {
if (other.createClusterParameterGroup != null)
return false;
} else if (!createClusterParameterGroup.equals(other.createClusterParameterGroup))
return false;
if (createClusterSnapshot == null) {
if (other.createClusterSnapshot != null)
return false;
} else if (!createClusterSnapshot.equals(other.createClusterSnapshot))
return false;
if (disableLogging == null) {
if (other.disableLogging != null)
return false;
} else if (!disableLogging.equals(other.disableLogging))
return false;
if (enableLogging == null) {
if (other.enableLogging != null)
return false;
} else if (!enableLogging.equals(other.enableLogging))
return false;
if (getCluster == null) {
if (other.getCluster != null)
return false;
} else if (!getCluster.equals(other.getCluster))
return false;
if (getClusterFirewall == null) {
if (other.getClusterFirewall != null)
return false;
} else if (!getClusterFirewall.equals(other.getClusterFirewall))
return false;
if (getClusterLoggingStatus == null) {
if (other.getClusterLoggingStatus != null)
return false;
} else if (!getClusterLoggingStatus.equals(other.getClusterLoggingStatus))
return false;
if (getClusterParameterGroup == null) {
if (other.getClusterParameterGroup != null)
return false;
} else if (!getClusterParameterGroup.equals(other.getClusterParameterGroup))
return false;
if (getClusterProduct == null) {
if (other.getClusterProduct != null)
return false;
} else if (!getClusterProduct.equals(other.getClusterProduct))
return false;
if (getClusterSnapshot == null) {
if (other.getClusterSnapshot != null)
return false;
} else if (!getClusterSnapshot.equals(other.getClusterSnapshot))
return false;
if (getDataCenterConstraintRequirement == null) {
if (other.getDataCenterConstraintRequirement != null)
return false;
} else if (!getDataCenterConstraintRequirement.equals(other.getDataCenterConstraintRequirement))
return false;
if (listClusterFirewalls == null) {
if (other.listClusterFirewalls != null)
return false;
} else if (!listClusterFirewalls.equals(other.listClusterFirewalls))
return false;
if (listClusterParameterGroups == null) {
if (other.listClusterParameterGroups != null)
return false;
} else if (!listClusterParameterGroups.equals(other.listClusterParameterGroups))
return false;
if (listClusterProducts == null) {
if (other.listClusterProducts != null)
return false;
} else if (!listClusterProducts.equals(other.listClusterProducts))
return false;
if (listClusterSnapshots == null) {
if (other.listClusterSnapshots != null)
return false;
} else if (!listClusterSnapshots.equals(other.listClusterSnapshots))
return false;
if (listClusterVersions == null) {
if (other.listClusterVersions != null)
return false;
} else if (!listClusterVersions.equals(other.listClusterVersions))
return false;
if (listClusters == null) {
if (other.listClusters != null)
return false;
} else if (!listClusters.equals(other.listClusters))
return false;
if (rebootCluster == null) {
if (other.rebootCluster != null)
return false;
} else if (!rebootCluster.equals(other.rebootCluster))
return false;
if (removeAllSnapshotShares == null) {
if (other.removeAllSnapshotShares != null)
return false;
} else if (!removeAllSnapshotShares.equals(other.removeAllSnapshotShares))
return false;
if (removeCluster == null) {
if (other.removeCluster != null)
return false;
} else if (!removeCluster.equals(other.removeCluster))
return false;
if (removeClusterFirewall == null) {
if (other.removeClusterFirewall != null)
return false;
} else if (!removeClusterFirewall.equals(other.removeClusterFirewall))
return false;
if (removeClusterParameterGroup == null) {
if (other.removeClusterParameterGroup != null)
return false;
} else if (!removeClusterParameterGroup.equals(other.removeClusterParameterGroup))
return false;
if (removeClusterSnapshot == null) {
if (other.removeClusterSnapshot != null)
return false;
} else if (!removeClusterSnapshot.equals(other.removeClusterSnapshot))
return false;
if (removeSnapshotShare == null) {
if (other.removeSnapshotShare != null)
return false;
} else if (!removeSnapshotShare.equals(other.removeSnapshotShare))
return false;
if (resizeCluster == null) {
if (other.resizeCluster != null)
return false;
} else if (!resizeCluster.equals(other.resizeCluster))
return false;
if (revokeComputeFirewalls == null) {
if (other.revokeComputeFirewalls != null)
return false;
} else if (!revokeComputeFirewalls.equals(other.revokeComputeFirewalls))
return false;
if (revokeIPs == null) {
if (other.revokeIPs != null)
return false;
} else if (!revokeIPs.equals(other.revokeIPs))
return false;
if (rotateEncryptionKeys == null) {
if (other.rotateEncryptionKeys != null)
return false;
} else if (!rotateEncryptionKeys.equals(other.rotateEncryptionKeys))
return false;
if (supportsAuthorizingComputeFirewalls == null) {
if (other.supportsAuthorizingComputeFirewalls != null)
return false;
} else if (!supportsAuthorizingComputeFirewalls.equals(other.supportsAuthorizingComputeFirewalls))
return false;
if (supportsCloudStorageLogging == null) {
if (other.supportsCloudStorageLogging != null)
return false;
} else if (!supportsCloudStorageLogging.equals(other.supportsCloudStorageLogging))
return false;
if (supportsClusterFirewalls == null) {
if (other.supportsClusterFirewalls != null)
return false;
} else if (!supportsClusterFirewalls.equals(other.supportsClusterFirewalls))
return false;
if (supportsClusterSnapshots == null) {
if (other.supportsClusterSnapshots != null)
return false;
} else if (!supportsClusterSnapshots.equals(other.supportsClusterSnapshots))
return false;
if (supportsEncryptionAtRest == null) {
if (other.supportsEncryptionAtRest != null)
return false;
} else if (!supportsEncryptionAtRest.equals(other.supportsEncryptionAtRest))
return false;
if (supportsSnapshotSharing == null) {
if (other.supportsSnapshotSharing != null)
return false;
} else if (!supportsSnapshotSharing.equals(other.supportsSnapshotSharing))
return false;
if (updateClusterTags == null) {
if (other.updateClusterTags != null)
return false;
} else if (!updateClusterTags.equals(other.updateClusterTags))
return false;
if (updateParameters == null) {
if (other.updateParameters != null)
return false;
} else if (!updateParameters.equals(other.updateParameters))
return false;
if (updateSnapshotTags == null) {
if (other.updateSnapshotTags != null)
return false;
} else if (!updateSnapshotTags.equals(other.updateSnapshotTags))
return false;
return true;
}
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package lista2;
/**
*
* @author PauloCésar
*/
public class Agenda {
    // Fixed-capacity phone book backed by an array of Pessoa entries.
    private Pessoa[] pessoas;
    private int i = 0;          // number of entries currently stored
    private int totalPessoas;   // capacity of the backing array

    /** Default constructor: capacity of 250 people. */
    public Agenda(){
        this(250);
    }

    /** Custom-capacity constructor. */
    public Agenda(int qtd){
        totalPessoas = qtd;
        pessoas = new Pessoa[totalPessoas];
    }

    /** Appends a person if there is room; otherwise reports the agenda is full. */
    public void armazenarPessoa(Pessoa pessoa){
        if(i < totalPessoas){
            pessoas[i] = pessoa;
            i++;
        }else{
            System.out.println("Espaço insuficiente!");
        }
    }

    /**
     * Removes a person (matched by CPF) by shifting the remaining entries
     * left, keeping the array compact. FIX: the previous version only
     * overwrote the slot with a blank placeholder Pessoa and never
     * reorganized the array (its own TODO comment).
     */
    public void removerPessoa(Pessoa pessoa){
        int posicaoRemovida = buscarPessoa(pessoa);
        if(posicaoRemovida != -1){
            for(int j = posicaoRemovida; j < i - 1; j++){
                pessoas[j] = pessoas[j + 1];
            }
            pessoas[--i] = null; // release the now-unused tail slot
        }else{
            System.out.println("Pessoa não encontrada!");
        }
    }

    /**
     * Returns the index of the stored person whose CPF equals the given
     * person's CPF, or -1 when absent. Strings must be compared with
     * equals(), not ==.
     */
    public int buscarPessoa(Pessoa pessoa){
        for(int j = 0; j < i; j++){
            if( pessoas[j].getCpf().equals(pessoa.getCpf()) ){
                return j;
            }
        }
        return -1;
    }

    /** Returns the stored person with the given CPF, or null when absent. */
    public Pessoa buscarPessoa(String cpf){
        for(int j = 0; j < i; j++){
            if( pessoas[j].getCpf().equals(cpf) ){
                return pessoas[j];
            }
        }
        return null;
    }

    /**
     * Prints a single person identified by CPF. FIX: guards against the
     * null returned by buscarPessoa when the CPF is unknown (the previous
     * version threw a NullPointerException).
     */
    public void imprimirPessoa(String cpf){
        Pessoa p = buscarPessoa(cpf);
        if(p != null){
            p.imprimirPessoa();
        }else{
            System.out.println("Pessoa não encontrada!");
        }
    }

    /** Prints every stored person, in insertion order. */
    public void imprimirAgenda(){
        for(int j = 0; j < i; j++){
            pessoas[j].imprimirPessoa();
        }
    }
}
|
#encoding:utf-8
import time
def toDate(timeStamp):
    """Format a Unix timestamp as a local-time ``YYYY-MM-DD`` date string."""
    return time.strftime("%Y-%m-%d", time.localtime(timeStamp))
|
#!/bin/bash
# Install Caliper dependencies inside "$1/caliper".
#   $1 - path to the repository root containing the caliper directory
function installCaliperDependencies() {
    # Caliper directory — bail out instead of running npm in the wrong place
    # if the directory does not exist (the original ignored cd failures).
    cd "$1/caliper" || return 1
    # Get access to the local update config store for the invoking user.
    # FIX: was hard-coded to /home/lucas/.config; use $HOME so the script
    # works for any user.
    sudo chown -R "$USER:$(id -gn "$USER")" "$HOME/.config"
    # Install Caliper dependencies
    npm install
}
<gh_stars>0
import * as React from 'react';
import PropTypes from 'prop-types';
import Head from 'next/head';
import { ThemeProvider } from '@mui/material/styles';
import CssBaseline from '@mui/material/CssBaseline';
import { CacheProvider } from '@emotion/react';
import theme from '../src/theme';
import createEmotionCache from '../src/createEmotionCache';
import { Global, css} from '@emotion/react';
// Client-side cache, shared for the whole session of the user in the browser.
const clientSideEmotionCache = createEmotionCache();
// Custom Next.js App: wires up the shared Emotion cache, the MUI theme and
// CSS baseline, a fixed <Head> (title + viewport), and the global styles for
// the add-to-calendar widget (.chq-atc classes), then renders the active page.
export default function MyApp(props) {
  // emotionCache defaults to the module-level client-side cache; the server
  // passes its own cache via props during SSR.
  const { Component, emotionCache = clientSideEmotionCache, pageProps } = props;
  return (
    <CacheProvider value={emotionCache}>
      <Head>
        <title>Sanjana and Shirish</title>
        <meta name="viewport" content="initial-scale=1, width=device-width" />
      </Head>
      <ThemeProvider theme={theme}>
        {/* CssBaseline kickstart an elegant, consistent, and simple baseline to build upon. */}
        <CssBaseline />
        {/* Global CSS for the add-to-calendar (chq-atc) dropdown widget. */}
        <Global styles={css` .chq-atc {
          display: inline-block;
          position: relative;
        }
        .chq-atc--button {
          background: transparent;
          border: 0;
          border-radius: 5px;
          box-sizing: border-box;
          color: #6a89af;
          cursor: pointer;
          display: inline;
          font-family: inherit;
          font-size: inherit;
          line-height: inherit;
          margin: 0;
          outline: none;
          padding: 4px 10px;
        }
        .chq-atc--button:focus {
          background-color: #eaeaea;
        }
        .chq-atc--button:hover {
          background-color: #eaeaea;
        }
        .chq-atc--button svg {
          vertical-align: text-bottom;
        }
        .chq-atc--button path {
          fill: #6a89af;
        }
        .chq-atc--dropdown {
          background-color: white;
          border-radius: 5px;
          border: 1px solid #eaeaea;
          box-shadow: .5px .5px 3px rgba(0, 0, 0, .2);
          box-sizing: border-box;
          position: absolute;
          text-align: left;
          white-space: nowrap;
          width: 100%;
          z-index: 1;
        }
        .chq-atc--dropdown a {
          color: #6a89af;
          display: block;
          padding: 8px 15px;
          text-decoration: none;
        }
        .chq-atc--dropdown a:hover {
          background-color: #eaeaea;
        } `}/>
        <Component {...pageProps} />
      </ThemeProvider>
    </CacheProvider>
  );
}
// Runtime prop validation for the custom App component.
MyApp.propTypes = {
  Component: PropTypes.elementType.isRequired,
  emotionCache: PropTypes.object,
  pageProps: PropTypes.object.isRequired,
};
|
const {csv2json} = require('../csv2json');
const { QueueClient }= require('@azure/storage-queue');
// Blank out the storage settings so csv2json never reaches a real queue.
process.env.JSON_STORAGE_CONNECTION = '';
process.env.JSON_STORAGE_QUEUE = '';
// Replace the Azure QueueClient with a no-op constructor; individual tests
// re-mock it to capture the messages csv2json sends.
jest.mock('@azure/storage-queue', () => ({
    QueueClient: jest.fn().mockImplementation((_, __) => {})
}));
// Minimal stand-in for an Azure Functions context: `log` is itself callable
// and also carries warn/error/verbose methods, all forwarding to the console.
function MockContext() {};
MockContext.prototype.log = function(txt) {
    console.log(txt);
};
MockContext.prototype.log.warn = function(txt) {
    console.warn(txt);
}
MockContext.prototype.log.error = function(txt) {
    console.error(txt);
}
MockContext.prototype.log.verbose = function(txt) {
    console.debug(txt);
}
// Shared context instance passed to csv2json in every test below.
const mockContext = new MockContext();
describe('csv2json', () => {
    // Messages "sent" to the mocked queue, decoded from base64-encoded JSON.
    let mockMessages;

    // FIX: the identical QueueClient stub was copy-pasted into all seven
    // tests, and testValue/mockMessages were implicit globals. The stub now
    // lives in a beforeEach; sendMessage records each decoded payload and
    // returns a never-resolving promise, mirroring the original behavior.
    beforeEach(() => {
        mockMessages = [];
        QueueClient.mockImplementation(() => ({
            createIfNotExists: jest.fn(),
            sendMessage: jest.fn().mockImplementation((str) => {
                mockMessages.push(JSON.parse(Buffer.from(str, 'base64').toString()));
                return new Promise(() => {});
            }),
        }));
    });

    it('works with twin without properties', async () => {
        const testValue = '"$metadata.$model","$id"\n'
            + '"modelName","instanceId"';
        await csv2json(mockContext, testValue);
        expect(mockMessages.length).toBe(1);
        expect(mockMessages[0].$id).toBe('instanceId');
        expect(mockMessages[0].$metadata.$model).toBe('modelName');
    });

    it('creates one twin per row', async () => {
        const testValue = '"$metadata.$model","$id"\n'
            + '"modelName","instanceOne"\n'
            + '"modelName","instanceTwo"';
        await csv2json(mockContext, testValue);
        expect(mockMessages.length).toBe(2);
        expect(mockMessages[0].$id).toBe('instanceOne');
        expect(mockMessages[0].$metadata.$model).toBe('modelName');
        expect(mockMessages[1].$id).toBe('instanceTwo');
        expect(mockMessages[1].$metadata.$model).toBe('modelName');
    });

    it('handles integer properties', async () => {
        const testValue = '"$metadata.$model","$id",answer\n'
            + '"modelName","instanceId",42';
        await csv2json(mockContext, testValue);
        expect(mockMessages.length).toBe(1);
        expect(mockMessages[0].answer).toBe(42);
    });

    it('handles double properties', async () => {
        const testValue = '"$metadata.$model","$id",e\n'
            + '"modelName","instanceId",2.71828';
        await csv2json(mockContext, testValue);
        expect(mockMessages.length).toBe(1);
        expect(mockMessages[0].e).toBe(2.71828);
    });

    it('handles object properties', async () => {
        const testValue = '"$metadata.$model","$id","eeny.meenie.miney.moe"\n'
            + '"modelName","instanceId","catch a tiger by the toe"';
        await csv2json(mockContext, testValue);
        expect(mockMessages.length).toBe(1);
        expect(mockMessages[0].eeny.meenie.miney.moe).toBe('catch a tiger by the toe');
    });

    it('handles simple relationships', async () => {
        const testValue = '"$sourceId","$targetId","$relationshipId","$relationshipName"\n'
            + '"sourceId","targetId","sourceId-relation-targetId","relationName"';
        await csv2json(mockContext, testValue);
        expect(mockMessages.length).toBe(1);
        expect(mockMessages[0].$sourceId).toBe('sourceId');
        expect(mockMessages[0].$targetId).toBe('targetId');
        expect(mockMessages[0].$relationshipId).toBe('sourceId-relation-targetId');
        expect(mockMessages[0].$relationshipName).toBe('relationName');
    })

    it('handles relationships with properties', async () => {
        const testValue = '"$sourceId","$targetId","$relationshipId","$relationshipName","property1","nested.property"\n'
            + '"sourceId","targetId","sourceId-relation-targetId","relationName",3.14159,"value"';
        await csv2json(mockContext, testValue);
        expect(mockMessages.length).toBe(1);
        expect(mockMessages[0].property1).toBe(3.14159);
        expect(mockMessages[0].nested.property).toBe("value");
    })

    it('creates one relationship per row', async () => {
        const testValue = '"$sourceId","$targetId","$relationshipId","$relationshipName","property1","nested.property"\n'
            + '"sourceId","targetId","sourceId-relation-targetId","relationName",3.14159,"value"\n'
            + '"sourceId2","targetId2","sourceId-relation-targetId2","relationName2", 2.71828,"value2"';
        await csv2json(mockContext, testValue);
        expect(mockMessages.length).toBe(2);
        expect(mockMessages[0].property1).toBe(3.14159);
        expect(mockMessages[1].property1).toBe(2.71828);
    })
});
|
import { hexToBin } from "./utils";
// A single BITS packet: a version/type header plus either a literal value
// (type 4) or a list of sub-packets for operator types.
export class Packet {
  version: number;
  type: number;
  // Nested packets for operator types; empty for literal packets.
  subPackets: Packet[] = [];
  // Evaluated value (literal payload, or the operator applied to subPackets).
  value = 0;

  constructor(version: number, type: number) {
    this.version = version;
    this.type = type;
  }
}
// Evaluates an operator packet's value from its sub-packet values according
// to the packet's type id. Unknown types (including literal type 4) yield 0.
const calculatePacketValue = (packet: Packet): number => {
  const values = packet.subPackets.map((p) => p.value);
  if (packet.type === 0) return values.reduce((acc, v) => acc + v, 0); // sum
  if (packet.type === 1) return values.reduce((acc, v) => acc * v, 1); // product
  if (packet.type === 2) return Math.min(...values);                   // minimum
  if (packet.type === 3) return Math.max(...values);                   // maximum
  if (packet.type === 5) return values[0] > values[1] ? 1 : 0;         // greater than
  if (packet.type === 6) return values[0] < values[1] ? 1 : 0;         // less than
  if (packet.type === 7) return values[0] === values[1] ? 1 : 0;       // equal to
  return 0;
}
export class PacketParser {
binaryStream: string[] = [];
sumVersions = 0;
packetValue = 0;
constructor(input: string) {
this.binaryStream = hexToBin(input);
this.parsePackets();
}
addUpVersionsForAllPackets = (packets: Packet[]): number => {
return packets
.map((p) => p.version + this.addUpVersionsForAllPackets(p.subPackets))
.reduce((a, b) => a + b, 0);
}
parseLiterals = (binary: string[]) => {
let decoded = '';
while(true) {
const chunk = binary.splice(0, 5).join('');
decoded += chunk.substring(1);
// when we reahced 0, we can now exit.
if (chunk[0] === '0') {
break;
}
}
return decoded;
}
parsePackets = () => {
const packets = this.parsePacket(this.binaryStream);
this.sumVersions = this.addUpVersionsForAllPackets(packets)
this.packetValue = packets[0].value;
}
get versionSum () {
return this.sumVersions;
}
get outterPacketValue () {
return this.packetValue;
}
parsePacket = (binaryStream: string[], numSubpackets = -1) => {
const packets: Packet[] = [];
let numParsedSubpackets = 0;
while(binaryStream.length > 0 &&
(numSubpackets < 0 || numParsedSubpackets < numSubpackets)
) {
// we got no more significant bits. all zeros, lets bail now.
if (binaryStream.filter(b => b === '1') === undefined) {
break;
}
const version = parseInt(binaryStream.splice(0, 3).join(''), 2);
const type = parseInt(binaryStream.splice(0, 3).join(''), 2);
const packet = new Packet(version, type);
numParsedSubpackets += 1;
// this is a type 4, which means it is a literal package
if (type === 4) {
const decoded = this.parseLiterals(binaryStream);
packet.value = parseInt(decoded, 2);
} else {
const packetId = binaryStream.shift();
if (packetId === '1') {
// next 11 bits are the subpacket size
const subpacketCount = parseInt(binaryStream.splice(0, 11).join(''), 2);
packet.subPackets = this.parsePacket(binaryStream, subpacketCount);
} else if (packetId === '0') {
// next 15 bits are the subpacket size
const subpacketSize = parseInt(binaryStream.splice(0, 15).join(''), 2);
packet.subPackets = this.parsePacket(binaryStream.splice(0, subpacketSize));
}
packet.value = calculatePacketValue(packet);
}
packets.push(packet);
}
return packets;
}
} |
<gh_stars>0
'''
meta allows to use operations form the client with the resource
'''
import boto3

# Open a session bound to the "root" profile and get an EC2 resource handle.
session = boto3.session.Session(profile_name="root")
ec2 = session.resource(service_name="ec2")

# The resource's meta.client exposes the low-level client operations, so we
# can call describe_regions() without creating a separate client.
for region in ec2.meta.client.describe_regions()['Regions']:
    print(region['RegionName'])
|
public class ReverseString {

    /** Returns the reverse of the given string. */
    static String reverse(String str) {
        // StringBuilder.reverse avoids the O(n^2) repeated concatenation of
        // the original character-by-character loop.
        return new StringBuilder(str).reverse().toString();
    }

    /** Reads one whitespace-delimited token from stdin and prints it reversed. */
    public static void main(String[] args) {
        System.out.println("Input a string: ");
        // FIX: Scanner was used without an import, so the file did not
        // compile; the fully qualified name avoids adding an import line.
        java.util.Scanner sc = new java.util.Scanner(System.in);
        String str = sc.next();
        String reversed = reverse(str);
        System.out.println("Reversed String is " + reversed);
    }
}
#Python 3.8.0
#Make by Lonely Dark
"""Create backup copies of a file or of every file under a directory."""
import argparse
import os
import shutil

parser = argparse.ArgumentParser()
parser.add_argument('-r', '--recursive', help='recursive add files', action='store_true')
parser.add_argument('-d', '--directory', help='directory where the files(or file) are. If file one, input file with full name', required=True)
args = parser.parse_args()

if args.recursive:
    # FIX: the original detected directories by catching PermissionError
    # from open(), which only works on some platforms (POSIX raises
    # IsADirectoryError), and it descended exactly one level. os.walk
    # handles directories explicitly and to any depth.
    for dirpath, _dirnames, filenames in os.walk(args.directory):
        for name in filenames:
            src = os.path.join(dirpath, name)
            # shutil.copyfile replaces the manual read/write pair and
            # cannot leak file handles on error.
            shutil.copyfile(src, os.path.join(dirpath, 'backup_' + name))
else:
    # FIX: splitting on '.' broke names containing several dots (and paths
    # with dotted directories); os.path.splitext splits only the last
    # extension.
    root, ext = os.path.splitext(args.directory)
    shutil.copyfile(args.directory, root + '_backup' + ext)
|
#!/bin/bash
set -e

# Mirror CoreOS release images for one channel into the current directory.
#   $1 - release channel (stable|beta|alpha)
#   $2 - version directory to mirror (defaults to "current")
syncit() {
  channel=$1
  version=${2:-current}
  wget --cut-dirs=1 -nH --quiet -A coreos_production_image*,coreos_production_pxe*,version.txt* -m "http://${channel}.release.core-os.net/amd64-usr/${version}"
}

# Run relative to this script's own directory.
# FIX: quoted so paths containing whitespace do not word-split.
cd "$(dirname "$0")"

(
  cd bodil/static/images/coreos/stable
  syncit stable
)
(
  cd bodil/static/images/coreos/beta
  syncit beta
)
(
  cd bodil/static/images/coreos/alpha
  syncit alpha
)
|
<filename>src/assets/index.ts
// Re-export the bundled vector definitions as this module's default export.
import vectors from './vectors.json';

export default vectors;
|
def preOrderTraversal(root):
    """Print each node's data in pre-order (node, left subtree, right subtree)."""
    stack = [root]
    while stack:
        node = stack.pop()
        if node is None:
            continue
        print(node.data)
        # Push the right child first so the left subtree is visited first.
        stack.append(node.right)
        stack.append(node.left)
#!/bin/bash
# Write a shell script which will receive 5 numbers from command line
# and print their sum.

# Accumulate the five positional parameters, then report the total.
total=$(( $1 + $2 + $3 + $4 + $5 ))
echo "Sum of Five Numbers is:" $total
|
#!/usr/bin/env bash
# Interactive number-guessing game: picks a random target in [min, max] and
# narrows the displayed hint window after each sensible guess.

min=1
max=100

# Generate a random number between min and max.
target=$(( ( RANDOM % max ) + min ))

# Widen the bounds by one so the hints can use strict inequalities.
# FIX: the deprecated $[ ... ] arithmetic syntax is replaced by $(( ... )).
min=$(( min - 1 ))
max=$(( max + 1 ))

guesses_made=0
guess=-1

while [ $guess -ne $target ]; do
  echo ==x==x==x== ==x==x==x== ==x==x==x==

  # Only narrow the hint window for guesses inside the current bounds.
  if [[ $guesses_made -ne 0 && $guess -ge $min && $guess -le $max ]]; then
    if [ $target -lt $guess ]; then
      max=$guess
    elif [ $target -gt $guess ]; then
      min=$guess
    fi
  fi

  echo $min \< Target number \< $max

  read -p 'Please enter your guess: ' guess
  guesses_made=$(( guesses_made + 1 ))
  echo Number of guesses made: $guesses_made
  echo
done

echo
echo Your guess is correct. Target number $target is equal to your guess $guess.
echo Congratulations, you guessed the target number with $guesses_made guesses.
|
import React, { FunctionComponent } from 'react';

import { TexturedStyles } from '@elastic/charts';

// NOTE(review): renders an empty div and takes TexturedStyles as its props
// type — presumably a type-only placeholder so documentation tooling can
// extract the TexturedStyles props; confirm against the docs build.
export const TexturedStylesProps: FunctionComponent<TexturedStyles> = () => (
  <div />
);
|
#!/bin/bash
# profiles = xccdf_org.ssgproject.content_profile_cui
# remediation = bash

# Shared helpers providing prepare_auditd_test_enviroment and
# set_parameters_value.
. $SHARED/auditd_utils.sh

# Prepare the auditd test environment, then set space_left_action to
# "suspend" in auditd.conf so the remediation under test has a value to fix.
prepare_auditd_test_enviroment
set_parameters_value /etc/audit/auditd.conf "space_left_action" "suspend"
|
#!/bin/sh
### BEGIN INIT INFO
# Provides:          gpsdproxy
# Required-Start:    gpsd
# Required-Stop:     gpsd
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: GPSDproxy daemon
# Description:       Start/Stop script for the gpsd proxy daemon,
#                    GPS position read from gpsd daemon is forwarded
#                    to a remote host using UDP packets.
### END INIT INFO

PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
DESC='GPSD Proxy client'
NAME=gpsdproxy
DAEMON=/usr/local/sbin/$NAME
PIDFILE=/var/run/$NAME.pid
SCRIPTNAME=/etc/init.d/$NAME

# Defaults; all of these may be overridden by /etc/default/gpsdproxy below.
client_id='gpsdproxy'
gpsd_host=127.0.0.1
gpsd_port=2947
remote_host=''
remote_port=''
interval=10
debug=3

# Nothing to do if the daemon binary is not installed.
test -f $DAEMON || exit 0

# Pull in site-specific settings (remote_host/remote_port etc.).
test -f /etc/default/gpsdproxy && . /etc/default/gpsdproxy

# Without a configured remote host there is nowhere to forward positions.
test -z "$remote_host" && exit 0

case "$1" in
  start)
	echo -n "Starting $DESC: "
	# Daemonized (-b) with the pidfile managed by the daemon itself (-f).
	start-stop-daemon --start --quiet --pidfile $PIDFILE \
		--exec $DAEMON -- \
		-i "$client_id" -t $interval \
		-h $remote_host -p $remote_port -s $gpsd_host -r $gpsd_port \
		-f $PIDFILE -d $debug -b
	if [ $? = 0 ]; then
	  echo "$NAME."
	else
	  echo "(failed.)"
	fi
	;;
  stop)
	echo -n "Stopping $DESC: "
	# --oknodo: succeed even if the daemon was not running.
	start-stop-daemon --oknodo --stop --quiet --pidfile $PIDFILE
	rm -f $PIDFILE
	echo "gpsdproxy."
	;;
  *)
	echo "Usage: $SCRIPTNAME {start|stop}"
	exit 1
	;;
esac

exit 0
|
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
public class FileHandler {
/**
* Reads a text file, identifies the line containing a specific keyword, and replaces the content of that line with new data.
*
* @param filePath the path to the text file
* @param keyword the keyword to search for in the file
* @param newData the new data to replace the line content
* @return true if the replacement is successful, false otherwise
*/
public boolean replaceLineWithKeyword(String filePath, String keyword, String newData) {
try {
FileReader fileReader = new FileReader(filePath);
BufferedReader bufferedReader = new BufferedReader(fileReader);
String line;
StringBuilder fileContent = new StringBuilder();
while ((line = bufferedReader.readLine()) != null) {
if (line.contains(keyword)) {
fileContent.append(newData).append(System.lineSeparator());
} else {
fileContent.append(line).append(System.lineSeparator());
}
}
bufferedReader.close();
FileWriter fileWriter = new FileWriter(filePath);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
bufferedWriter.write(fileContent.toString());
bufferedWriter.close();
return true;
} catch (IOException e) {
e.printStackTrace();
return false;
}
}
} |
import { BaseInput, BaseInputProps } from '../base_input';
import { ForwardedRef, createElement, forwardRef } from 'react';
import { useRadioGroup } from '../radio_group';
// Radio accepts every BaseInput prop except `type`, which is fixed to "radio".
export type RadioProps = Omit<BaseInputProps<'input'>, 'type'>;

/**
 * Radio input that participates in the surrounding radio-group context:
 * group-level props are merged in via useRadioGroup().getGroupProps before
 * rendering the underlying BaseInput with type="radio".
 */
function Radio(
  props: RadioProps,
  ref: ForwardedRef<HTMLInputElement>
): JSX.Element {
  const { getGroupProps } = useRadioGroup();

  return createElement(BaseInput, {
    ...getGroupProps(props),
    type: 'radio',
    ref,
  });
}

// forwardRef so parents can reach the native <input> element.
export default forwardRef(Radio);
|
<filename>src/main/java/tcg/credential/ComponentAddress.java
package tcg.credential;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1Object;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.ASN1Primitive;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.DERUTF8String;
/**
* <pre>
* ComponentAddress ::= SEQUENCE {
* addressType AddressType,
* addressValue UTF8String (SIZE (1..STRMAX)) }
*
 * AddressType ::= OBJECT IDENTIFIER (tcg-address-ethernetmac | tcg-address-wlanmac | tcg-address-bluetoothmac)
* </pre>
*/
public class ComponentAddress extends ASN1Object {

    // The SEQUENCE has exactly two mandatory elements.
    ASN1ObjectIdentifier addressType;
    DERUTF8String addressValue;

    /**
     * Returns a ComponentAddress for the given object, converting from an
     * ASN1Sequence when necessary.
     *
     * @param obj null, a ComponentAddress, or an ASN1Sequence
     * @return the ComponentAddress, or null when obj is null
     * @throws IllegalArgumentException for any other input type
     */
    public static ComponentAddress getInstance(Object obj) {
        if (obj == null || obj instanceof ComponentAddress) {
            return (ComponentAddress) obj;
        }
        if (obj instanceof ASN1Sequence) {
            return new ComponentAddress((ASN1Sequence) obj);
        }
        throw new IllegalArgumentException("Illegal argument in getInstance: " + obj.getClass().getName());
    }

    private ComponentAddress(ASN1Sequence seq) {
        if (seq.size() != 2) {
            throw new IllegalArgumentException("Bad sequence size: " + seq.size());
        }
        // FIX: seq.toArray() returns an ASN1Encodable[]; the previous cast to
        // ASN1Object[] always failed with ClassCastException at runtime.
        // Fetch the elements individually instead.
        ASN1Encodable first = seq.getObjectAt(0);
        ASN1Encodable second = seq.getObjectAt(1);
        if (first instanceof ASN1ObjectIdentifier) {
            addressType = (ASN1ObjectIdentifier) first;
        } else {
            throw new IllegalArgumentException("Expected ASN1ObjectIdentifier, received " + first.getClass().getName());
        }
        if (second instanceof DERUTF8String) {
            addressValue = (DERUTF8String) second;
        } else {
            throw new IllegalArgumentException("Expected DERUTF8String, received " + second.getClass().getName());
        }
    }

    /**
     * Constructs a ComponentAddress from its two components.
     */
    public ComponentAddress(ASN1ObjectIdentifier addressType, DERUTF8String addressValue) {
        this.addressType = addressType;
        this.addressValue = addressValue;
    }

    /** Encodes the address as a DER SEQUENCE of { addressType, addressValue }. */
    public ASN1Primitive toASN1Primitive() {
        ASN1EncodableVector vec = new ASN1EncodableVector();
        vec.add(addressType);
        vec.add(addressValue);
        return new DERSequence(vec);
    }

    public ASN1ObjectIdentifier getAddressType() {
        return addressType;
    }

    public DERUTF8String getAddressValue() {
        return addressValue;
    }
}
|
<reponame>benoitc/pypy
from __future__ import with_statement
import py
from pypy.rlib.rstring import StringBuilder, UnicodeBuilder
from pypy.rpython.annlowlevel import llstr, hlstr
from pypy.rpython.lltypesystem import rffi
from pypy.rpython.lltypesystem.rbuilder import *
from pypy.rpython.test.tool import BaseRtypingTest, LLRtypeMixin, OORtypeMixin
class TestStringBuilderDirect(object):
    """Exercises the low-level StringBuilderRepr ll_* entry points directly."""
    def test_simple(self):
        # Builds "x" + "abc" + "foobar"[2:5] + "y"*3 through the ll interface.
        sb = StringBuilderRepr.ll_new(3)
        StringBuilderRepr.ll_append_char(sb, 'x')
        StringBuilderRepr.ll_append(sb, llstr("abc"))
        StringBuilderRepr.ll_append_slice(sb, llstr("foobar"), 2, 5)
        StringBuilderRepr.ll_append_multiple_char(sb, 'y', 3)
        s = StringBuilderRepr.ll_build(sb)
        assert hlstr(s) == "xabcobayyy"
    def test_nooveralloc(self):
        # When the content exactly fits the initial capacity, ll_build is
        # expected to return the internal buffer itself (no copy).
        sb = StringBuilderRepr.ll_new(3)
        StringBuilderRepr.ll_append(sb, llstr("abc"))
        assert StringBuilderRepr.ll_build(sb) == sb.buf
class BaseTestStringBuilder(BaseRtypingTest):
    """Interpreted tests driving StringBuilder/UnicodeBuilder through rtyping.

    Subclassed below with the LLtype and OOtype mixins.
    """
    def test_simple(self):
        # "a" + "abc" + "abc"[1:2] + "d"*4 == "aabcbdddd"
        def func():
            s = StringBuilder()
            s.append("a")
            s.append("abc")
            s.append_slice("abc", 1, 2)
            s.append_multiple_char('d', 4)
            return s.build()
        res = self.ll_to_string(self.interpret(func, []))
        assert res == "aabcbdddd"
    def test_overallocation(self):
        # Appends past the initial capacity of 4 to force buffer growth.
        def func():
            s = StringBuilder(4)
            s.append("abcd")
            s.append("defg")
            s.append("rty")
            return s.build()
        res = self.ll_to_string(self.interpret(func, []))
        assert res == "abcddefgrty"
    def test_unicode(self):
        # Same operations as test_simple but through UnicodeBuilder.
        def func():
            s = UnicodeBuilder()
            s.append(u'a')
            s.append(u'abc')
            s.append(u'abcdef')
            s.append_slice(u'abc', 1, 2)
            s.append_multiple_char(u'u', 4)
            return s.build()
        res = self.ll_to_unicode(self.interpret(func, []))
        assert res == 'aabcabcdefbuuuu'
        assert isinstance(res, unicode)
    def test_string_getlength(self):
        # getlength reports the accumulated length before build().
        def func():
            s = StringBuilder()
            s.append("a")
            s.append("abc")
            return s.getlength()
        res = self.interpret(func, [])
        assert res == 4
    def test_unicode_getlength(self):
        def func():
            s = UnicodeBuilder()
            s.append(u"a")
            s.append(u"abc")
            return s.getlength()
        res = self.interpret(func, [])
        assert res == 4
    def test_append_charpsize(self):
        # Copies l raw chars from a C string into the builder.
        def func(l):
            s = StringBuilder()
            with rffi.scoped_str2charp("hello world") as x:
                s.append_charpsize(x, l)
            return s.build()
        res = self.ll_to_string(self.interpret(func, [5]))
        assert res == "hello"
    def test_builder_or_none(self):
        # A builder variable that may be None must still rtype correctly.
        def g(s):
            if s:
                s.append("3")
            return bool(s)
        def func(i):
            if i:
                s = StringBuilder()
            else:
                s = None
            return g(s)
        res = self.interpret(func, [0])
        assert not res
        res = self.interpret(func, [1])
        assert res
    def test_unicode_builder_or_none(self):
        # Same as above for UnicodeBuilder.
        def g(s):
            if s:
                s.append(u"3")
            return bool(s)
        def func(i):
            if i:
                s = UnicodeBuilder()
            else:
                s = None
            return g(s)
        res = self.interpret(func, [0])
        assert not res
        res = self.interpret(func, [1])
        assert res
class TestLLtype(BaseTestStringBuilder, LLRtypeMixin):
    """Runs the shared builder tests on the lltype backend."""
    pass
class TestOOtype(BaseTestStringBuilder, OORtypeMixin):
    """Runs the shared builder tests on the ootype backend."""
    def test_append_charpsize(self):
        # Raw-pointer copying has no ootype equivalent; skip the inherited test.
        py.test.skip("append_charpsize(): not implemented on ootype")
|
#!/bin/bash
# Creates the cdstore-Albums DynamoDB table against a local endpoint
# (port 8000). The AWS SigV4 header uses placeholder credentials, which a
# local emulator accepts; the table definition is read from the JSON file.
curl -X POST http://localhost:8000 -H 'Content-Type: application/x-amz-json-1.0' -H 'Authorization: AWS4-HMAC-SHA256 Credential=XXX, SignedHeaders=YYY, Signature=ZZZ' -H 'X-Amz-Target: DynamoDB_20120810.CreateTable' --data @/migration/cdstore-Albums.json
/** Biological sex recorded for a node in the genealogy graph. */
export enum Sex {
  Male,
  Female,
  Unknown
}
/** Rendering state of a node within the graph layout. */
export enum layoutState {
  Expanded,
  Aggregated,
  Hidden
}
/**
* This class holds all attributes of a node in the genealogy graph.
*/
/**
 * This class holds all attributes of a node in the genealogy graph.
 */
export default class Node {
  /** This node's ID */
  id: string;
  /** This node's uniqueID */
  uniqueID: string;
  // TODO - what's type?
  type: string;
  /** The y index of the node as rendered currently */
  y: number;
  /** The y index of the node in the original array, independent of hiding/aggregation */
  originalY: number;
  /** The x position of the node, expressed as birth year by default, can change on aggregation */
  x: number;
  /** keeps track of nodes original x position - can change for kid grids on hide. */
  originalX: number;
  /** flag to indicate a starting point of aggregation. used to recreate aggregate states when POI is changed */
  aggregateBranch: boolean;
  // ------ Attributes of the node from the data ------
  sex: Sex;
  /** Year of Birth */
  bdate: number;
  /** Year of Death */
  ddate: number;
  deceased: string;
  // ----- Relationship information -------
  maID: string;
  paID: string;
  kindredID: string;
  /** Reference to the mother */
  ma: Node;
  /** Reference to the father */
  pa: Node;
  /** keeps track of nuclear families a given node belongs to. */
  familyIds: string[];
  hasChildren: boolean;
  /** Array of children */
  children: Node[];
  /** Array of spouses (some have more than one) */
  spouse: Node[];
  // ----- Visible Node Attributes -----
  /** Is this node currently considered to be affected */
  affected: boolean;
  /** Keep track of primary attribute and what 'affected' means for this attribute data. */
  primary;
  /** Keep track of secondary attribute and what 'affected' means for this attribute data. */
  secondary;
  state: layoutState;
  // ----- Node State ------
  hidden: boolean;
  aggregated: boolean;
  /** used to keep track of clicked nodes even when they are removed from the visible area. May not need if nodes are not removed and simply scroll out of view. */
  clicked: boolean;
  // ----- Derived Attributes -----
  // d.deceased = d.deceased === 'Y'; //transform to boolean values
  generation: number; //indicator that generation has not been set
  //flag for blood descendants of founders - not in use yet (2/23/17)
  descendant?: boolean;
  // ----- De-cycling data -----
  //keep track of any duplicates of this node */
  duplicates: Node[];
  // used for deCycling the tree
  visited: boolean;
  //flag for bdates that are inferred.
  inferredBdate: boolean;
  //flag for whether there was death/deceased information.
  hasDdate: boolean;
  // TODO what is target?
  target: Node;
  /** Default initialization for attributes */
  constructor(id: string) {
    this.type = 'single';
    this.id = undefined;
    this.kindredID = undefined;
    this.uniqueID = id; //use phovea defined unique id
    this.hidden = false;
    this.aggregated = false;
    this.generation = -1;
    this.descendant = false;
    this.familyIds = [];
    this.clicked = false;
    this.primary = undefined;
    this.secondary = undefined;
    this.hasChildren = false;
    this.children = [];
    this.spouse = [];
    this.duplicates = [];
    this.visited = false;
    this.deceased = 'Y';
    this.affected = false;
    this.state = layoutState.Expanded;
    this.inferredBdate = false;
    this.hasDdate = true;
  }
  /** Initialize the node based on rows */
  public initialize(columnNameToIndex: any, row: any) {
    // NOTE(review): anything other than 'M' becomes Female; Sex.Unknown is
    // never assigned here — confirm that is intended.
    this.sex = (row[columnNameToIndex.sex] === 'M') ? Sex.Male : Sex.Female;
    this.id = row[columnNameToIndex.RelativeID].toString();
    this.bdate = +row[columnNameToIndex.bdate];
    // FIX: test for the presence of the ddate column explicitly. The old
    // truthiness checks (`columnNameToIndex.ddate ? … : …`) broke whenever
    // the ddate column happened to sit at index 0.
    const hasDdateColumn = columnNameToIndex.ddate !== undefined;
    this.ddate = hasDdateColumn ? +row[columnNameToIndex.ddate] : undefined;
    this.x = +row[columnNameToIndex.bdate];
    this.maID = row[columnNameToIndex.MaID].toString();
    this.paID = row[columnNameToIndex.PaID].toString();
    this.kindredID = row[columnNameToIndex.KindredID].toString();
    this.hasDdate = hasDdateColumn;
    // this.deceased = row[columnNameToIndex.deceased].toString();
  }
}
|
<reponame>tylertucker202/argovis_backend
const Profile = require('../models/profile')
const moment = require('moment')
const GJV = require('geojson-validation')
const helper = require('../public/javascripts/controllers/profileHelperFunctions')
const HELPER_CONST = require('../public/javascripts/controllers/profileHelperConstants')
const util = require('util');
// Display list of Profiles in a list of _ids
exports.profile_list = function(req, res, next) {
req.checkQuery('ids', 'ids should be specified.').notEmpty()
//req.sanitize('ids').escape()
req.sanitize('ids').trim()
req.sanitize('presRange').escape()
req.sanitize('presRange').trim()
const errors = req.validationErrors()
if (errors) {
res.send('There have been validation errors: ' + util.inspect(errors), 400);
return;
}
const _ids = JSON.parse(req.query.ids.replace(/'/g, '"'))
let presRange = null
let maxPres = null
let minPres = null
if (req.query.presRange) {
presRange = JSON.parse(req.query.presRange)
maxPres = Number(presRange[1])
minPres = Number(presRange[0])
}
idMatch = {$match: {_id: { $in: _ids}}}
let idAgg = []
idAgg.push(idMatch)
if (presRange){
idAgg.push(helper.make_pres_project(minPres, maxPres, 'measurements'))
}
idAgg.push({$project: HELPER_CONST.PROF_PROJECT_WITH_PRES_RANGE_COUNT})
idAgg.push({$match: { count: {$gt: 0}}})
idAgg.push({$sort: { date: -1}})
const query = Profile.aggregate(idAgg)
query.exec( function (err, profiles) {
if (err) {
// console.log('an error:', err)
return next(err)
}
// console.log('len prof: ', profiles.length)
res.json(profiles)
})
}
exports.profile_detail = function (req, res, next) {
req.checkParams('_id', 'Profile id should be specified.').notEmpty()
req.sanitize('_id').escape()
const errors = req.validationErrors();
if (errors) {
res.send('There have been validation errors: ' + util.inspect(errors), 400);
return;
}
else {
let query = Profile.findOne({ _id: req.params._id })
if (req.params.format==='map') {
query.select(HELPER_CONST.MAP_PARAMS)
}
if (req.params.format==='page') {
query.select('-bgcMeas') //bgcMeas can be large
}
if (req.params.format==='bgcPage') {
query.select('-measurements') //remove unneeded measurements. can be large
}
let promise = query.exec()
promise
.then(function (profile) {
if (req.params.format==='page'){
if (profile === null) { res.send('profile not found') }
else {
profileDate = moment.utc(profile.date).format('YYYY-MM-DD HH:mm')
res.render('profile_page', {title: req.params._id, profile: profile,
platform_number: profile.platform_number,
profileDate: profileDate})
}
}
else if (req.params.format==='bgcPage'){
if (profile === null) { res.send('profile not found') }
if (profile.bgcMeas === null) { res.send('profile does not have bgc') }
else {
profileDate = moment.utc(profile.date).format('YYYY-MM-DD HH:mm')
res.render('bgc_profile_page', {title: req.params._id, profile: profile,
platform_number: profile.platform_number,
paramKeys: profile.bgcMeasKeys, profileDate: profileDate})
}
}
else {
res.json(profile)
}
})
.catch(function(err) { return next(err)})
}
}
exports.selected_profile_list = function(req, res , next) {
req.checkQuery('startDate', 'startDate should be specified.').notEmpty()
req.checkQuery('endDate', 'endDate should be specified.').notEmpty()
req.checkQuery('shape', 'shape should be specified.').notEmpty()
req.sanitize('presRange').escape()
req.sanitize('presRange').trim()
req.sanitize('_id').escape()
req.sanitize('startDate').toDate()
req.sanitize('endDate').toDate()
const errors = req.validationErrors()
if (errors) {
res.status(400).send('There have been validation errors: ' + util.inspect(errors))
return
}
const shape = JSON.parse(req.query.shape)
const shapeJson = {'type': 'Polygon', 'coordinates': shape}
let presRange = null
let maxPres = null
let minPres = null
let deepOnly = null
let bgcOnly = null
if (req.query.presRange) {
presRange = JSON.parse(req.query.presRange)
maxPres = Number(presRange[1])
minPres = Number(presRange[0])
}
if (req.query.bgcOnly) {
bgcOnly = true
}
if (req.query.deepOnly) {
deepOnly = true
}
const startDate = moment.utc(req.query.startDate, 'YYYY-MM-DDTHH:mm:ss')
const endDate = moment.utc(req.query.endDate, 'YYYY-MM-DDTHH:mm:ss')
console.log('startDate: ', startDate, 'endDate', endDate)
const dateDiff = endDate.diff(startDate)
const monthDiff = Math.floor(moment.duration(dateDiff).asMonths())
if (monthDiff > 3) {
throw new Error('time range exceeds 3 months. consider making query smaller')
}
GJV.valid(shapeJson)
GJV.isPolygon(shapeJson)
req.getValidationResult().then(function (result) {
if (!result.isEmpty()) {
const errors = result.array().map(function (elem) {
return elem.msg
})
res.render('error', { errors: errors })
}
else {
let agg = []
if (req.params.format === 'map' && presRange) {
agg = helper.make_map_pres_agg(minPres, maxPres, shapeJson, startDate, endDate)
}
else if (req.params.format === 'map' && !presRange) {
agg = [ {$match: {geoLocation: {$geoWithin: {$geometry: shapeJson}}}},
{$match: {date: {$lte: endDate.toDate(), $gte: startDate.toDate()}}},
{$project: HELPER_CONST.MAP_PROJ},
{$limit: 1001}
]
}
else if (req.params.format !== 'map' && presRange) {
agg = helper.make_pres_agg(minPres, maxPres, shapeJson, startDate, endDate)
}
else {
agg = [ {$match: {geoLocation: {$geoWithin: {$geometry: shapeJson}}}},
{$match: {date: {$lte: endDate.toDate(), $gte: startDate.toDate()}}},
]
}
if (deepOnly) {
agg.push({$match: {isDeep: true}})
}
if (bgcOnly) {
agg.push({$match: {containsBGC: true}})
}
agg.push({$sort: { date: -1}}) // TODO: test if this causes slowdown)
const query = Profile.aggregate(agg)
const promise = query.exec()
promise
.then(function (profiles) {
//create virtural fields.
profiles = helper.make_virtural_fields(profiles)
//render page
if (req.params.format==='page'){
if (profiles === null) { res.send('profile not found') }
else {
res.render('selected_profile_page', {title:'Custom selection', profiles: JSON.stringify(profiles), moment: moment, url: req.originalUrl })
}
}
else {
res.json(profiles)
}
})
.catch(function(err) { return next(err)})
}})
}
exports.select_profile_2d = function(req, res , next) {
req.checkQuery('startDate', 'startDate should be specified.').notEmpty()
req.checkQuery('endDate', 'endDate should be specified.').notEmpty()
req.checkQuery('llCorner', 'shape should be specified.').notEmpty()
req.checkQuery('urCorner', 'shape should be specified.').notEmpty()
req.sanitize('presRange').escape()
req.sanitize('presRange').trim()
req.sanitize('_id').escape()
req.sanitize('startDate').toDate()
req.sanitize('endDate').toDate()
const errors = req.validationErrors()
if (errors) {
res.send('There have been validation errors: ' + util.inspect(errors), 400)
return
}
const llCorner = JSON.parse(req.query.llCorner)
const urCorner = JSON.parse(req.query.urCorner)
const box = [llCorner, urCorner]
shapeBool = false
let presRange = null
let maxPres = null
let minPres = null
let deepOnly = null
let bgcOnly = null
if (req.query.presRange) {
presRange = JSON.parse(req.query.presRange)
maxPres = Number(presRange[1])
minPres = Number(presRange[0])
}
if (req.query.bgcOnly) {
bgcOnly = true
}
if (req.query.deepOnly) {
deepOnly = true
}
const startDate = moment.utc(req.query.startDate, 'YYYY-MM-DD')
const endDate = moment.utc(req.query.endDate, 'YYYY-MM-DD')
const dateDiff = endDate.diff(startDate)
const monthDiff = Math.floor(moment.duration(dateDiff).asMonths())
if (monthDiff > 3) {
throw new Error('time range exceeds 3 months. consider making query smaller')
}
req.getValidationResult().then(function (result) {
if (!result.isEmpty()) {
const errors = result.array().map(function (elem) {
return elem.msg
})
res.render('error', { errors: errors })
}
else {
let agg = []
if (req.params.format === 'map' && presRange) {
agg = helper.make_map_pres_agg(minPres, maxPres, box, startDate, endDate, shapeBool)
}
else if (req.params.format === 'map' && !presRange) {
agg = [ {$match: {geoLocation: {$geoWithin: {$box: box}}}},
{$match: {date: {$lte: endDate.toDate(), $gte: startDate.toDate()}}},
{$project: HELPER_CONST.MAP_PROJ},
{$limit: 1001}
]
}
else if (req.params.format !== 'map' && presRange) {
agg = helper.make_pres_agg(minPres, maxPres, box, startDate, endDate, shapeBool)
}
else {
agg = [ {$match: {geoLocation: {$geoWithin: {$box: box}}}},
{$match: {date: {$lte: endDate.toDate(), $gte: startDate.toDate()}}}
]
}
if (deepOnly) {
agg.push({$match: {isDeep: true}})
}
if (bgcOnly) {
agg.push({$match: {containsBGC: true}})
}
agg.push({$sort: { date: -1}}) // TODO: test if this causes slowdown)
const query = Profile.aggregate(agg)
const promise = query.exec()
promise
.then(function (profiles) {
//create virtural fields.
profiles = helper.make_virtural_fields(profiles)
//render page
if (req.params.format==='page'){
if (profiles === null) { res.send('profile not found') }
else {
res.render('selected_profile_page', {title:'Custom box selection', profiles: JSON.stringify(profiles), moment: moment, url: req.originalUrl })
}
}
else {
res.json(profiles)
}
})
.catch(function(err) { return next(err)})
}})
}
|
# Send a deep link to the booted iOS Simulator device.
# Opens the Celo wallet payment screen with a prefilled recipient address,
# display name and E.164 phone number (%2B is the URL-encoded '+').
xcrun simctl openurl booted "celo://wallet/pay?address=0x0b784e1cf121a2d9e914ae8bfe3090af0882f229&displayName=Crypto4BlackLives&e164PhoneNumber=%2B14046251530"
|
#!/bin/sh
# CI helper: installs Python + NumPy on macOS via Homebrew; Linux needs no
# extra setup here. $1 (TRAVIS_ROOT) is accepted for interface compatibility.
set -e
set -x

# FIX: use $(...) instead of legacy backticks for command substitution.
os=$(uname)

TRAVIS_ROOT="$1"

case "$os" in
    Darwin)
        echo "Mac"
        brew update
        # Remove Python 2 first, then install/upgrade Python 3 and NumPy.
        brew unlink python@2 || brew uninstall python@2
        brew upgrade python || brew install python
        brew upgrade numpy || brew install numpy
        brew link --overwrite python
        ;;
    Linux)
        echo "Linux"
        ;;
esac
|
require 'twilio-ruby'

# Get your Account SID and Auth Token from twilio.com/console
# To set up environmental variables, see http://twil.io/secure
account_sid = ENV['TWILIO_ACCOUNT_SID']
auth_token = ENV['TWILIO_AUTH_TOKEN']
# SID of the Chat service whose roles are listed (placeholder value).
service_sid = 'ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'

@client = Twilio::REST::Client.new(account_sid, auth_token)
service = @client.chat.v2.services(service_sid)

# List each role of the service with its permission set.
service.roles.each do |r|
  puts "Role #{r.sid} has permissions #{r.permissions.join(', ')}"
end
|
<reponame>plusmancn/hyxcache
/**
* 会友行人员名单导入脚本
*/
var fs = require('fs');
var csv = require('csv');
var Q = require('q');
var request = require('request');
var config = require('../package.json');
// Import settings: the target meeting id and a slot for the parsed CSV rows.
var importSeeting = {
    meetingId:'554a34cee4b0679ef61499d6',
    contactData:[]
}
// Pipeline: read the attendee CSV, apply every (name, phone) row to the
// meeting, then push an SMS notification to each imported contact.
readCSVFile('../testFolder/attender.csv')
.then(function(data){
    importSeeting.contactData = data;
    var promises = [];
    // Each row is [name, mobilePhoneNumber].
    data.forEach(function(user){
        promises.push(meetingApply(user[0],user[1]));
    });
    return Q.all(promises);
})
.then(function(afterApply){
    var promises = [];
    // Send an SMS-board notification for every applied contact.
    for (var i = 0; i < afterApply.length; i++) {
        console.log(afterApply[i]);
        var params = {
            mobilePhoneNumber:importSeeting.contactData[i][1],
            username:importSeeting.contactData[i][0],
            notification:'消息推送测试'
        }
        promises.push(runCloudFunc('smsBoard',params));
    };
    return Q.all(promises);
})
.then(function(success){
    console.log(success);
},function(err){
    console.log(err);
});
/**
 * Registers/applies a user to the configured meeting via the cloud function.
 * FIX: dropped the deferred anti-pattern — the previous version never
 * rejected, so a failed request silently resolved with undefined and
 * upstream error handling could not fire.
 * @param userName          attendee display name
 * @param mobilePhoneNumber attendee phone number
 * @return promise resolving with the cloud-function response body
 */
function meetingApply(userName,mobilePhoneNumber){
    var params = {
        meetingId:importSeeting.meetingId,
        mobilePhoneNumber:mobilePhoneNumber,
        name:userName,
        message:'主办方名单导入',
    }
    return runCloudFunc('',params,'meetingApply');
}
/**
 * Reads a CSV file and resolves with its parsed rows, each cell trimmed.
 * @param path file path
 * @return promise resolving with string[][] rows, rejecting on parse error
 */
function readCSVFile(path){
    return Q.Promise(function(resolve,reject,notify){
        var attenerString = fs.readFileSync(path,'utf-8');
        csv.parse(attenerString,function(err,data){
            // FIX: check the parse error before touching `data`; previously
            // a parse failure crashed on `data.forEach` instead of rejecting.
            if (err) {
                reject(new Error(err));
                return;
            }
            // Trim surrounding whitespace from every cell in place.
            data.forEach(function(line){
                for(var i=0;i<line.length;i++){
                    line[i] = line[i].trim();
                }
            });
            resolve(data);
        });
    });
}
/**
 * Posts a cloud-function call and resolves with the response body.
 * Note: mutates `params` by attaching the function name as `params.func`.
 * @param func       cloud function name (may be '')
 * @param thirdParty endpoint path segment; defaults to "CallCloudFunc"
 */
function runCloudFunc(func,params,thirdParty){
    return Q.Promise(function(resolve,reject,notify){
        var endpoint = (typeof(thirdParty) == 'undefined') ? "CallCloudFunc" : thirdParty;
        params.func = func;
        var cloudFuncUrl = config.cloudFuncUrl + '/' + endpoint;
        request.post({url:cloudFuncUrl, json: params}, function(err,httpResponse,body){
            // Only a transport-level success with HTTP 200 counts as success.
            if (!err && httpResponse.statusCode == 200) {
                resolve(body);
            } else {
                reject(new Error(err));
            }
        });
    });
}
<filename>packages/amplication-server/src/enums/EnumProvider.ts
/** Supported source-control provider. */
export enum EnumProvider {
  Github
}
|
const dirIcon = Vue.prototype.$global.board.board_info.dir;
module.exports = function(Blockly) {
"use strict";
Blockly.Blocks["neopixel_rgb_begin"] = {
init: function() {
this.appendDummyInput()
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.appendField("NeoPixel begin");
this.appendValueInput("PIN")
.setCheck("Number")
.appendField("Pin");
this.appendValueInput("NUM")
.setCheck("Number")
.appendField("Number of Pixels");
this.setInputsInline(true);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_clear"] = {
init: function() {
this.appendDummyInput()
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.appendField("NeoPixel Clear");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_show"] = {
init: function() {
this.appendDummyInput()
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.appendField("NeoPixel Show");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_setBrightness"] = {
init: function() {
this.appendValueInput("BRIGHT")
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.setCheck("Number")
.appendField("NeoPixel setBrightness (0-255)");
this.setInputsInline(true);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_setPixelColor"] = {
init: function() {
this.appendValueInput("NUM")
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.setCheck("Number")
.appendField("NeoPixel set Pixel");
this.appendDummyInput()
.appendField("Color")
.appendField(new Blockly.FieldColour("#FFFFFF"), "COLOR");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_fillLED"] = {
init: function() {
this.appendDummyInput()
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.appendField("NeoPixel fill all LED color")
.appendField(new Blockly.FieldColour("#FFFFFF"), "COLOR");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_colorWipe"] = {
init: function() {
this.appendDummyInput()
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.appendField("NeoPixel colorWipe")
.appendField(new Blockly.FieldColour("#FFFFFF"), "COLOR");
this.appendValueInput("TIME")
.setCheck("Number")
.appendField("Time(ms)");
this.setInputsInline(true);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_theaterChase"] = {
init: function() {
this.appendDummyInput()
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.appendField("NeoPixel theaterChase")
.appendField(new Blockly.FieldColour("#FFFFFF"), "COLOR");
this.appendValueInput("TIME")
.setCheck("Number")
.appendField("Time(ms)");
this.setInputsInline(true);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_rainbow_begin"] = {
init: function() {
this.appendDummyInput()
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.appendField("NeoPixel rainbow Begin");
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_rainbow"] = {
init: function() {
this.appendValueInput("TIME")
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.setCheck("Number")
.appendField("NeoPixel rainbow Time(ms)");
this.setInputsInline(true);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
Blockly.Blocks["neopixel_rgb_rainbowCycle"] = {
init: function() {
this.appendValueInput("TIME")
.appendField(new Blockly.FieldImage(`file:///${dirIcon}/static/icons/1601900.png`,20,20,"*"))
.setCheck("Number")
.appendField("NeoPixel rainbowCycle Time(ms)");
this.setInputsInline(true);
this.setPreviousStatement(true, null);
this.setNextStatement(true, null);
this.setColour(65);
this.setTooltip("");
this.setHelpUrl("");
}
};
// ######################################################################
};
|
<filename>primary_insertion_best_benefit_item_limit/unchecked_items.h
#pragma once
#include <vector>
#include "knapsack_item.h"
/**
 * Holds the knapsack items that have not yet been examined by the insertion
 * heuristic, together with those examined but not inserted.
 */
class unchecked_items
{
	std::vector<knapsack_item*>* unchecked_items_;     // items awaiting a check
	std::vector<knapsack_item*>* not_inserted_items_;  // items checked but rejected
public:
	unchecked_items();
	// Populates the unchecked list (const: mutates only through the pointers).
	void load() const;
	// Returns an item from the unchecked pool (see implementation for order).
	knapsack_item* get_item() const;
	// Returns the next unchecked item that fits within the given capacity.
	knapsack_item* get_next_appropriate_item(unsigned int capacity) const;
	void print_unchecked() const;
	void print_not_inserted() const;
	// Comparator used to order items by benefit for std::sort-style calls.
	static bool sort_knapsack_items_by_benefit(knapsack_item* item_1,knapsack_item* item_2);
	~unchecked_items();
};
|
<reponame>smagill/opensphere-desktop
package io.opensphere.core.model;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import io.opensphere.core.math.Vector3d;
import io.opensphere.core.model.Tessera.TesseraVertex;
import io.opensphere.core.model.TesseraList.TesseraBlockBuilder;
import io.opensphere.core.util.Utilities;
/**
* A tessera block builder which handles mapping terrain vertices.
*
* @param <T> the type of vertex used by this builder.
*/
public class SimpleTesseraBlockBuilder<T extends TesseraVertex<?>> extends TesseraBlockBuilder<T>
{
    /**
     * A map of vertices to the index within the block. This map uses -1 to mean
     * "no entry."
     */
    private final TObjectIntMap<T> myIndexMap = new TObjectIntHashMap<>(10, .5f, -1);

    /**
     * The origin of the model coordinate space for the results. If no
     * adjustment is required {@code null} may be used.
     */
    private final Vector3d myModelCenter;

    /**
     * Constructor.
     *
     * @param tesseraVertexCount The number of vertices per tessera.
     * @param modelCenter The origin of the model coordinate space for the
     *            results. If no adjustment is required {@code null} may be
     *            given.
     */
    public SimpleTesseraBlockBuilder(int tesseraVertexCount, Vector3d modelCenter)
    {
        super(tesseraVertexCount);
        myModelCenter = modelCenter;
    }

    /**
     * Add a tessera to the block of tesserae, de-duplicating vertices so each
     * distinct vertex is stored once and referenced by index.
     *
     * @param vertices The vertices which make up the tessera.
     */
    @SuppressWarnings("unchecked")
    public void add(T[] vertices)
    {
        if (getBlockTesseraVertexCount() != vertices.length)
        {
            // FIX: corrected the misspelled message ("verticies"/"Exepected").
            throw new IllegalArgumentException(
                    "Wrong number of vertices (" + vertices.length + "). Expected " + getBlockTesseraVertexCount());
        }
        for (T vert : vertices)
        {
            int index = myIndexMap.get(vert);
            // check to see if the index is the "no entry" value
            if (index == -1)
            {
                index = getNextIndex();
                T vertex = vert;
                // Shift the vertex only when a non-trivial model center is set.
                if (myModelCenter != null && !Utilities.sameInstance(myModelCenter, Vector3d.ORIGIN))
                {
                    vertex = (T)vert.adjustToModelCenter(myModelCenter);
                }
                getBlockVertices().add(vertex);
                myIndexMap.put(vertex, index);
            }
            getBlockIndices().add(index);
        }
    }
}
|
<reponame>krzysztofgajda/python
'''
Created on 2010-02-28
@author: tomek
'''
from math import sqrt
a = 1
b = 0
c = 1
d = b * b -4 * a * c
# wynik = ((a == 0) and [
# ((b == 0) and [ ((c == 0) and ['Dozo'] or ['Brak'])[0]
# ] or
# [ 'x = %f' % (-c)/(b*1.0)
# ])[0]
# ] or
# [ ((d>0) and ['x1 = %f, x2 = %f' % ( (-b - sqrt(d))/(2*a), (-b + sqrt(d))/(2*a) )]
# or [ ((d==0) and ['x = %f' % ((-b)/(2.0*a))] or ["...."])[0] ]) [0]
# ]
# )[0]
#
# print wynik
#
# wynik = (('Duzo' if (c == 0) else 'Brak' )
# if (b == 0)
# else 'x = %f' % (-c)/(b*1.0)
# ) if (a == 0) \
# else ('x1 = %f, x2 = %f' % ( (-b - sqrt(d))/(2*a), (-b + sqrt(d))/(2*a) )) \
# if (d>0) else \
# ( 'x = %f' % ((-b)/(2.0*a)) if (d==0) else "...."
# )
# print wynik
# Solve a*x^2 + b*x + c = 0, handling the degenerate (non-quadratic) cases.
if a == 0:
    if b ==0:
        if c==0:
            # 0 == 0: every x is a solution ("Duzo" = many).
            print 'Duzo'
        else:
            # Contradiction c == 0 with c != 0: no solution ("Brak" = none).
            print "Brak"
    else:
        # Linear equation b*x + c = 0.
        print 'x = ', (-c)/(b*1.0)
else:
    # Discriminant decides the number/kind of roots.
    d = b * b -4 * a * c
    if d > 0:
        dd = sqrt(d)
        print 'x1 = ', (-b - dd)/(2*a)
        print 'x2 = ', (-b + dd)/(2*a)
    elif d == 0:
        print 'x = ', (-b)/(2.0*a)
    else:
        # Complex conjugate roots: imaginary part built from sqrt(-d).
        dd = sqrt(-d)*1j
        print 'x1 = ', (-b - dd)/(2*a)
        print 'x2 = ', (-b + dd)/(2*a)
|
ssh -x root@$1 "/sbin/shutdown.sh && /sbin/poweroff"
|
<reponame>buiminhhai1/mhh-backend-service
import { JwtService } from '@nestjs/jwt';
import { Injectable, Logger, NestMiddleware, UnauthorizedException } from '@nestjs/common';
import { CustomHttpRequest } from '../interfaces';
import { NguoiDungEntity } from '../../entities';
@Injectable()
export class AuthMiddleware implements NestMiddleware {
  constructor(private readonly jwtService: JwtService) {}

  private readonly logger = new Logger(AuthMiddleware.name);

  /**
   * Verifies the Bearer token on the request and copies the user's id,
   * username (tenDangNhap) and role (vaiTro) onto the request for
   * downstream handlers. Rejects requests without a valid token.
   */
  async use(req: CustomHttpRequest, res: Response, next: () => void) {
    this.logger.warn("go to middleware");
    const authorizationHeader = <string>req.headers['authorization'] || '';
    if (!authorizationHeader) {
      throw new UnauthorizedException('Token invalid');
    }
    try {
      this.logger.log('- Got header authorization');
      this.logger.log(authorizationHeader);
      // Expected form: "Bearer <jwt>".
      const jwts = authorizationHeader.split(' ');
      if (jwts.length === 2) {
        const user: Partial<NguoiDungEntity> = await this.jwtService.verifyAsync(jwts[1]);
        req.id = user.id;
        req.tenDangNhap = user.tenDangNhap;
        req.vaiTro = user.vaiTro;
      }
      // NOTE(review): a header that is present but not two tokens long still
      // passes through without user info — confirm that is intended.
    } catch (e) {
      this.logger.error(JSON.stringify(e, null, 2));
      // FIX: previously a malformed/expired token was only logged and the
      // request continued unauthenticated; reject it like a missing header.
      throw new UnauthorizedException('Token invalid');
    }
    next();
  }
}
|
<reponame>Tshisuaka/api-snippets
require 'twilio-ruby'

# Required for any Twilio Access Token
# To set up environmental variables, see http://twil.io/secure
account_sid = ENV['TWILIO_ACCOUNT_SID']
api_key = ENV['TWILIO_API_KEY']
api_secret = ENV['TWILIO_API_KEY_SECRET']

# Required for Video: the identity the token will authenticate as.
identity = 'user'

# Create Video grant for our token, scoped to a single room.
video_grant = Twilio::JWT::AccessToken::VideoGrant.new
video_grant.room = 'cool room'

# Create an Access Token carrying the video grant.
token = Twilio::JWT::AccessToken.new(
  account_sid,
  api_key,
  api_secret,
  [video_grant],
  identity: identity
)

# Generate the token as a signed JWT string.
puts token.to_jwt
|
<reponame>joonhocho/sanivali
import type { SanivaliDefaultRuleSchema } from '../defaultDefsTypes';
import type { ISanivaliDef } from '../types';
import type { Sanivali } from '../sanivali';
/** Parameter of the 'anyOf' rule: a list of rule schemas or compiled validators. */
export declare type AnyOfParam<T = SanivaliDefaultRuleSchema> = Array<T | Sanivali>;
/** Tuple form of an 'anyOf' rule item: the tag followed by its parameter. */
export declare type AnyOfRuleItem<T = SanivaliDefaultRuleSchema> = [
    'anyOf',
    AnyOfParam<T>
];
/** Rule definition implementing the 'anyOf' combinator. */
export declare const anyOfDef: ISanivaliDef;
//# sourceMappingURL=anyOf.d.ts.map
#!/bin/bash -e
# Creates an LDAP service account. Usage: create-sa.sh [rw] servicename
# variables which are used while rendering templates are exported
{
TYPE="$1"
SERVICE="$2"

# With a single argument, that argument is the service name and TYPE is empty.
if [ -z "${SERVICE}" ]; then
  SERVICE="${TYPE}"
  TYPE=""
fi

if [ -z "${SERVICE}" ]; then
  # FIX: the usage line now documents the optional "rw" type argument.
  echo "usage: create-sa.sh [rw] servicename"
  exit 1
fi

# Read/write accounts land in "Special Users", plain bind accounts elsewhere.
OU="Bind Users"
if [ "${TYPE}" = "rw" ]; then
  OU="Special Users"
fi
export OU

LDAP_DOMAIN=$(doguctl config --global domain)
export LDAP_DOMAIN

# proposal: use doguctl config openldap_suffix in future
OPENLDAP_SUFFIX="dc=cloudogu,dc=com"
export OPENLDAP_SUFFIX

# create random schema suffix and password
USERNAME="${SERVICE}_$(doguctl random -l 6)"
export USERNAME
PASSWORD=$(doguctl random)
ENC_PASSWORD=$(slappasswd -s "${PASSWORD}")
export ENC_PASSWORD

doguctl template /srv/openldap/new-user.ldif.tpl /srv/openldap/new-user_"${USERNAME}".ldif
ldapadd -f "/srv/openldap/new-user_${USERNAME}.ldif"
} >/dev/null 2>&1

# print details
echo "username: cn=${USERNAME},ou=${OU},o=${LDAP_DOMAIN},${OPENLDAP_SUFFIX}"
echo "password: ${PASSWORD}"
|
<reponame>ixrjog/caesar-web<gh_stars>1-10
// Element-UI tag type for each Jenkins build status.
const JOB_BUILD_STATUS_TYPES = {
  FAILURE: 'danger',
  UNSTABLE: 'warning',
  REBUILDING: 'warning',
  BUILDING: 'warning',
  ABORTED: 'warning',
  SUCCESS: 'success',
  UNKNOWN: 'warning',
  NOT_BUILT: 'warning',
  CANCELLED: 'danger'
}

/**
 * Maps a Jenkins build status string to a UI tag type.
 * Unrecognized statuses fall back to 'info' (old switch default).
 */
export function getJobBuildStatusType (value) {
  // hasOwnProperty guard keeps inherited keys (e.g. 'toString') on the default path.
  return Object.prototype.hasOwnProperty.call(JOB_BUILD_STATUS_TYPES, value)
    ? JOB_BUILD_STATUS_TYPES[value]
    : 'info'
}
// Chinese display text for each Jenkins build status.
// NOTE(review): '不是你的' reads as "not yours"; '未构建' ("not built") may
// have been intended for NOT_BUILT — confirm with the original author
// before changing this user-visible string.
const JOB_BUILD_STATUS_TEXTS = {
  FAILURE: '失败',
  UNSTABLE: '不稳定',
  REBUILDING: '重建',
  BUILDING: '构建中',
  ABORTED: '用户中止',
  SUCCESS: '成功',
  UNKNOWN: '未知',
  NOT_BUILT: '不是你的',
  CANCELLED: '取消'
}

/**
 * Maps a Jenkins build status string to its Chinese display text.
 * Unrecognized statuses fall back to '执行中' (old switch default).
 */
export function getJobBuildStatusText (value) {
  return Object.prototype.hasOwnProperty.call(JOB_BUILD_STATUS_TEXTS, value)
    ? JOB_BUILD_STATUS_TEXTS[value]
    : '执行中'
}
|
<reponame>yintaoxue/learn
package org.ruogu.learn.lang.waitnotify;
/**
* WaitNotifyTest
*
* @author xueyintao 2016年12月3日 下午5:29:29
*/
public class WaitNotifyTest {
/**
* @param args
*/
public static void main(String[] args) {
Object lock = new Object();
}
}
/**
 * Worker that acquires the shared lock, prints a message, and releases it.
 *
 * NOTE(review): despite the class name, this thread never calls
 * lock.wait() — it only enters and leaves the synchronized block.
 * Presumably a wait() call was intended inside the block; confirm before
 * relying on this example to demonstrate wait/notify.
 */
class WaitThread extends Thread {
    // Monitor object shared with NotifyThread.
    private Object lock;
    WaitThread(Object lock) {
        this.lock = lock;
    }
    @Override
    public void run() {
        System.out.println("WaitThread start...");
        synchronized (lock) {
            System.out.println("WaitThread get lock.");
        }
        System.out.println("WaitThread end...");
    }
}
/**
 * Worker intended to notify the waiting thread.
 *
 * NOTE(review): this thread never touches the lock at all — it only prints
 * start/end messages. A synchronized(lock) { lock.notify(); } was
 * presumably intended; confirm before relying on this example.
 */
class NotifyThread extends Thread {
    // Monitor object shared with WaitThread (currently unused in run()).
    private Object lock;
    NotifyThread(Object lock) {
        this.lock = lock;
    }
    @Override
    public void run() {
        System.out.println("NotifyThread start...");
        System.out.println("NotifyThread end...");
    }
}
} |
<filename>src/component/theme_loader.js
import { html, render } from 'lit-html';
/**
 * Handles white-label theming: stylesheet, favicon, title bar content,
 * theme color and web manifest are swapped depending on the "theme" URL
 * parameter (or the hostname for ladepreise.at).
 */
export default class ThemeLoader {
  /**
   * @param translation translation service used for localized strings
   */
  constructor(translation) {
    this.translation = translation;
    this.defaultTheme = "plugchecker";
    // Per-theme configuration. titleBarHtml may be a plain string
    // (inserted via innerHTML) or a lit-html template (rendered via render()).
    this.themes = {
      [this.defaultTheme] : {
        titleBarHtml: "chargeprice",
        title: translation.get("title"),
        favicon: "img/favicon-32x32.png",
        themeColor: "#3498db",
        appleTouchIcon: "/img/logos/apple-touch-icon.png"
      },
      emc: {
        titleBarHtml: html`
          <a href="https://www.emcaustria.at/" target="_blank">
            <img id="logo" src="themes/emc/logo.png"/>
            <img id="logoText" src="themes/emc/text.svg"/>
          </a>
        `,
        favicon: "themes/emc/logo.png",
        name: `EMC ${translation.get("themeTitle")}`,
        themeColor: "#8fbf22",
        appleTouchIcon: "themes/emc/logo.png",
        highlightedTariffs: ["a480edbe-d673-4faa-ad70-5d22273d15a0"]
      },
      nissan: {
        titleBarHtml: `<img id=\"logo\" src=\"themes/nissan/logo.png\"/><span class=\"title\">${translation.get("themeTitle")}</span>`,
        favicon: "themes/nissan/logo.png",
        name: `Nissan ${translation.get("themeTitle")}`,
        themeColor: "#c3002f",
        appleTouchIcon: "/img/logos/apple-touch-icon.png"
      },
      oeamtc: {
        titleBarHtml: `<img id=\"logo\" src=\"themes/oeamtc/logo.png\"/><span class=\"title\">${translation.get("themeTitle")}</span>`,
        favicon: "themes/oeamtc/logo.png",
        name: `ÖAMTC Ladepreise`,
        themeColor: "#ffdc00",
        appleTouchIcon: "themes/oeamtc/logo.png"
      },
      "billig-tanken" : {
        titleBarHtml: "chargeprice",
        title: translation.get("title"),
        favicon: "img/favicon-32x32.png",
        // NOTE(review): "<NAME>" looks like a placeholder that was never
        // filled in — confirm the intended display name.
        name: `<NAME>`,
        themeColor: "#009688",
        appleTouchIcon: "/img/logos/apple-touch-icon.png"
      },
      aprr: {
        titleBarHtml: `<img id=\"logo\" src=\"themes/aprr/logo.png\"/><span class=\"title\">${translation.get("themeTitle")}</span>`,
        favicon: "themes/aprr/logo.png",
        name: translation.get("themeTitle"),
        themeColor: "#ce0000",
        appleTouchIcon: "themes/aprr/logo.png"
      },
      asfinag: {
        titleBarHtml: `<img id=\"logo\" src=\"themes/asfinag/logo.png\"/><span class=\"title\">${translation.get("themeTitle")}</span>`,
        favicon: "themes/asfinag/logo.png",
        name: translation.get("themeTitle"),
        themeColor: "#bc5408",
        appleTouchIcon: "themes/asfinag/logo.png"
      },
      instadrive: {
        titleBarHtml: html`
          <img id="logo" class="mobile-hidden" src="themes/instadrive/logo.svg"/>
          <img id="logo" class="mobile-shown" src="themes/instadrive/icon.svg"/>
          <img id="logoText" src="themes/instadrive/text.svg"/>
        `,
        favicon: "themes/instadrive/favicon.png",
        name: "Instadrive Ladepreise",
        themeColor: "#16bae7",
        // Fix: previously pointed at themes/asfinag/logo.png (copy-paste
        // from the asfinag entry); use Instadrive's own icon instead.
        appleTouchIcon: "themes/instadrive/favicon.png"
      },
    }
  }

  /** True when no special theme is active. */
  isDefaultTheme(){
    return this.getValidatedTheme() == this.defaultTheme;
  }

  /**
   * Resolves the active theme id: ladepreise.at always maps to "emc";
   * otherwise the "theme" URL parameter is used if it names a known theme,
   * falling back to the default theme.
   */
  getValidatedTheme(){
    if(window.location.hostname.includes("ladepreise.at")) return "emc";
    const theme = new URL(window.location.href).searchParams.get("theme");
    return this.themes.hasOwnProperty(theme) ? theme : this.defaultTheme;
  }

  /**
   * Applies the active theme to the document: injects the theme stylesheet,
   * fills the title bar, and updates favicon, manifest, theme color and
   * page title.
   */
  setCurrentTheme(){
    const themeId = this.getValidatedTheme();
    const theme = this.themes[themeId];

    // Inject the per-theme stylesheet.
    var newSS=document.createElement('link');
    newSS.rel='stylesheet';
    newSS.href=`themes/${themeId}/style.css?v=1`;
    document.getElementsByTagName("head")[0].appendChild(newSS);

    // Title bar: strings go through innerHTML, lit-html templates via render().
    if(typeof theme.titleBarHtml == "string") {
      document.getElementById("logo-container").innerHTML = theme.titleBarHtml;
    }
    else {
      render(theme.titleBarHtml,document.getElementById("logo-container"));
    }

    // Favicon, Apple touch icon and web manifest.
    document.getElementById("metaIcon").setAttribute("href",theme.favicon);
    document.getElementById("metaAppleIcon").setAttribute("href",theme.appleTouchIcon);
    document.getElementById("metaManifest").setAttribute("href",`themes/${themeId}/site.webmanifest`);

    // Browser theme color.
    document.getElementById("theme-color").setAttribute("content",theme.themeColor);

    // Page title: plain title for the default theme, co-branded otherwise.
    if(themeId == this.defaultTheme) {
      document.getElementsByTagName("title")[0].innerText = theme.title;
    }
    else {
      document.getElementsByTagName("title")[0].innerText = `${theme.name} ${this.translation.get("poweredBy")} Chargeprice`;
    }
  }

  /** Configuration object of the currently active theme. */
  getCurrentThemeConfig(){
    return this.themes[this.getValidatedTheme()];
  }
}
|
#!/bin/sh
# Compile the Bodhi "blueprint" Edje theme. Extra edje_cc flags may be
# passed through as script arguments; "$@" is quoted so arguments
# containing spaces are forwarded intact (unquoted $@ would split them).
edje_cc "$@" -id . -fd . blueprint.edc -o Bodhi-Blueprint.edj
|
#!/bin/bash
# ###############################################
# Based on Peter Jemian script from tech-talk
# https://epics.anl.gov/tech-talk/2018/msg00259.php
# ###############################################
#wget https://raw.githubusercontent.com/EPICS-synApps/support/master/assemble_synApps.sh
# edit for local choices
#bash assemble_synApps.sh
# ##########################
# On Windows x64 specify location
# EPICS_BASE in support/configure/EPICS_BASE.windows-x64
# SUPPORT in support/configure/SUPPORT.windows-x64
# ##########################
# ################################
# assemble_synApps.sh modified to AreaDetector 3.2 only
# ================================
# Assemble the full synApps tree, then point it at the local EPICS base.
bash assemble_synAppsAD.sh full
# Abort if the expected tree was not created; without this guard (the
# script has no `set -e`) sed and make would silently run in the wrong
# directory when the assemble step fails.
cd synApps/support || exit 1
#sed -i "s/^EPICS_BASE=.*/EPICS_BASE=\/epics\/base-3.15.5/" configure/RELEASE
sed -i "s/^EPICS_BASE=.*/EPICS_BASE=\/epics\/base-7.0.1/" configure/RELEASE
make release
#git clone https://github.com/areaDetector/areaDetector
#cd areaDetector
#git clone https://github.com/areaDetector/ADCore
#git clone https://github.com/areaDetector/ADSupport
#git clone https://github.com/areaDetector/ADSimDetector
#git clone https://github.com/areaDetector/ADProsilica
#git clone https://github.com/areaDetector/ADAndor3
#git clone https://github.com/areaDetector/ADDexela
# Sort out releases ('git checkout master' for most recent)
#git checkout R3-2;
#cd ADCore; git checkout R3-2; cd ..
#cd ADSupport; git checkout R1-4; cd ..
#cd ADSimDetector; git checkout R2-7; cd ..
#cd ADProsilica; git checkout R2-4; cd ..
#cd ADAndor3; git checkout R2-2; cd ..
#cd ADDexela; git checkout R2-1; cd ..
#cd ADDexela; git checkout master; cd ..
# Edit AreaDetector/configure/RELEASE... file system
# ##################################
# Compilation order (areaDetector 3-2)
# Date: 2018-2-7
# ====================================
#seq, ipac, autosave, | caPutLog
#asyn, devIocStats, sscan
#busy, ipUnidac, calc, std
#mca
#areaDetector
#quadEM
# ##################################
# Updating the areaDetector
# =================================
# from directory /epics/areadetector/
# git pull --recurse-submodules
# #########################3
# Git releases
# =======================
# git tag
# git tag -l R4-3* # Match pattern defined by -l
# git show-ref --tags -d
# git diff --name-status Release-2-6 ; difference since R-2-6
# git log origin/master..HEAD ; unpushed changes; similar to 'hg outgoing'
# git diff origin/master..HEAD ; Actual changes
# git log --stat origin/master..HEAD
# git log --branches --not --remotes ; Commits not yet pushed on all branches
# git log --branches --not --remotes --simplify-by-decoration --decorate --oneline ; most recent commit on each branch and branch name
# git checkout -b R7-7
# ***********************
# git config --global alias.ahead "log origin/master..HEAD --oneline" ; git ahead
# ###############################
# AreaDetector recursively cloned
# ===============================
#git clone --recursive https://github.com/areaDetector/areaDetector.git
#git clone https://github.com/epics-modules/quadEM
#cd areaDetector/ADCore; git checkout R3-1; cd ../..
# ... many more modules here
# ##################################
# Updating the areaDetector
# =================================
# from directory /epics/areadetector/
# git pull --recurse-submodules
# ##################################
# Additional synApps components
# ==================================
##darcs get http://www-csr.bessy.de/control/SoftDist/caPutLog/repo/caPutLog
#wget http://www-csr.bessy.de/control/SoftDist/caPutLog/_downloads/caPutLog-3.6.tar.gz; tar -xvzf caPutLog-3.6.tar.gz
#git clone https://github.com/epics-modules/alive
# ######################################################################
# PCIe EVR on powerBrick v.3.0.1, Biffiger Roger (Roger.Biffiger@psi.ch)
# Requires PCI VME support library devlib2
# =========================================
# git clone https://github.com/pauscherrerinstitute/mrfioc2
# git clone https://github.com/kgofron/devlib2
# ################################################
# medm is now (2017-7-15) on github.com/epics-extensions/medm
# ================================================
#git clone https://github.com/epics-extensions/medm; cd /epics/extensions/src/medm; git checkout MEDM3_1_14;
#https://github.com/epics-extensions/medm/archive/MEDM3_1_14.tar.gz
# Need msi from https://epics.anl.gov/extensions/msi/
# ######################################
# EPICS 7 base is cheeky, since submodules need to be pulled into
# epics-base/modules/[ca,database,libcom,normativeTypes,pvData,pvDatabase,pva2pva,pvaClient]
# https://epics.anl.gov/base/R7-0/index.php
# ================================
#git clone -b core/master https://git.launchpad.net/epics-base base-7.0
#cd base-7.0
#git submodule update --init --reference ./
|
package entities
import (
"github.com/edanko/dxf-go/core"
"github.com/stretchr/testify/assert"
"strings"
"testing"
)
// TestSeqEnd verifies that a SEQEND entity parses from its DXF tag stream,
// equals the expected baseline entity, and reports the correct traits.
func TestSeqEnd(t *testing.T) {
	want := SeqEnd{
		BaseEntity: BaseEntity{
			On:      true,
			Visible: true,
		},
	}

	tags := core.TagSlice(core.AllTags(core.Tagger(strings.NewReader(" 0\nSEQEND"))))
	got, err := NewSeqEnd(tags)
	assert.Nil(t, err)
	assert.True(t, want.Equals(got))

	// Equality must fail against an unrelated value type.
	assert.False(t, SeqEnd{}.Equals(core.NewStringValue("SEQOTHER")))
	// Trait queries on the zero value.
	assert.True(t, SeqEnd{}.IsSeqEnd())
	assert.False(t, SeqEnd{}.HasNestedEntities())
}
|
<gh_stars>1-10
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: cosmos/evidence/query.proto
package types
import (
context "context"
fmt "fmt"
types "github.com/cosmos/cosmos-sdk/codec/types"
query "github.com/cosmos/cosmos-sdk/types/query"
_ "github.com/gogo/protobuf/gogoproto"
grpc1 "github.com/gogo/protobuf/grpc"
proto "github.com/gogo/protobuf/proto"
github_com_tendermint_tendermint_libs_bytes "github.com/tendermint/tendermint/libs/bytes"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
io "io"
math "math"
math_bits "math/bits"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// QueryEvidenceRequest is the request type for the Query/Evidence RPC method
type QueryEvidenceRequest struct {
EvidenceHash github_com_tendermint_tendermint_libs_bytes.HexBytes `protobuf:"bytes,1,opt,name=evidence_hash,json=evidenceHash,proto3,casttype=github.com/tendermint/tendermint/libs/bytes.HexBytes" json:"evidence_hash,omitempty"`
}
func (m *QueryEvidenceRequest) Reset() { *m = QueryEvidenceRequest{} }
func (m *QueryEvidenceRequest) String() string { return proto.CompactTextString(m) }
func (*QueryEvidenceRequest) ProtoMessage() {}
func (*QueryEvidenceRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_6afffc78347cbc5e, []int{0}
}
func (m *QueryEvidenceRequest) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *QueryEvidenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
if deterministic {
return xxx_messageInfo_QueryEvidenceRequest.Marshal(b, m, deterministic)
} else {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
}
func (m *QueryEvidenceRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_QueryEvidenceRequest.Merge(m, src)
}
func (m *QueryEvidenceRequest) XXX_Size() int {
return m.Size()
}
func (m *QueryEvidenceRequest) XXX_DiscardUnknown() {
xxx_messageInfo_QueryEvidenceRequest.DiscardUnknown(m)
}
var xxx_messageInfo_QueryEvidenceRequest proto.InternalMessageInfo
func (m *QueryEvidenceRequest) GetEvidenceHash() github_com_tendermint_tendermint_libs_bytes.HexBytes {
if m != nil {
return m.EvidenceHash
}
return nil
}
// QueryEvidenceResponse is the response type for the Query/Evidence RPC method
type QueryEvidenceResponse struct {
Evidence *types.Any `protobuf:"bytes,1,opt,name=evidence,proto3" json:"evidence,omitempty"`
}
func (m *QueryEvidenceResponse) Reset() { *m = QueryEvidenceResponse{} }
func (m *QueryEvidenceResponse) String() string { return proto.CompactTextString(m) }
func (*QueryEvidenceResponse) ProtoMessage() {}
func (*QueryEvidenceResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_6afffc78347cbc5e, []int{1}
}
func (m *QueryEvidenceResponse) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *QueryEvidenceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
if deterministic {
return xxx_messageInfo_QueryEvidenceResponse.Marshal(b, m, deterministic)
} else {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
}
func (m *QueryEvidenceResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_QueryEvidenceResponse.Merge(m, src)
}
func (m *QueryEvidenceResponse) XXX_Size() int {
return m.Size()
}
func (m *QueryEvidenceResponse) XXX_DiscardUnknown() {
xxx_messageInfo_QueryEvidenceResponse.DiscardUnknown(m)
}
var xxx_messageInfo_QueryEvidenceResponse proto.InternalMessageInfo
func (m *QueryEvidenceResponse) GetEvidence() *types.Any {
if m != nil {
return m.Evidence
}
return nil
}
// QueryEvidenceRequest is the request type for the Query/AllEvidence RPC method
type QueryAllEvidenceRequest struct {
Req *query.PageRequest `protobuf:"bytes,1,opt,name=req,proto3" json:"req,omitempty"`
}
func (m *QueryAllEvidenceRequest) Reset() { *m = QueryAllEvidenceRequest{} }
func (m *QueryAllEvidenceRequest) String() string { return proto.CompactTextString(m) }
func (*QueryAllEvidenceRequest) ProtoMessage() {}
func (*QueryAllEvidenceRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_6afffc78347cbc5e, []int{2}
}
func (m *QueryAllEvidenceRequest) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *QueryAllEvidenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
if deterministic {
return xxx_messageInfo_QueryAllEvidenceRequest.Marshal(b, m, deterministic)
} else {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
}
func (m *QueryAllEvidenceRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_QueryAllEvidenceRequest.Merge(m, src)
}
func (m *QueryAllEvidenceRequest) XXX_Size() int {
return m.Size()
}
func (m *QueryAllEvidenceRequest) XXX_DiscardUnknown() {
xxx_messageInfo_QueryAllEvidenceRequest.DiscardUnknown(m)
}
var xxx_messageInfo_QueryAllEvidenceRequest proto.InternalMessageInfo
func (m *QueryAllEvidenceRequest) GetReq() *query.PageRequest {
if m != nil {
return m.Req
}
return nil
}
// QueryAllEvidenceResponse is the response type for the Query/AllEvidence RPC method
type QueryAllEvidenceResponse struct {
Evidence []*types.Any `protobuf:"bytes,1,rep,name=evidence,proto3" json:"evidence,omitempty"`
Res *query.PageResponse `protobuf:"bytes,2,opt,name=res,proto3" json:"res,omitempty"`
}
func (m *QueryAllEvidenceResponse) Reset() { *m = QueryAllEvidenceResponse{} }
func (m *QueryAllEvidenceResponse) String() string { return proto.CompactTextString(m) }
func (*QueryAllEvidenceResponse) ProtoMessage() {}
func (*QueryAllEvidenceResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_6afffc78347cbc5e, []int{3}
}
func (m *QueryAllEvidenceResponse) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
}
func (m *QueryAllEvidenceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
if deterministic {
return xxx_messageInfo_QueryAllEvidenceResponse.Marshal(b, m, deterministic)
} else {
b = b[:cap(b)]
n, err := m.MarshalToSizedBuffer(b)
if err != nil {
return nil, err
}
return b[:n], nil
}
}
func (m *QueryAllEvidenceResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_QueryAllEvidenceResponse.Merge(m, src)
}
func (m *QueryAllEvidenceResponse) XXX_Size() int {
return m.Size()
}
func (m *QueryAllEvidenceResponse) XXX_DiscardUnknown() {
xxx_messageInfo_QueryAllEvidenceResponse.DiscardUnknown(m)
}
var xxx_messageInfo_QueryAllEvidenceResponse proto.InternalMessageInfo
func (m *QueryAllEvidenceResponse) GetEvidence() []*types.Any {
if m != nil {
return m.Evidence
}
return nil
}
func (m *QueryAllEvidenceResponse) GetRes() *query.PageResponse {
if m != nil {
return m.Res
}
return nil
}
func init() {
proto.RegisterType((*QueryEvidenceRequest)(nil), "cosmos.evidence.QueryEvidenceRequest")
proto.RegisterType((*QueryEvidenceResponse)(nil), "cosmos.evidence.QueryEvidenceResponse")
proto.RegisterType((*QueryAllEvidenceRequest)(nil), "cosmos.evidence.QueryAllEvidenceRequest")
proto.RegisterType((*QueryAllEvidenceResponse)(nil), "cosmos.evidence.QueryAllEvidenceResponse")
}
func init() { proto.RegisterFile("cosmos/evidence/query.proto", fileDescriptor_6afffc78347cbc5e) }
var fileDescriptor_6afffc78347cbc5e = []byte{
// 400 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xc1, 0x6a, 0xdb, 0x30,
0x18, 0xc7, 0xed, 0x85, 0x8d, 0xa0, 0x64, 0x0c, 0x44, 0xc6, 0x12, 0x8f, 0x79, 0xc3, 0xb0, 0x91,
0xb1, 0x45, 0x1a, 0x59, 0x0f, 0xbd, 0x26, 0xd0, 0x36, 0xbd, 0xb5, 0x3e, 0xb6, 0x94, 0x62, 0xc7,
0xaa, 0x6d, 0xea, 0x48, 0x8e, 0x25, 0x97, 0xb8, 0x4f, 0xd1, 0xc7, 0x2a, 0x3d, 0xe5, 0xd8, 0x53,
0x29, 0xc9, 0x5b, 0xf4, 0x54, 0x2c, 0x5b, 0x6d, 0x48, 0x0c, 0xe9, 0x29, 0x5f, 0xac, 0x9f, 0xff,
0xbf, 0x4f, 0x9f, 0x64, 0xf0, 0x75, 0xcc, 0xf8, 0x84, 0x71, 0x4c, 0xae, 0x42, 0x8f, 0xd0, 0x31,
0xc1, 0xd3, 0x94, 0x24, 0x19, 0x8a, 0x13, 0x26, 0x18, 0xfc, 0x54, 0x2c, 0x22, 0xb5, 0x68, 0x7c,
0x2b, 0x69, 0x09, 0xe1, 0xd8, 0xf1, 0x43, 0xea, 0x88, 0x90, 0xd1, 0x82, 0x37, 0x5a, 0x3e, 0xf3,
0x99, 0x2c, 0x71, 0x5e, 0x95, 0x4f, 0x3b, 0x3e, 0x63, 0x7e, 0x44, 0xb0, 0xfc, 0xe7, 0xa6, 0x17,
0xd8, 0xa1, 0xa5, 0xc0, 0x4a, 0x41, 0xeb, 0x38, 0x8f, 0xda, 0x2b, 0x05, 0x36, 0x99, 0xa6, 0x84,
0x0b, 0x78, 0x06, 0x3e, 0x2a, 0xe7, 0x79, 0xe0, 0xf0, 0xa0, 0xad, 0xff, 0xd0, 0xbb, 0xcd, 0xe1,
0xee, 0xd3, 0xc3, 0xf7, 0x1d, 0x3f, 0x14, 0x41, 0xea, 0xa2, 0x31, 0x9b, 0x60, 0x41, 0xa8, 0x47,
0x92, 0x49, 0x48, 0xc5, 0x6a, 0x19, 0x85, 0x2e, 0xc7, 0x6e, 0x26, 0x08, 0x47, 0x23, 0x32, 0x1b,
0xe6, 0x85, 0xdd, 0x54, 0x71, 0x23, 0x87, 0x07, 0xd6, 0x21, 0xf8, 0xbc, 0xa6, 0xe5, 0x31, 0xa3,
0x9c, 0xc0, 0x7f, 0xa0, 0xae, 0x40, 0xa9, 0x6c, 0xf4, 0x5b, 0xa8, 0xe8, 0x1e, 0xa9, 0xee, 0xd1,
0x80, 0x66, 0xf6, 0x0b, 0x65, 0xed, 0x83, 0x2f, 0x32, 0x6a, 0x10, 0x45, 0xeb, 0x9b, 0xf8, 0x03,
0x6a, 0x09, 0x99, 0x96, 0x39, 0x1d, 0x54, 0xce, 0xb2, 0x98, 0xef, 0x91, 0xe3, 0x2b, 0xce, 0xce,
0x29, 0xeb, 0x1a, 0xb4, 0x37, 0x73, 0x2a, 0xbb, 0xaa, 0x6d, 0xef, 0x0a, 0xfe, 0xcd, 0xd5, 0xbc,
0xfd, 0x4e, 0xaa, 0x8d, 0x2a, 0x75, 0x11, 0x9d, 0xbb, 0x79, 0xff, 0x4e, 0x07, 0xef, 0xa5, 0x1c,
0x9e, 0x82, 0xba, 0xb2, 0xc3, 0x9f, 0x68, 0xed, 0xf4, 0x51, 0xd5, 0x51, 0x19, 0xbf, 0xb6, 0x61,
0x85, 0xc9, 0xd2, 0xa0, 0x07, 0x1a, 0x2b, 0xbb, 0x83, 0xdd, 0xea, 0x17, 0x37, 0x07, 0x69, 0xfc,
0x7e, 0x03, 0xa9, 0x2c, 0xc3, 0x83, 0xdb, 0x85, 0xa9, 0xcf, 0x17, 0xa6, 0xfe, 0xb8, 0x30, 0xf5,
0x9b, 0xa5, 0xa9, 0xcd, 0x97, 0xa6, 0x76, 0xbf, 0x34, 0xb5, 0x93, 0xde, 0xca, 0xcd, 0x29, 0xef,
0x71, 0xf1, 0xd3, 0xe3, 0xde, 0x25, 0x9e, 0xbd, 0x7e, 0x02, 0x22, 0x8b, 0x09, 0x77, 0x3f, 0xc8,
0xd9, 0xfe, 0x7f, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x6a, 0xbc, 0xe1, 0xb4, 0x22, 0x03, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// QueryClient is the client API for Query service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type QueryClient interface {
// Evidence queries evidence based on evidence hash
Evidence(ctx context.Context, in *QueryEvidenceRequest, opts ...grpc.CallOption) (*QueryEvidenceResponse, error)
// AllEvidence queries all evidence
AllEvidence(ctx context.Context, in *QueryAllEvidenceRequest, opts ...grpc.CallOption) (*QueryAllEvidenceResponse, error)
}
type queryClient struct {
cc grpc1.ClientConn
}
func NewQueryClient(cc grpc1.ClientConn) QueryClient {
return &queryClient{cc}
}
func (c *queryClient) Evidence(ctx context.Context, in *QueryEvidenceRequest, opts ...grpc.CallOption) (*QueryEvidenceResponse, error) {
out := new(QueryEvidenceResponse)
err := c.cc.Invoke(ctx, "/cosmos.evidence.Query/Evidence", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *queryClient) AllEvidence(ctx context.Context, in *QueryAllEvidenceRequest, opts ...grpc.CallOption) (*QueryAllEvidenceResponse, error) {
out := new(QueryAllEvidenceResponse)
err := c.cc.Invoke(ctx, "/cosmos.evidence.Query/AllEvidence", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// QueryServer is the server API for Query service.
type QueryServer interface {
// Evidence queries evidence based on evidence hash
Evidence(context.Context, *QueryEvidenceRequest) (*QueryEvidenceResponse, error)
// AllEvidence queries all evidence
AllEvidence(context.Context, *QueryAllEvidenceRequest) (*QueryAllEvidenceResponse, error)
}
// UnimplementedQueryServer can be embedded to have forward compatible implementations.
type UnimplementedQueryServer struct {
}
func (*UnimplementedQueryServer) Evidence(ctx context.Context, req *QueryEvidenceRequest) (*QueryEvidenceResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Evidence not implemented")
}
func (*UnimplementedQueryServer) AllEvidence(ctx context.Context, req *QueryAllEvidenceRequest) (*QueryAllEvidenceResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method AllEvidence not implemented")
}
func RegisterQueryServer(s grpc1.Server, srv QueryServer) {
s.RegisterService(&_Query_serviceDesc, srv)
}
func _Query_Evidence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(QueryEvidenceRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(QueryServer).Evidence(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/cosmos.evidence.Query/Evidence",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(QueryServer).Evidence(ctx, req.(*QueryEvidenceRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Query_AllEvidence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(QueryAllEvidenceRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(QueryServer).AllEvidence(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/cosmos.evidence.Query/AllEvidence",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(QueryServer).AllEvidence(ctx, req.(*QueryAllEvidenceRequest))
}
return interceptor(ctx, in, info, handler)
}
var _Query_serviceDesc = grpc.ServiceDesc{
ServiceName: "cosmos.evidence.Query",
HandlerType: (*QueryServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Evidence",
Handler: _Query_Evidence_Handler,
},
{
MethodName: "AllEvidence",
Handler: _Query_AllEvidence_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "cosmos/evidence/query.proto",
}
func (m *QueryEvidenceRequest) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalToSizedBuffer(dAtA[:size])
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *QueryEvidenceRequest) MarshalTo(dAtA []byte) (int, error) {
size := m.Size()
return m.MarshalToSizedBuffer(dAtA[:size])
}
func (m *QueryEvidenceRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
i := len(dAtA)
_ = i
var l int
_ = l
if len(m.EvidenceHash) > 0 {
i -= len(m.EvidenceHash)
copy(dAtA[i:], m.EvidenceHash)
i = encodeVarintQuery(dAtA, i, uint64(len(m.EvidenceHash)))
i--
dAtA[i] = 0xa
}
return len(dAtA) - i, nil
}
func (m *QueryEvidenceResponse) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalToSizedBuffer(dAtA[:size])
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *QueryEvidenceResponse) MarshalTo(dAtA []byte) (int, error) {
size := m.Size()
return m.MarshalToSizedBuffer(dAtA[:size])
}
func (m *QueryEvidenceResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) {
i := len(dAtA)
_ = i
var l int
_ = l
if m.Evidence != nil {
{
size, err := m.Evidence.MarshalToSizedBuffer(dAtA[:i])
if err != nil {
return 0, err
}
i -= size
i = encodeVarintQuery(dAtA, i, uint64(size))
}
i--
dAtA[i] = 0xa
}
return len(dAtA) - i, nil
}
func (m *QueryAllEvidenceRequest) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalToSizedBuffer(dAtA[:size])
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *QueryAllEvidenceRequest) MarshalTo(dAtA []byte) (int, error) {
size := m.Size()
return m.MarshalToSizedBuffer(dAtA[:size])
}
func (m *QueryAllEvidenceRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
i := len(dAtA)
_ = i
var l int
_ = l
if m.Req != nil {
{
size, err := m.Req.MarshalToSizedBuffer(dAtA[:i])
if err != nil {
return 0, err
}
i -= size
i = encodeVarintQuery(dAtA, i, uint64(size))
}
i--
dAtA[i] = 0xa
}
return len(dAtA) - i, nil
}
func (m *QueryAllEvidenceResponse) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
n, err := m.MarshalToSizedBuffer(dAtA[:size])
if err != nil {
return nil, err
}
return dAtA[:n], nil
}
func (m *QueryAllEvidenceResponse) MarshalTo(dAtA []byte) (int, error) {
size := m.Size()
return m.MarshalToSizedBuffer(dAtA[:size])
}
func (m *QueryAllEvidenceResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) {
i := len(dAtA)
_ = i
var l int
_ = l
if m.Res != nil {
{
size, err := m.Res.MarshalToSizedBuffer(dAtA[:i])
if err != nil {
return 0, err
}
i -= size
i = encodeVarintQuery(dAtA, i, uint64(size))
}
i--
dAtA[i] = 0x12
}
if len(m.Evidence) > 0 {
for iNdEx := len(m.Evidence) - 1; iNdEx >= 0; iNdEx-- {
{
size, err := m.Evidence[iNdEx].MarshalToSizedBuffer(dAtA[:i])
if err != nil {
return 0, err
}
i -= size
i = encodeVarintQuery(dAtA, i, uint64(size))
}
i--
dAtA[i] = 0xa
}
}
return len(dAtA) - i, nil
}
// encodeVarintQuery writes v as a protobuf varint ending just before offset
// in dAtA (the Marshal functions fill the buffer back-to-front) and returns
// the index of the varint's first byte.
// NOTE: protoc-gogo generated code — do not hand-edit; regeneration will
// overwrite this file.
func encodeVarintQuery(dAtA []byte, offset int, v uint64) int {
	offset -= sovQuery(v)
	base := offset
	for v >= 1<<7 {
		dAtA[offset] = uint8(v&0x7f | 0x80)
		v >>= 7
		offset++
	}
	dAtA[offset] = uint8(v)
	return base
}
func (m *QueryEvidenceRequest) Size() (n int) {
if m == nil {
return 0
}
var l int
_ = l
l = len(m.EvidenceHash)
if l > 0 {
n += 1 + l + sovQuery(uint64(l))
}
return n
}
func (m *QueryEvidenceResponse) Size() (n int) {
if m == nil {
return 0
}
var l int
_ = l
if m.Evidence != nil {
l = m.Evidence.Size()
n += 1 + l + sovQuery(uint64(l))
}
return n
}
func (m *QueryAllEvidenceRequest) Size() (n int) {
if m == nil {
return 0
}
var l int
_ = l
if m.Req != nil {
l = m.Req.Size()
n += 1 + l + sovQuery(uint64(l))
}
return n
}
func (m *QueryAllEvidenceResponse) Size() (n int) {
if m == nil {
return 0
}
var l int
_ = l
if len(m.Evidence) > 0 {
for _, e := range m.Evidence {
l = e.Size()
n += 1 + l + sovQuery(uint64(l))
}
}
if m.Res != nil {
l = m.Res.Size()
n += 1 + l + sovQuery(uint64(l))
}
return n
}
// sovQuery returns the encoded size in bytes of x as a protobuf varint
// (7 payload bits per byte; x|1 makes Len64 return at least 1, so zero
// still occupies one byte). Generated code — do not hand-edit.
func sovQuery(x uint64) (n int) {
	return (math_bits.Len64(x|1) + 6) / 7
}
// sozQuery returns the varint size of x after zig-zag encoding
// (used for sint* fields). Generated code — do not hand-edit.
func sozQuery(x uint64) (n int) {
	return sovQuery(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
func (m *QueryEvidenceRequest) Unmarshal(dAtA []byte) error {
l := len(dAtA)
iNdEx := 0
for iNdEx < l {
preIndex := iNdEx
var wire uint64
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowQuery
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
}
fieldNum := int32(wire >> 3)
wireType := int(wire & 0x7)
if wireType == 4 {
return fmt.Errorf("proto: QueryEvidenceRequest: wiretype end group for non-group")
}
if fieldNum <= 0 {
return fmt.Errorf("proto: QueryEvidenceRequest: illegal tag %d (wire type %d)", fieldNum, wire)
}
switch fieldNum {
case 1:
if wireType != 2 {
return fmt.Errorf("proto: wrong wireType = %d for field EvidenceHash", wireType)
}
var byteLen int
for shift := uint(0); ; shift += 7 {
if shift >= 64 {
return ErrIntOverflowQuery
}
if iNdEx >= l {
return io.ErrUnexpectedEOF
}
b := dAtA[iNdEx]
iNdEx++
byteLen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
}
if byteLen < 0 {
return ErrInvalidLengthQuery
}
postIndex := iNdEx + byteLen
if postIndex < 0 {
return ErrInvalidLengthQuery
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
m.EvidenceHash = append(m.EvidenceHash[:0], dAtA[iNdEx:postIndex]...)
if m.EvidenceHash == nil {
m.EvidenceHash = []byte{}
}
iNdEx = postIndex
default:
iNdEx = preIndex
skippy, err := skipQuery(dAtA[iNdEx:])
if err != nil {
return err
}
if skippy < 0 {
return ErrInvalidLengthQuery
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthQuery
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
iNdEx += skippy
}
}
if iNdEx > l {
return io.ErrUnexpectedEOF
}
return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// NOTE: gogo/protobuf-generated code — do not hand-edit the decoding logic.
func (m *QueryEvidenceResponse) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint tag (field number << 3 | wire type).
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowQuery
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		// Wire type 4 (end-group) is invalid at message top level.
		if wireType == 4 {
			return fmt.Errorf("proto: QueryEvidenceResponse: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: QueryEvidenceResponse: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: Evidence — a length-delimited embedded Any message.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Evidence", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowQuery
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthQuery
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthQuery
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// Lazily allocate the target message, then decode the sub-slice.
			if m.Evidence == nil {
				m.Evidence = &types.Any{}
			}
			if err := m.Evidence.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipQuery(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthQuery
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthQuery
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// NOTE: gogo/protobuf-generated code — do not hand-edit the decoding logic.
func (m *QueryAllEvidenceRequest) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint tag (field number << 3 | wire type).
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowQuery
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: QueryAllEvidenceRequest: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: QueryAllEvidenceRequest: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: Req — length-delimited embedded PageRequest (pagination).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Req", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowQuery
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthQuery
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthQuery
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// Lazily allocate the target message, then decode the sub-slice.
			if m.Req == nil {
				m.Req = &query.PageRequest{}
			}
			if err := m.Req.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipQuery(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthQuery
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthQuery
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// NOTE: gogo/protobuf-generated code — do not hand-edit the decoding logic.
func (m *QueryAllEvidenceResponse) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		// Decode the varint tag (field number << 3 | wire type).
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowQuery
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: QueryAllEvidenceResponse: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: QueryAllEvidenceResponse: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Field 1: Evidence — repeated length-delimited Any messages.
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Evidence", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowQuery
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthQuery
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthQuery
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			// Append a fresh element, then decode into it in place.
			m.Evidence = append(m.Evidence, &types.Any{})
			if err := m.Evidence[len(m.Evidence)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// Field 2: Res — length-delimited embedded PageResponse (pagination).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Res", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowQuery
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthQuery
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthQuery
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Res == nil {
				m.Res = &query.PageResponse{}
			}
			if err := m.Res.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipQuery(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthQuery
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthQuery
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipQuery returns the number of bytes occupied by the unknown field that
// starts at dAtA[0] (tag included), tracking group nesting via depth.
// NOTE: gogo/protobuf-generated code — do not hand-edit the decoding logic.
func skipQuery(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	depth := 0
	for iNdEx < l {
		var wire uint64
		// Decode the varint tag of the field being skipped.
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowQuery
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint: consume bytes until the continuation bit clears.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowQuery
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
		case 1:
			// Fixed 64-bit value.
			iNdEx += 8
		case 2:
			// Length-delimited: read the length varint, then skip the payload.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowQuery
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLengthQuery
			}
			iNdEx += length
		case 3:
			// Start-group: descend one nesting level.
			depth++
		case 4:
			// End-group: must match a prior start-group.
			if depth == 0 {
				return 0, ErrUnexpectedEndOfGroupQuery
			}
			depth--
		case 5:
			// Fixed 32-bit value.
			iNdEx += 4
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
		if iNdEx < 0 {
			return 0, ErrInvalidLengthQuery
		}
		// Only return once all opened groups have been closed.
		if depth == 0 {
			return iNdEx, nil
		}
	}
	return 0, io.ErrUnexpectedEOF
}
var (
	// ErrInvalidLengthQuery reports a negative or overflowing length prefix.
	ErrInvalidLengthQuery = fmt.Errorf("proto: negative length found during unmarshaling")
	// ErrIntOverflowQuery reports a varint wider than 64 bits.
	ErrIntOverflowQuery = fmt.Errorf("proto: integer overflow")
	// ErrUnexpectedEndOfGroupQuery reports an end-group tag with no matching start-group.
	ErrUnexpectedEndOfGroupQuery = fmt.Errorf("proto: unexpected end of group")
)
|
#include <iostream>
// A playback timeline that advances at a configurable rate.
// Setting the rate to exactly 0.0 places it in the stopped state.
class Timeline {
private:
    bool stopped;  // true while the timeline is not advancing
    double speed;  // current playback rate (negative means reverse)

public:
    // Begins stopped, with a default playback rate of 1.0.
    Timeline() : stopped(true), speed(1.0) {}

    // Reports whether the timeline is currently stopped.
    bool hasStopped() const { return stopped; }

    // Updates the playback rate; zero stops the timeline, any other
    // value (including negative) resumes it.
    void setSpeed(double newSpeed) {
        speed = newSpeed;
        stopped = (speed == 0.0);
    }
};
// Smoke-tests Timeline: prints the stopped flag initially and after
// setting a positive and then a negative playback speed.
int main() {
    Timeline timeline;
    std::cout << std::boolalpha;  // persists for all subsequent prints
    std::cout << "Timeline is stopped: " << timeline.hasStopped() << std::endl;
    timeline.setSpeed(2.0);
    std::cout << "Timeline is stopped: " << timeline.hasStopped() << std::endl;
    timeline.setSpeed(-1.5);
    std::cout << "Timeline is stopped: " << timeline.hasStopped() << std::endl;
    return 0;
}
<reponame>TachoMex/ant<gh_stars>1-10
# frozen_string_literal: true
module Ant
  module Bot
    # Wraps a single bot command; analogous to a route definition.
    # For now it only tracks a required-parameter list, but it is meant
    # to grow into a richer DSL.
    class Command
      attr_reader :block

      # params: list of required parameter names (symbols).
      # block:  callable invoked with the collected params hash.
      def initialize(params, block)
        @params = params
        @block = block
      end

      # Invokes the block with the given params hash.
      # Raises when any required parameter is still missing.
      def execute(params)
        raise 'NotReady' unless ready?(params)

        @block.call(params)
      end

      # True when every required parameter is present in current_params.
      def ready?(current_params)
        @params.reject { |key| current_params.key?(key) }.empty?
      end

      # The first required parameter absent from current_params, or nil
      # when everything has been supplied.
      def next_missing_param(current_params)
        @params.find { |key| !current_params.key?(key) }
      end
    end

    # A registry mapping command names to Command objects.
    class CommandDefinition
      def initialize
        @commands = {}
      end

      # Registers a command implementation under the given name.
      def register_command(name, params, block)
        @commands[name] = Command.new(params, block)
      end

      # Looks up a previously registered command by name.
      def [](name)
        @commands[name]
      end
    end
  end
end
|
### System Preferences > General
# NOTE(review): NSGlobalDomain changes generally require logging out and
# back in (or restarting the affected apps) before they take effect — confirm.
# Appearance: Dark
defaults write NSGlobalDomain AppleInterfaceStyle -string "Dark"
# Accent color: Blue
defaults write NSGlobalDomain AppleAquaColorVariant -int 1
# Highlight color: "Blue" (stored as an RGB triple)
defaults write NSGlobalDomain AppleHighlightColor -string '0.780400 0.815700 0.858800'
# Automatically hide and show the menu bar: off
defaults write NSGlobalDomain _HIHideMenuBar -bool false
# Sidebar icon size: Medium
defaults write NSGlobalDomain NSTableViewDefaultSizeMode -int 2
# Show scroll bars: "When scrolling"
defaults write NSGlobalDomain AppleShowScrollBars -string WhenScrolling
# Click in the scroll bar to "Jump to the spot that's clicked"
defaults write NSGlobalDomain AppleScrollerPagingBehavior -int 1
# Default web browser: Safari
# TODO
# Ask to keep changes when closing documents: off
defaults write NSGlobalDomain NSCloseAlwaysConfirmsChanges -bool false
# [x] Use LCD font smoothing when available
# TODO
|
# Evaluate checkpoint 7 of the 512+0+512-common model on the WikiText-103
# validation set (batch size 1, last partial batch dropped), with inputs
# augmented by remove_all_but_named_entities_first_half_quarter and scored
# via penultimate_quarter_eval.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-common/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-common/7-512+0+512-NER-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_named_entities_first_half_quarter --eval_function penultimate_quarter_eval
<gh_stars>0
import React from "react";
import { Route } from "react-router-dom";
import Navbar from "../components/navbar";
import { OverviewLayout, PatientDetailLayout, PatientListLayout, UnauthenticatedLayout } from "../layouts";
import style from "./app.module.less";
import { useAppContext } from "../contexts/app-context";
// Root application component: resolves auth state from the app context and
// renders a loading placeholder, the unauthenticated layout, or the navbar
// plus the main route table.
export const App = () => {
  const { user, authState, loading } = useAppContext();
  // Context still resolving — render a minimal placeholder.
  if (loading) return <div>loading...</div>;
  // Not authenticated — hand off to the login layout with the login URL.
  if (!loading && !authState?.isAuthenticated) return <UnauthenticatedLayout loginUrl={authState?.loginUrl || "/"} />;
  return (
    <>
      {user && <Navbar user={user} logoutUrl={authState?.logoutUrl || "/"} />}
      <div className={style.content}>
        <Route exact path="/oversikt">
          <OverviewLayout />
        </Route>
        <Route exact path="/pasient">
          <PatientListLayout />
        </Route>
        <Route exact path="/pasient/:id">
          <PatientDetailLayout />
        </Route>
        {/* Placeholder routes below: headings only until real layouts exist. */}
        <Route exact path="/timeplan">
          <h1>timeplan</h1>
        </Route>
        <Route exact path="/inbox">
          <h1>inbox</h1>
        </Route>
        <Route exact path="/instillinger">
          <h1>instillinger</h1>
        </Route>
      </div>
    </>
  );
};
export default App;
|
#!/usr/bin/env bash
# Install lazydocker via Homebrew.
# Abort on the first failing command (the original continued past a failed
# `brew update` and had a stray trailing semicolon).
set -euo pipefail

brew update
brew install lazydocker
<gh_stars>0
import chalk from 'chalk';
import _ from 'lodash';
import config from '../config.json';
import { generateServerMessage } from './lib/utilities';
import { initialize } from './modules/initialize';
/**
 * Configuration.
 *
 * @since 1.0.0
 */
const configSettingsTimeZone = _.get(config, 'settings.time-zone');
const configSettingsLogLevel = _.get(config, 'settings.log-level');
const configSettingsMemberfulApiKey = _.get(config, 'settings.memberful-api-key');
const configSettingsMemberfulSubdomain = _.get(config, 'settings.memberful-subdomain');

/**
 * Configuration pre-checks.
 *
 * Bug fix: the original outer condition only inspected "time-zone" and
 * "log-level", so an invalid Memberful api key or subdomain never prevented
 * startup even though error branches for both existed. All four settings are
 * now validated before the server starts, and every invalid one is reported.
 *
 * @since 1.0.0
 */
const invalidTimeZone = !_.isString(configSettingsTimeZone) || _.isEmpty(configSettingsTimeZone);
const invalidLogLevel = !_.includes([10, 20, 30, 40], configSettingsLogLevel);
const invalidMemberfulApiKey = !_.isString(configSettingsMemberfulApiKey) || _.isEmpty(configSettingsMemberfulApiKey);
const invalidMemberfulSubdomain = !_.isString(configSettingsMemberfulSubdomain) || _.isEmpty(configSettingsMemberfulSubdomain);

if (invalidTimeZone || invalidLogLevel || invalidMemberfulApiKey || invalidMemberfulSubdomain) {
  // Report every invalid setting, not just the first one found.
  if (invalidTimeZone) {
    generateServerMessage('"settings.time-zone" is not configured', true, 1);
  }
  if (invalidLogLevel) {
    generateServerMessage('"settings.log-level" is not configured or is invalid', true, 1);
  }
  if (invalidMemberfulApiKey) {
    generateServerMessage('"settings.memberful-api-key" is not configured', true, 1);
  }
  if (invalidMemberfulSubdomain) {
    generateServerMessage('"settings.memberful-subdomain" is not configured', true, 1);
  }
} else {
  generateServerMessage(
    [
      chalk.green('Server is ready!'),
      'Starting sync processes',
    ].join(' '),
    false,
  );

  /**
   * Initialize.
   *
   * @since 1.0.0
   */
  initialize();
}

/**
 * Capture signal interruption.
 *
 * @since 1.0.0
 */
process.on('SIGINT', () => {
  generateServerMessage('Stopping server', false, 0);
});
|
<gh_stars>0
var _Promise = typeof Promise === 'undefined' ? require('es6-promise').Promise : Promise;
var limitPromises = require('./limitPromises');
describe('limitPromises', function () {
  // Shared counter of in-flight promises; both tests rely on it returning
  // to zero before the next test starts (sequential test execution).
  var pending = 0;
  // Returns a promise that resolves after 10 ms, tracking in-flight count.
  function fn() {
    pending++;
    return new _Promise(function (resolve) {
      return setTimeout(resolve, 10);
    }).then(function () {
      return pending--;
    });
  }
  it('should run at most N promises at the same time', function () {
    var limit = limitPromises(4);
    var fn2 = limit(fn);
    var result = _Promise.all([fn2(), fn2(), fn2(), fn2(), fn2(), fn2(), fn2(), fn2(), fn2(), fn2()]);
    // Only the first 4 calls may have started synchronously.
    expect(pending).toBe(4);
    // NOTE(review): this expectation fires outside the test's own promise
    // chain — if it fails after the test has resolved, the failure may be
    // swallowed or attributed elsewhere. Consider folding it into `result`.
    setTimeout(function () {
      expect(pending).toBe(4);
    }, 10);
    return result.then(function () {
      expect(pending).toBe(0);
    });
  });
  it('should accept Infinity as limit', function () {
    var limit = limitPromises(Infinity);
    var fn2 = limit(fn);
    var result = _Promise.all([fn2(), fn2(), fn2(), fn2(), fn2(), fn2(), fn2(), fn2(), fn2(), fn2()]);
    // With no cap, all 10 calls start immediately.
    expect(pending).toBe(10);
    return result.then(function () {
      expect(pending).toBe(0);
    });
  });
});
//# sourceMappingURL=limitPromises.test.js.map |
# Demonstrates in-place list sorting: prints the list in its original order,
# sorts it, then prints it again in ascending order.
# Bug fix: the trailing expected-output lines were bare text (a syntax
# error); they are now comments so the script actually runs.
a = [3, 4, 6, 2, 1]
for x in a:
    print(x)
a.sort()
for x in a:
    print(x)
# Output (second loop, after the sort):
# 1
# 2
# 3
# 4
# 6
def count_in_range(lst, a, b):
    """Count the values in lst lying strictly between a and b (exclusive)."""
    total = 0
    for value in lst:
        if a < value < b:
            total += 1
    return total


result = count_in_range([4, 10, 8, 16, 5], 5, 11)
print(result)
<gh_stars>100-1000
// Copyright (C) 2019. Huawei Technologies Co., Ltd. All rights reserved.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#ifndef _H_FCFCOPTIMIZER
#define _H_FCFCOPTIMIZER
#include "OPOptimizer.hpp"
// Graph optimizer that merges fully-connected (FC) layers in a ModelSpec:
//  - vertical_optimize folds a chain FC1 -> FC2 into a single FC.
//  - horizontal_optimize merges sibling FCs that share the same input into
//    one multi-slice FC.
class FCFCOptimizer : public OPOptimizer {
    // Runs both merge passes; returns true if either changed the spec.
    bool optimize(ModelSpec *spec) override
    {
        bool hasOptimized = false;
        hasOptimized |= horizontal_optimize(spec);
        hasOptimized |= vertical_optimize(spec);
        return hasOptimized;
    }

    // Naive matrix multiply C = A * B^T (+ bias), with A (m x k) and
    // B (n x k) stored row-major; B is accessed transposed ("nt").
    // bias, when non-null, is added element-wise to the (m x n) result.
    template <typename T>
    void mmm_nt(T *A, T *B, T *bias, T *C, int m, int n, int k)
    {
        for (int i = 0; i < m; i++) {
            for (int j = 0; j < n; j++) {
                T value = 0;
                for (int z = 0; z < k; z++) {
                    value += A[i * k + z] * B[j * k + z];
                }
                if (bias != nullptr) {
                    value += bias[i * n + j];
                }
                C[i * n + j] = value;
            }
        }
    }

    // Folds consecutive FC layers: y = Wa2*(Wa1*x + b1) + b2 becomes
    // y = Wa3*x + b3 with Wa3 = Wa2*Wa1 and b3 = Wa2*b1 + b2. The first FC
    // is invalidated and the second FC's weights are replaced in place.
    bool vertical_optimize(ModelSpec *spec)
    {
        bool hasOptimized = false;
        for (int i = 0; i < spec->num_operator_specs; i++) {
            if (spec->ops[i].type == OT_FC && spec->ops[i].ps.fc_spec.num_slices == 1) {
                std::vector<std::pair<int, int>> nextOpIndexes = searchOperatorIndexByInput(
                    spec, spec->ops[i].output_tensors_name[0], i, spec->num_operator_specs);
                // 1 -> N || next operator not FC
                if (nextOpIndexes.size() != 1 || spec->ops[nextOpIndexes[0].first].type != OT_FC) {
                    continue;
                }
                // y = Wa2 * (Wa1 * x + b1) + b2
                // y = Wa3 * x + b3
                // Wa3 = Wa2 * Wa1, b3 = Wa2 * b1 + b2
                int a1_id = searchWeightIndex(spec, spec->ops[i].name);
                U32 a1_m = spec->ops[i].ps.fc_spec.num_outputs;
                U32 a1_k = spec->ws[a1_id].bytes_of_weight / bytesOf(spec->ws[a1_id].mdt) / a1_m;
                int a2_id = searchWeightIndex(spec, spec->ops[nextOpIndexes[0].first].name);
                U32 a2_m = spec->ops[nextOpIndexes[0].first].ps.fc_spec.num_outputs;
                U32 a2_k = spec->ws[a2_id].bytes_of_weight / bytesOf(spec->ws[a2_id].mdt) / a2_m;
                // Inner dimensions must agree for the fold to be valid.
                CHECK_REQUIREMENT(a2_k == a1_m);
                int a3_id = a2_id;
                U32 a3_m = a2_m;
                U32 a3_k = a1_k;
                U32 a3_size = a3_m * a3_k * bytesOf(spec->ws[a3_id].mdt);
                U32 b3_size = a3_m * bytesOf(spec->ws[a3_id].mdt);
                U8 *a3 = (U8 *)mt_new_storage(a3_size);
                U8 *b3 = (U8 *)mt_new_storage(b3_size);
                // NOTE(review): the fold is only computed for F32 weights, but
                // only the a2/a1 inner dims are checked above — confirm mdt is
                // guaranteed to be DT_F32 on this path.
                mmm_nt<F32>((F32 *)spec->ws[a2_id].weight, (F32 *)spec->ws[a1_id].weight, nullptr,
                    (F32 *)a3, a3_m, a3_k, a2_k);
                mmm_nt<F32>((F32 *)spec->ws[a2_id].weight, (F32 *)spec->ws[a1_id].vec,
                    (F32 *)spec->ws[a2_id].vec, (F32 *)b3, a3_m, 1, a2_k);
                //erase first fc parameter
                if (spec->ws[a1_id].weight != nullptr) {
                    spec->ws[a1_id].bytes_of_weight = 0;
                    // Only free storage that is not backed by the mapped model file.
                    if (outOfFileMapRange(spec->ws[a1_id].weight, spec->mfd)) {
                        delete spec->ws[a1_id].weight;
                    }
                    spec->ws[a1_id].weight = nullptr;
                }
                if (spec->ws[a1_id].vec != nullptr) {
                    spec->ws[a1_id].bytes_of_vec = 0;
                    if (outOfFileMapRange(spec->ws[a1_id].vec, spec->mfd)) {
                        delete spec->ws[a1_id].vec;
                    }
                    spec->ws[a1_id].vec = nullptr;
                }
                // Rewire the second FC to consume the first FC's input directly.
                str_copy(spec->ops[nextOpIndexes[0].first].input_tensors_name[0],
                    spec->ops[i].input_tensors_name[0], NAME_LEN);
                setOperatorInvalid(spec, i);
                if (spec->ws[a2_id].weight != nullptr &&
                    outOfFileMapRange(spec->ws[a2_id].weight, spec->mfd)) {
                    delete spec->ws[a2_id].weight;
                }
                if (spec->ws[a2_id].vec != nullptr &&
                    outOfFileMapRange(spec->ws[a2_id].vec, spec->mfd)) {
                    delete spec->ws[a2_id].vec;
                }
                spec->ws[a2_id].bytes_of_weight = a3_size;
                spec->ws[a2_id].weight = a3;
                spec->ws[a2_id].bytes_of_vec = b3_size;
                spec->ws[a2_id].vec = b3;
                hasOptimized = true;
            }
        }
        return hasOptimized;
    }

    // Merges two FC operators that read the same input tensor into one
    // multi-slice FC: weights and biases are concatenated, the later FC is
    // invalidated, and its output becomes an extra slice of the earlier one.
    bool horizontal_optimize(ModelSpec *spec)
    {
        const int queryNum = 1;
        OperatorType queryOps[queryNum] = {OT_FC};
        bool hasOptimized = false;
        for (int i = 1; i < spec->num_operator_specs; i++) {
            if (spec->ops[i].type == OT_FC) {
                int curOpIndex = i;
                int prevOpIndex =
                    searchOperatorIndexBackward(spec, curOpIndex - 1, queryOps, queryNum);
                if (prevOpIndex == -1) {
                    continue;
                }
                // Both FCs must consume the exact same input tensor.
                if (strncmp(spec->ops[curOpIndex].input_tensors_name[0],
                        spec->ops[prevOpIndex].input_tensors_name[0], NAME_LEN)) {
                    continue;
                }
                int prevWeightIndex = searchWeightIndex(spec, spec->ops[prevOpIndex].name);
                int curWeightIndex = searchWeightIndex(spec, spec->ops[curOpIndex].name);
                CHECK_REQUIREMENT(prevWeightIndex != -1);
                CHECK_REQUIREMENT(curWeightIndex != -1);
                CHECK_REQUIREMENT(spec->ws[prevWeightIndex].mdt == DT_F32);
                CHECK_REQUIREMENT(spec->ws[curWeightIndex].mdt == DT_F32);
                // Concatenate the two weight matrices back to back.
                U32 weightSize = spec->ws[prevWeightIndex].bytes_of_weight +
                    spec->ws[curWeightIndex].bytes_of_weight;
                U8 *weight = (U8 *)mt_new_storage(weightSize);
                memcpy(weight, spec->ws[prevWeightIndex].weight,
                    spec->ws[prevWeightIndex].bytes_of_weight);
                memcpy(weight + spec->ws[prevWeightIndex].bytes_of_weight,
                    spec->ws[curWeightIndex].weight, spec->ws[curWeightIndex].bytes_of_weight);
                // Concatenate the biases, zero-filling where an FC had none.
                U32 vecSize = sizeof(F32) *
                    (spec->ops[prevOpIndex].ps.fc_spec.num_outputs +
                        spec->ops[curOpIndex].ps.fc_spec.num_outputs);
                U8 *vec = (U8 *)mt_new_storage(vecSize);
                U8 *ptr = vec;
                if (spec->ws[prevWeightIndex].bytes_of_vec == 0) {
                    memset(ptr, 0, sizeof(F32) * (spec->ops[prevOpIndex].ps.fc_spec.num_outputs));
                } else {
                    CHECK_REQUIREMENT(sizeof(F32) * (spec->ops[prevOpIndex].ps.fc_spec.num_outputs) ==
                        spec->ws[prevWeightIndex].bytes_of_vec);
                    memcpy(
                        ptr, spec->ws[prevWeightIndex].vec, spec->ws[prevWeightIndex].bytes_of_vec);
                }
                ptr = vec + sizeof(F32) * (spec->ops[prevOpIndex].ps.fc_spec.num_outputs);
                if (spec->ws[curWeightIndex].bytes_of_vec == 0) {
                    memset(ptr, 0, sizeof(F32) * (spec->ops[curOpIndex].ps.fc_spec.num_outputs));
                } else {
                    CHECK_REQUIREMENT(sizeof(F32) * (spec->ops[curOpIndex].ps.fc_spec.num_outputs) ==
                        spec->ws[curWeightIndex].bytes_of_vec);
                    memcpy(ptr, spec->ws[curWeightIndex].vec, spec->ws[curWeightIndex].bytes_of_vec);
                }
                // Release the old storage (only when not file-mapped).
                // NOTE(review): storage comes from mt_new_storage but is freed
                // with scalar delete — confirm mt_new_storage does not allocate
                // with new[] (otherwise this is mismatched delete).
                if (spec->ws[prevWeightIndex].weight != nullptr) {
                    spec->ws[prevWeightIndex].bytes_of_weight = 0;
                    if (outOfFileMapRange(spec->ws[prevWeightIndex].weight, spec->mfd)) {
                        delete spec->ws[prevWeightIndex].weight;
                    }
                    spec->ws[prevWeightIndex].weight = nullptr;
                }
                if (spec->ws[prevWeightIndex].vec != nullptr) {
                    spec->ws[prevWeightIndex].bytes_of_vec = 0;
                    if (outOfFileMapRange(spec->ws[prevWeightIndex].vec, spec->mfd)) {
                        delete spec->ws[prevWeightIndex].vec;
                    }
                    spec->ws[prevWeightIndex].vec = nullptr;
                }
                if (spec->ws[curWeightIndex].weight != nullptr) {
                    spec->ws[curWeightIndex].bytes_of_weight = 0;
                    if (outOfFileMapRange(spec->ws[curWeightIndex].weight, spec->mfd)) {
                        delete spec->ws[curWeightIndex].weight;
                    }
                    spec->ws[curWeightIndex].weight = nullptr;
                }
                if (spec->ws[curWeightIndex].vec != nullptr) {
                    spec->ws[curWeightIndex].bytes_of_vec = 0;
                    if (outOfFileMapRange(spec->ws[curWeightIndex].vec, spec->mfd)) {
                        delete spec->ws[curWeightIndex].vec;
                    }
                    spec->ws[curWeightIndex].vec = nullptr;
                }
                // FC params
                spec->ops[prevOpIndex].ps.fc_spec.num_slices++;
                U32 slices = spec->ops[prevOpIndex].ps.fc_spec.num_slices;
                CHECK_REQUIREMENT(
                    slices <= sizeof(spec->ops[prevOpIndex].ps.fc_spec.slice_point) / sizeof(int));
                spec->ops[prevOpIndex].ps.fc_spec.slice_point[slices - 1] =
                    spec->ops[curOpIndex].ps.fc_spec.num_outputs;
                spec->ops[prevOpIndex].ps.fc_spec.num_outputs +=
                    spec->ops[curOpIndex].ps.fc_spec.num_outputs;
                // operator spec: the merged FC now owns both output tensors.
                spec->ops[prevOpIndex].num_outputs = slices;
                I8 **names = (I8 **)mt_new_storage(slices * sizeof(I8 *));
                for (U32 j = 0; j < slices - 1; j++) {
                    names[j] = spec->ops[prevOpIndex].output_tensors_name[j];
                }
                names[slices - 1] = spec->ops[curOpIndex].output_tensors_name[0];
                delete spec->ops[prevOpIndex].output_tensors_name;
                delete spec->ops[curOpIndex].output_tensors_name;
                spec->ops[curOpIndex].output_tensors_name = nullptr;
                spec->ops[curOpIndex].num_outputs = 0;
                spec->ops[prevOpIndex].output_tensors_name = names;
                // weight spec
                spec->ws[prevWeightIndex].bytes_of_weight = weightSize;
                spec->ws[prevWeightIndex].weight = weight;
                spec->ws[prevWeightIndex].bytes_of_vec = vecSize;
                spec->ws[prevWeightIndex].vec = vec;
                hasOptimized = true;
                setOperatorInvalid(spec, curOpIndex);
                i = curOpIndex;
            }
        }
        return hasOptimized;
    }
};
#endif
|
#!/usr/bin/env python
# oculus.py
# Subscribes to camera output, publishes data about what it sees.
# Determines what to look for based on what is being subscribed to.
import rospy
from cv_bridge import CvBridge
from sensor_msgs.msg import Image, CompressedImage
from riptide_vision import RiptideVision
from gate_processor import GateProcessor
from pole_processor import PoleProcessor
from riptide_msgs.msg import TaskAlignment, BoundingBox
from geometry_msgs.msg import Point
import time
class Oculus:
    """Subscribes to camera output and publishes task-alignment data.

    Which object is searched for (gate vs. pole) is chosen dynamically,
    based on which downstream processor currently has subscribers.
    Python 2 code (print statements); runs under ROS (rospy).
    """

    # Class constants
    SHAKE_THRESHOLD = 20  # Allowable amount of difference between positions
    MAX_SAMPLES = 5       # Number of previous positions to store for averaging
    DEBUG = True          # Setting to true will publish processed images on debug topic
    MODE_NONE = -1
    MODE_GATE = 0         # Detect gate mode
    MODE_POLE = 1         # Detect pole mode

    def __init__(self):
        # Debug image output, alignment output, and raw camera input.
        self.image_pub = rospy.Publisher("/forward/processed/compressed", CompressedImage, queue_size=1)
        self.alignment_pub = rospy.Publisher("/task/gate/alignment", TaskAlignment, queue_size=1)
        self.fwd_sub = rospy.Subscriber("/forward/image_raw", Image, self.image_callback, queue_size=1)
        self.bridge = CvBridge()
        # Ring of recent detections used for position smoothing.
        self.prev_pos = list()
        self.mode = self.MODE_NONE
        self.gate_processor = GateProcessor()
        self.pole_processor = PoleProcessor()

    def update_mode(self, mode, topic=None):
        # Re-point the alignment publisher at the topic for the new mode.
        self.alignment_pub.unregister()
        if (mode is not self.MODE_NONE):
            self.alignment_pub = rospy.Publisher(topic, TaskAlignment, queue_size=1)
            print "Publishing on " + topic + "."
        self.mode = mode
        print "Switched to mode " + str(mode)

    # Called whenever a camera frame is availale.
    def image_callback(self, data):
        # Convert image message to something OpenCV can deal with
        cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
        pos = None
        bbox = None
        # Process the image based on which topic is being subscribed to
        # Set the object data pub to publish on the correct topic
        # Use mode to avoid creating a new publisher each time
        if (self.gate_processor.IsConnected()):
            if (self.mode != self.MODE_GATE):
                self.update_mode(self.MODE_GATE, "task/gate/alignment")
            t = time.time()  # NOTE(review): unused — timing leftover?
            pos, bbox = self.gate_processor.Process(cv_image, self.image_pub)
        elif (self.pole_processor.IsConnected()):
            if (self.mode != self.MODE_POLE):
                self.update_mode(self.MODE_POLE, "task/pole/alignment")
            pos, bbox = self.pole_processor.Process(cv_image, self.image_pub)
        else:
            # Nobody is listening: drop back to idle and clear history.
            if self.mode is not self.MODE_NONE:
                self.update_mode(self.MODE_NONE)
                self.reset_processor()
        if self.mode is not self.MODE_NONE:
            self.process_alignment_data(pos, bbox)

    # Function: reset_processor
    # Parameters:
    #   self
    # Description:
    #   Deletes any stored information in the processor to allow it to switch
    #   into a different processing mode with a clean slate
    def reset_processor(self):
        del self.prev_pos[:]

    # Function: pos_is_valid
    # Parameters:
    #   self
    #   pos: Position to check
    # Description:
    #   Returns whether or not a position is within the given
    #   SHAKE_THRESHOLD. Prevents sporatic false positives from skewing
    #   the average position.
    def pos_is_valid(self, pos):
        x = True
        y = True
        z = True
        if (len(self.prev_pos) > 0):
            # Compare against the most recent accepted position.
            x = abs(self.prev_pos[0].x - pos.x) < self.SHAKE_THRESHOLD
            y = abs(self.prev_pos[0].y - pos.y) < self.SHAKE_THRESHOLD
            z = abs(self.prev_pos[0].z - pos.z) < self.SHAKE_THRESHOLD
        return x and y and z

    # Function: get_new_average_pos
    # Parameters:
    #   self
    #   new_pos: Position to be added to the average
    # Description:
    #   Returns an average position of *new_pos* and the previous
    #   *MAX_SAMPLES* positions
    def get_new_average_pos(self, new_pos):
        avg_pos = Point()
        length = len(self.prev_pos)
        if (length == self.MAX_SAMPLES):
            self.prev_pos.pop()
        self.prev_pos.insert(0, new_pos)
        # NOTE(review): when the buffer was already full, `length` ends up
        # MAX_SAMPLES + 1 while the list holds MAX_SAMPLES elements, so the
        # average below divides by one too many — confirm intended.
        length += 1
        xt = 0
        yt = 0
        zt = 0
        for p in self.prev_pos:
            xt += p.x
            yt += p.y
            zt += p.z
        avg_pos.x = xt / length
        avg_pos.y = yt / length
        avg_pos.z = zt / length
        return avg_pos

    # Function: process_object_data
    # Parameters:
    #   self
    #   pos: Position of the object
    # Description:
    #   Publishes an object data message using the *alignment_pub*.
    #   *pos* is used to generate a new average position that is added to
    #   the message.
    def process_alignment_data(self, pos, bbox):
        align_msg = TaskAlignment()
        align_msg.header.stamp = rospy.Time.now()  # Timestamp
        # Check if we saw the object
        # If yes, add the new position to the average and publish
        # If no, set visible to false and publish
        if (pos is not None):
            align_msg.visible = True
            if (self.pos_is_valid(pos)):
                align_msg.relative_pos = self.get_new_average_pos(pos)
                if bbox is not None:
                    align_msg.bbox = bbox
            else:
                # Outlier detection: discard history rather than average it in.
                self.reset_processor()
        else:
            align_msg.visible = False
        self.alignment_pub.publish(align_msg)
def main():
    # Register the node with ROS, construct the vision pipeline, and block
    # until the node is shut down (callbacks run from rospy's spin loop).
    rospy.init_node('oculus')
    oc = Oculus()
    rospy.spin()


if __name__ == "__main__":
    main()
|
<filename>backend/estimator_2d.py
"""
@author: <NAME>
@contact: <EMAIL>
"""
import sys
import os.path as osp
project_path = osp.abspath ( osp.join ( osp.dirname ( __file__ ), '..' ) )
if project_path not in sys.path:
sys.path.insert ( 0, project_path )
from backend.light_head_rcnn.person_detector import PersonDetector
from backend.tf_cpn.Detector2D import Detector2D
class Estimator_2d ( object ):
    """Two-stage 2D pose estimator: person detection followed by 2D keypoints."""

    def __init__(self, DEBUGGING=False):
        # DEBUGGING: when True, both sub-detectors visualize their results.
        self.bbox_detector = PersonDetector ( show_image=DEBUGGING )
        self.pose_detector_2d = Detector2D ( show_image=DEBUGGING )

    def estimate_2d(self, img, img_id):
        # Detect person boxes first, then run 2D pose estimation on them.
        bbox_result = self.bbox_detector.detect ( img, img_id )
        dump_results = self.pose_detector_2d.detect ( bbox_result )
        return dump_results
if __name__ == '__main__':
    # Smoke test: run the 2D estimation pipeline on one sample frame.
    import cv2
    img = cv2.imread ( 'datasets/Shelf/Camera0/img_000000.png' )
    est = Estimator_2d ()
    est.estimate_2d ( img, 0 )
|
<gh_stars>10-100
#include <stdio.h>
#include "logger.h"
#include "loggerconf.h"
/*
 * Reads a logger configuration file path from argv[1], configures the
 * logger with it, and emits one message at each severity level.
 *
 * Bug fixes vs. the original:
 *  - strncpy(filename, argv[1], strlen(argv[1])) neither limited the copy
 *    to sizeof(filename) (stack buffer overflow on long arguments) nor
 *    guaranteed NUL termination; snprintf does both.
 *  - the DEBUG-level message read "degug".
 */
int main(int argc, char* argv[]) {
    char filename[256];

    if (argc <= 1) {
        printf("usage: %s <conf file>\n", argv[0]);
        return 1;
    }
    /* Bounds-checked copy with guaranteed NUL termination. */
    snprintf(filename, sizeof(filename), "%s", argv[1]);
    logger_configure(filename);

    LOG_TRACE("trace");
    LOG_DEBUG("debug");
    LOG_INFO("info");
    LOG_WARN("warn");
    LOG_ERROR("error");
    LOG_FATAL("fatal");
    return 0;
}
|
# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Create, possibly migrate from, the unencrypted stateful partition, and bind
# mount the /var and /home/chronos mounts from the encrypted filesystem
# /mnt/stateful_partition/encrypted, all managed by the "mount-encrypted"
# helper. Takes the same arguments as mount-encrypted. Since /var is managed by
# mount-encrypted, it should not be created in the unencrypted stateful
# partition. Its mount point in the root filesystem exists already from the
# rootfs image. Since /home is still mounted from the unencrypted stateful
# partition, having /home/chronos already doesn't matter. It will be created by
# mount-encrypted if it is missing. These mounts inherit nodev,noexec,nosuid
# from the encrypted filesystem /mnt/stateful_partition/encrypted.
# Delegates to the mount-encrypted helper (forwarding all arguments) to set
# up the encrypted /var and /home/chronos mounts; all helper output is
# captured in /run/mount_encrypted/mount-encrypted.log.
mount_var_and_home_chronos() {
  mount-encrypted "$@" >/run/mount_encrypted/mount-encrypted.log 2>&1
}
# Give mount-encrypted umount 10 times to retry, otherwise
# it will fail with 'device is busy' because lazy umount does not finish
# clearing all reference points yet. Check crosbug.com/p/21345.
# Unmounts the encrypted stateful mounts, retrying up to 10 times because a
# prior lazy umount may still hold references (see crosbug.com/p/21345).
# Returns 0 if the partition was not mounted or a retry succeeded; otherwise
# returns mount-encrypted's last exit status.
umount_var_and_home_chronos() {
  # Check if the encrypted stateful partition is mounted.
  if ! mountpoint -q "/mnt/stateful_partition/encrypted"; then
    return 0
  fi
  local rc=0
  for _ in 1 2 3 4 5 6 7 8 9 10; do
    mount-encrypted umount
    rc="$?"
    if [ "${rc}" -eq "0" ]; then
      break
    fi
    # Brief pause before retrying while lazy-umount references drain.
    sleep 0.1
  done
  return "${rc}"
}
|
import React from 'react'
import PropTypes from 'prop-types'
import { makeStyles } from '@material-ui/styles'
import ProgressBar from 'core/components/progress/ProgressBar'
import Tooltip from '@material-ui/core/Tooltip'
import { identity } from 'ramda'
// Styles for the single-row usage display: fixed-width label, value, and
// progress-bar columns laid out horizontally.
const useStyles = makeStyles(theme => ({
  root: {
    display: 'flex',
    flexFlow: 'row nowrap',
    paddingBottom: theme.spacing(0.5),
  },
  label: {
    fontSize: 12,
    width: 58,
    fontWeight: 'bold',
  },
  value: {
    fontSize: 12,
    whiteSpace: 'nowrap',
    width: 77,
  },
  percent: {
    width: 142,
  },
}))
// Renders one resource-usage row (label, "current units", progress bar)
// with a tooltip showing "current of max" usage. `valueConverter` maps raw
// stat values to display units; `precision` controls decimal places.
const ResourceUsageTable = ({ label, valueConverter, usedText, units, stats, precision }) => {
  const classes = useStyles()
  const { current, max, percent } = stats
  const curStr = valueConverter(current).toFixed(precision)
  const maxStr = valueConverter(max).toFixed(precision)
  const percentStr = `${Math.round(percent)}% ${usedText}`
  return (
    <Tooltip title={`${curStr} ${units} of ${maxStr} ${units} ${usedText}`}>
      <div className={classes.root}>
        <span className={classes.label}>{label}:</span>
        <span className={classes.value}>{curStr} {units}</span>
        <span className={classes.percent}>
          <ProgressBar
            width={140}
            percent={percent}
            label={percentStr}
          />
        </span>
      </div>
    </Tooltip>
  )
}
// Prop contracts for ResourceUsageTable.
ResourceUsageTable.propTypes = {
  valueConverter: PropTypes.func,
  precision: PropTypes.number,
  label: PropTypes.string,
  usedText: PropTypes.string,
  units: PropTypes.string,
  stats: PropTypes.shape({
    current: PropTypes.number,
    max: PropTypes.number,
    percent: PropTypes.number,
  }),
}
// Defaults: zeroed stats, identity conversion, "used" suffix, no units,
// two decimal places.
ResourceUsageTable.defaultProps = {
  stats: { current: 0, max: 0, percent: 0 },
  valueConverter: identity,
  usedText: 'used',
  units: '',
  precision: 2,
}
export default ResourceUsageTable
|
package actionScope;
/**
 * Empty placeholder class in the {@code actionScope} package; it declares
 * no members of its own.
 */
public class Son {
}
const Discord = require('discord.js')
let request, response
request = require('async-request')
module.exports = {
name: 'mojangStatus',
description: 'Generates a command for head drops',
aliases: ['mojang', 'minecraftStatus'],
async execute (message, args) {
const colors = {'green': '0x55ACEE', 'yellow': '0x292F33', 'red': '0xDD2E44'}
function colorToEmoji (color) {
switch (color) {
case 'green':
return ':large_blue_circle:'
case 'yellow':
return ':black_circle:'
case 'red':
return ':red_circle:'
default:
return ':white_circle:'
}
}
try {
// Find out the MC UUID of the name input
response = await request('https://status.mojang.com/check')
const status = JSON.parse(response.body)
let logMessage = new Discord.RichEmbed()
.setTitle('Mojang Service status')
.setTimestamp()
let summary = 'green'
status.forEach((status) => {
if ((summary === 'green' && status === 'yellow') || status === 'red') { summary = status }
let service = Object.keys(status)
logMessage.addField(`${colorToEmoji(status[service[0]])} ${service[0]}`, '\u200B', true)
})
logMessage
.setColor(colors[summary])
.setDescription(message.client.eris.getRandomMessage('mojangStatusCommand', summary))
message.channel.send(message.client.eris.getRandomMessage('general', 'delivery'), logMessage)
} catch (e) {
let logMessage = new Discord.RichEmbed()
.setTitle('Service status is currently unavailable')
.setColor(0xAA00AA)
.setDescription(e)
message.channel.send(message.client.eris.getRandomMessage('mojangStatusCommand', 'error'), logMessage)
}
}
}
|
#!/bin/bash
# Build lh3/wgsim from source and install the binary plus the evaluation
# script into $PREFIX/bin (conda-style build script; PREFIX is set by the
# build environment).
set -euo pipefail   # abort on any failed command, unset var, or pipe failure
git clone https://github.com/lh3/wgsim   # https instead of http
cd wgsim                                 # set -e aborts if clone/cd failed
gcc -g -O2 -Wall -I"${PREFIX}/include" -L"${PREFIX}/lib" -o wgsim wgsim.c -lz -lm
cp wgsim wgsim_eval.pl "${PREFIX}/bin/"
|
<reponame>lovelyHarper/apl-suggester
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
import { PackageLoader, ILoadedResult } from './PackageLoader';
/**
 * Loader to load third party custom components to internal data model.
 * Caches duplicated fetching by using the url as a unique key.
 */
export class ThirdPartyComponentLoader {
    /**
     * Cache of in-flight/completed package loads, keyed by source-or-name
     * (plus version when present).
     * @private
     * @memberof ThirdPartyComponentLoader
     */
    private cache : Map<string, Promise<ILoadedResult[]>>;

    private static instance : ThirdPartyComponentLoader;

    /**
     * Creates an instance of ThirdPartyComponentLoader.
     * Public only so tests can build isolated instances.
     * @memberof ThirdPartyComponentLoader
     */
    public constructor() {
        this.cache = new Map<string, Promise<ILoadedResult[]>>();
    }

    /**
     * Returns the shared singleton, creating it on first use.
     * @static
     * @returns
     * @memberof ThirdPartyComponentLoader
     */
    public static getInstance() {
        if (!ThirdPartyComponentLoader.instance) {
            ThirdPartyComponentLoader.instance = new ThirdPartyComponentLoader();
        }
        return ThirdPartyComponentLoader.instance;
    }

    /**
     * Loads every package in the import list, flattening the per-package results.
     * @param {any[]} importPackages
     * @returns {Promise<ILoadedResult[]>}
     * @memberof ThirdPartyComponentLoader
     */
    public async load(importPackages : any[]) : Promise<ILoadedResult[]> {
        if (!importPackages) {
            return [];
        }
        const perPackage = await Promise.all(
            importPackages.map((pkg) => this.loadSinglePackage(pkg))
        );
        return perPackage.reduce((flat, results) => flat.concat(results), []);
    }

    // Loads one package, reusing any cached (possibly still pending) load.
    private loadSinglePackage(singlePackage : object) : Promise<ILoadedResult[]> {
        const key = this.cacheKeyFor(singlePackage);
        const cached = this.cache.get(key);
        if (cached) {
            return cached;
        }
        const loading = new PackageLoader({}).load([singlePackage]);
        this.cache.set(key, loading);
        return loading;
    }

    // Cache key: prefer 'source' over 'name'; append '_<version>' when given.
    private cacheKeyFor(pkg : object) : string {
        let key = pkg['source'] ? pkg['source'] : pkg['name'];
        if (pkg['version']) {
            key += '_' + pkg['version'];
        }
        return key;
    }
}
|
<reponame>samlanning/hub-detect<filename>hub-detect/src/test/groovy/com/blackducksoftware/integration/hub/detect/detector/cpan/CpanListParserTest.java
/*
* Copyright (C) 2017 Black Duck Software Inc.
* http://www.blackducksoftware.com/
* All rights reserved.
*
* This software is the confidential and proprietary information of
* Black Duck Software ("Confidential Information"). You shall not
* disclose such Confidential Information and shall use it only in
* accordance with the terms of the license agreement you entered into
* with Black Duck Software.
*/
package com.blackducksoftware.integration.hub.detect.detector.cpan;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.springframework.util.StringUtils;
import com.blackducksoftware.integration.hub.detect.testutils.DependencyGraphResourceTestUtil;
import com.blackducksoftware.integration.hub.detect.testutils.TestUtil;
import com.synopsys.integration.hub.bdio.graph.DependencyGraph;
import com.synopsys.integration.hub.bdio.model.externalid.ExternalIdFactory;
public class CpanListParserTest {
    private final TestUtil testUtil = new TestUtil();
    private final CpanListParser cpanListParser = new CpanListParser(new ExternalIdFactory());
    // Fixture inputs: output of `cpan -l` and of `cpan-outdated`/show-deps style listings.
    private final List<String> cpanListText = Arrays.asList(testUtil.getResourceAsUTF8String("/cpan/cpanList.txt").split("\n"));
    private final List<String> showDepsText = Arrays.asList(testUtil.getResourceAsUTF8String("/cpan/showDeps.txt").split("\n"));

    // Verifies that only well-formed "Name\tVersion" lines make it into the map
    // and that malformed lines (wrong token count) are silently skipped.
    @Test
    public void parseTest() {
        String cpanList = "Test::More\t1.2.3" + "\n";
        cpanList += "Test::Less\t1.2.4" + "\n";
        cpanList += "This is an invalid line" + "\n";
        cpanList += "This\t1\t1also\t1invalid" + "\n";
        cpanList += "Invalid" + "\n";
        final List<String> tokens = Arrays.asList(StringUtils.tokenizeToStringArray(cpanList, "\n"));
        final Map<String, String> nodeMap = cpanListParser.createNameVersionMap(tokens);
        assertEquals(2, nodeMap.size());
        assertNotNull(nodeMap.get("Test::More"));
        assertNotNull(nodeMap.get("Test::Less"));
        assertEquals("1.2.3", nodeMap.get("Test::More"));
        assertEquals("1.2.4", nodeMap.get("Test::Less"));
    }

    @Test
    public void getDirectModuleNamesTest() {
        final List<String> names = cpanListParser.getDirectModuleNames(showDepsText);
        assertEquals(4, names.size());
        assertTrue(names.contains("ExtUtils::MakeMaker"));
        assertTrue(names.contains("Test::More"));
        assertTrue(names.contains("perl"));
        // NOTE(review): duplicate of the first assertion above — the size check
        // expects 4 names but only 3 distinct modules are asserted; likely a
        // fourth module from showDeps.txt was intended here. Confirm against
        // the fixture before changing.
        assertTrue(names.contains("ExtUtils::MakeMaker"));
    }

    // Golden-file check: full parse output must match the expected dependency graph.
    @Test
    public void makeDependencyNodesTest() {
        final DependencyGraph dependencyGraph = cpanListParser.parse(cpanListText, showDepsText);
        DependencyGraphResourceTestUtil.assertGraph("/cpan/expectedDependencyNodes_graph.json", dependencyGraph);
    }
}
|
<reponame>lyutl/2021-2-level-ctlr
"""
Implementation of POSFrequencyPipeline for score ten only.
"""
import json
import re
from constants import ASSETS_PATH
from core_utils.article import ArtifactType
from core_utils.visualizer import visualize
from pipeline import CorpusManager, validate_dataset
class EmptyFileError(Exception):
    """
    Raised when an article's single-tagged text file exists but is empty.
    """
class POSFrequencyPipeline:
    """Computes, persists, and visualizes POS-tag frequencies per article."""

    def __init__(self, corpus_manager: CorpusManager):
        """Keeps a reference to the corpus manager whose articles are processed."""
        self.corpus_manager = corpus_manager

    def run(self):
        """Processes every article: count POS tags, update meta, save a chart."""
        for article in self.corpus_manager.get_articles().values():
            frequencies = self._calculate_frequencies(article)
            self._write_to_meta(article, frequencies)
            visualize(statistics=frequencies,
                      path_to_save=ASSETS_PATH / f'{article.article_id}_image.png')

    def _calculate_frequencies(self, article):
        """Counts POS tags (the ``<TAG`` openings) in the single-tagged text.

        Raises EmptyFileError when the tagged file contains no text.
        """
        with open(article.get_file_path(ArtifactType.single_tagged), 'r', encoding='utf-8') as file:
            text = file.read()
        if not text:
            raise EmptyFileError
        frequencies = {}
        for tag in re.findall(r'<([A-Z]+)', text):
            frequencies[tag] = frequencies.get(tag, 0) + 1
        return frequencies

    def _write_to_meta(self, article, pos_freq_dict):
        """Merges the computed POS frequencies into the article's meta JSON file."""
        meta_path = ASSETS_PATH / article.get_meta_file_path()
        with open(meta_path, 'r', encoding='utf-8') as meta:
            contents = json.load(meta)
        contents.update({'pos_frequencies': pos_freq_dict})
        with open(meta_path, 'w', encoding='utf-8') as meta:
            json.dump(contents, meta, ensure_ascii=False, indent=4, separators=(',', ':'))
def main():
    """Entry point: validate the dataset, then run the POS frequency pipeline."""
    validate_dataset(ASSETS_PATH)
    POSFrequencyPipeline(CorpusManager(ASSETS_PATH)).run()


if __name__ == "__main__":
    main()
|
<reponame>Pluxbox/radiomanager-java-client
/*
* RadioManager
* RadioManager
*
* OpenAPI spec version: 2.0
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.pluxbox.radiomanager.api.models;
import java.util.Objects;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import com.pluxbox.radiomanager.api.models.BroadcastRelationsModelType;
import com.pluxbox.radiomanager.api.models.Campaign;
import com.pluxbox.radiomanager.api.models.CampaignOutputOnly;
import com.pluxbox.radiomanager.api.models.CampaignRelations;
import com.pluxbox.radiomanager.api.models.CampaignRelationsItems;
import com.pluxbox.radiomanager.api.models.CampaignTemplateItem;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.time.OffsetDateTime;
/**
* CampaignResult
*/
public class CampaignResult {
  // NOTE: swagger-codegen generated model (see file header: "Do not edit the
  // class manually") — regenerate from the OpenAPI spec instead of hand-editing.

  // --- Fields; @SerializedName values are the JSON keys in the RadioManager API ---
  @SerializedName("id")
  private Long id = null;

  @SerializedName("updated_at")
  private OffsetDateTime updatedAt = null;

  @SerializedName("created_at")
  private OffsetDateTime createdAt = null;

  @SerializedName("deleted_at")
  private OffsetDateTime deletedAt = null;

  @SerializedName("item")
  private CampaignTemplateItem item = null;

  @SerializedName("_external_station_id")
  private Long externalStationId = null;

  @SerializedName("model_type_id")
  private Long modelTypeId = null;

  @SerializedName("field_values")
  private Object fieldValues = null;

  @SerializedName("title")
  private String title = null;

  @SerializedName("start")
  private OffsetDateTime start = null;

  @SerializedName("stop")
  private OffsetDateTime stop = null;

  @SerializedName("recommended")
  private Boolean recommended = null;

  @SerializedName("description")
  private String description = null;

  @SerializedName("items")
  private CampaignRelationsItems items = null;

  @SerializedName("model_type")
  private BroadcastRelationsModelType modelType = null;

  // --- Fluent setters: each assigns the field and returns this for chaining ---
  public CampaignResult id(Long id) {
    this.id = id;
    return this;
  }

  /**
   * Get id
   * @return id
   **/
  @ApiModelProperty(example = "1", required = true, value = "")
  public Long getId() {
    return id;
  }

  public void setId(Long id) {
    this.id = id;
  }

  public CampaignResult updatedAt(OffsetDateTime updatedAt) {
    this.updatedAt = updatedAt;
    return this;
  }

  /**
   * Get updatedAt
   * @return updatedAt
   **/
  @ApiModelProperty(example = "2016-01-11T22:01:11+02:00", required = true, value = "")
  public OffsetDateTime getUpdatedAt() {
    return updatedAt;
  }

  public void setUpdatedAt(OffsetDateTime updatedAt) {
    this.updatedAt = updatedAt;
  }

  public CampaignResult createdAt(OffsetDateTime createdAt) {
    this.createdAt = createdAt;
    return this;
  }

  /**
   * Get createdAt
   * @return createdAt
   **/
  @ApiModelProperty(example = "2016-01-11T22:01:11+02:00", required = true, value = "")
  public OffsetDateTime getCreatedAt() {
    return createdAt;
  }

  public void setCreatedAt(OffsetDateTime createdAt) {
    this.createdAt = createdAt;
  }

  public CampaignResult deletedAt(OffsetDateTime deletedAt) {
    this.deletedAt = deletedAt;
    return this;
  }

  /**
   * Get deletedAt
   * @return deletedAt
   **/
  @ApiModelProperty(example = "2016-01-11T22:01:11+02:00", required = true, value = "")
  public OffsetDateTime getDeletedAt() {
    return deletedAt;
  }

  public void setDeletedAt(OffsetDateTime deletedAt) {
    this.deletedAt = deletedAt;
  }

  public CampaignResult item(CampaignTemplateItem item) {
    this.item = item;
    return this;
  }

  /**
   * Get item
   * @return item
   **/
  @ApiModelProperty(value = "")
  public CampaignTemplateItem getItem() {
    return item;
  }

  public void setItem(CampaignTemplateItem item) {
    this.item = item;
  }

  public CampaignResult externalStationId(Long externalStationId) {
    this.externalStationId = externalStationId;
    return this;
  }

  /**
   * Get externalStationId
   * @return externalStationId
   **/
  @ApiModelProperty(value = "")
  public Long getExternalStationId() {
    return externalStationId;
  }

  public void setExternalStationId(Long externalStationId) {
    this.externalStationId = externalStationId;
  }

  public CampaignResult modelTypeId(Long modelTypeId) {
    this.modelTypeId = modelTypeId;
    return this;
  }

  /**
   * Get modelTypeId
   * @return modelTypeId
   **/
  @ApiModelProperty(example = "1", required = true, value = "")
  public Long getModelTypeId() {
    return modelTypeId;
  }

  public void setModelTypeId(Long modelTypeId) {
    this.modelTypeId = modelTypeId;
  }

  public CampaignResult fieldValues(Object fieldValues) {
    this.fieldValues = fieldValues;
    return this;
  }

  /**
   * Get fieldValues
   * @return fieldValues
   **/
  @ApiModelProperty(value = "")
  public Object getFieldValues() {
    return fieldValues;
  }

  public void setFieldValues(Object fieldValues) {
    this.fieldValues = fieldValues;
  }

  public CampaignResult title(String title) {
    this.title = title;
    return this;
  }

  /**
   * Get title
   * @return title
   **/
  @ApiModelProperty(example = "FooBar", value = "")
  public String getTitle() {
    return title;
  }

  public void setTitle(String title) {
    this.title = title;
  }

  public CampaignResult start(OffsetDateTime start) {
    this.start = start;
    return this;
  }

  /**
   * Get start
   * @return start
   **/
  @ApiModelProperty(example = "2016-01-11T22:01:11+02:00", required = true, value = "")
  public OffsetDateTime getStart() {
    return start;
  }

  public void setStart(OffsetDateTime start) {
    this.start = start;
  }

  public CampaignResult stop(OffsetDateTime stop) {
    this.stop = stop;
    return this;
  }

  /**
   * Get stop
   * @return stop
   **/
  @ApiModelProperty(example = "2016-01-11T22:01:11+02:00", required = true, value = "")
  public OffsetDateTime getStop() {
    return stop;
  }

  public void setStop(OffsetDateTime stop) {
    this.stop = stop;
  }

  public CampaignResult recommended(Boolean recommended) {
    this.recommended = recommended;
    return this;
  }

  /**
   * Get recommended
   * @return recommended
   **/
  @ApiModelProperty(example = "true", value = "")
  public Boolean isRecommended() {
    return recommended;
  }

  public void setRecommended(Boolean recommended) {
    this.recommended = recommended;
  }

  public CampaignResult description(String description) {
    this.description = description;
    return this;
  }

  /**
   * Get description
   * @return description
   **/
  @ApiModelProperty(example = "<div class=\\'rm-content\\'></div>\\n", value = "")
  public String getDescription() {
    return description;
  }

  public void setDescription(String description) {
    this.description = description;
  }

  public CampaignResult items(CampaignRelationsItems items) {
    this.items = items;
    return this;
  }

  /**
   * Get items
   * @return items
   **/
  @ApiModelProperty(value = "")
  public CampaignRelationsItems getItems() {
    return items;
  }

  public void setItems(CampaignRelationsItems items) {
    this.items = items;
  }

  public CampaignResult modelType(BroadcastRelationsModelType modelType) {
    this.modelType = modelType;
    return this;
  }

  /**
   * Get modelType
   * @return modelType
   **/
  @ApiModelProperty(value = "")
  public BroadcastRelationsModelType getModelType() {
    return modelType;
  }

  public void setModelType(BroadcastRelationsModelType modelType) {
    this.modelType = modelType;
  }

  // Value semantics: equality and hash over every field.
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    CampaignResult campaignResult = (CampaignResult) o;
    return Objects.equals(this.id, campaignResult.id) &&
        Objects.equals(this.updatedAt, campaignResult.updatedAt) &&
        Objects.equals(this.createdAt, campaignResult.createdAt) &&
        Objects.equals(this.deletedAt, campaignResult.deletedAt) &&
        Objects.equals(this.item, campaignResult.item) &&
        Objects.equals(this.externalStationId, campaignResult.externalStationId) &&
        Objects.equals(this.modelTypeId, campaignResult.modelTypeId) &&
        Objects.equals(this.fieldValues, campaignResult.fieldValues) &&
        Objects.equals(this.title, campaignResult.title) &&
        Objects.equals(this.start, campaignResult.start) &&
        Objects.equals(this.stop, campaignResult.stop) &&
        Objects.equals(this.recommended, campaignResult.recommended) &&
        Objects.equals(this.description, campaignResult.description) &&
        Objects.equals(this.items, campaignResult.items) &&
        Objects.equals(this.modelType, campaignResult.modelType);
  }

  @Override
  public int hashCode() {
    return Objects.hash(id, updatedAt, createdAt, deletedAt, item, externalStationId, modelTypeId, fieldValues, title, start, stop, recommended, description, items, modelType);
  }

  // Multi-line debug representation; nested objects are indented via toIndentedString.
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class CampaignResult {\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    updatedAt: ").append(toIndentedString(updatedAt)).append("\n");
    sb.append("    createdAt: ").append(toIndentedString(createdAt)).append("\n");
    sb.append("    deletedAt: ").append(toIndentedString(deletedAt)).append("\n");
    sb.append("    item: ").append(toIndentedString(item)).append("\n");
    sb.append("    externalStationId: ").append(toIndentedString(externalStationId)).append("\n");
    sb.append("    modelTypeId: ").append(toIndentedString(modelTypeId)).append("\n");
    sb.append("    fieldValues: ").append(toIndentedString(fieldValues)).append("\n");
    sb.append("    title: ").append(toIndentedString(title)).append("\n");
    sb.append("    start: ").append(toIndentedString(start)).append("\n");
    sb.append("    stop: ").append(toIndentedString(stop)).append("\n");
    sb.append("    recommended: ").append(toIndentedString(recommended)).append("\n");
    sb.append("    description: ").append(toIndentedString(description)).append("\n");
    sb.append("    items: ").append(toIndentedString(items)).append("\n");
    sb.append("    modelType: ").append(toIndentedString(modelType)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
|
<reponame>cprodhomme/active_scaffold<filename>lib/active_scaffold/bridges/carrierwave/carrierwave_bridge.rb
module ActiveScaffold
  module Bridges
    class Carrierwave
      # Mixin prepended into ActiveScaffold's config so models that mount
      # CarrierWave uploaders get multipart forms and upload-aware columns.
      module CarrierwaveBridge
        def initialize(model_id)
          super
          # Only act when the model actually mounts CarrierWave uploaders.
          return unless model.respond_to?(:uploaders) && model.uploaders.present?
          # File uploads require multipart form encoding on both actions.
          update.multipart = true
          create.multipart = true
          model.uploaders.each_key do |field|
            configure_carrierwave_field(field.to_sym)
          end
        end

        private

        # Registers the uploader column and whitelists CarrierWave's companion
        # params (<field>_cache for re-displaying after validation errors,
        # remove_<field> for the deletion checkbox).
        def configure_carrierwave_field(field)
          columns << field
          columns[field].form_ui ||= :carrierwave # :TODO thumbnail
          columns[field].params.add "#{field}_cache"
          columns[field].params.add "remove_#{field}"
        end
      end
    end
  end
end
|
package io.opensphere.mantle.data.geom.style.impl;
import java.util.List;
import java.util.Set;
import org.apache.log4j.Logger;
import io.opensphere.core.Toolbox;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.MapVisualizationStyleCategory;
import io.opensphere.mantle.data.element.MetaDataProvider;
import io.opensphere.mantle.data.geom.MapGeometrySupport;
import io.opensphere.mantle.data.geom.MapLocationGeometrySupport;
import io.opensphere.mantle.data.geom.style.MutableVisualizationStyle;
import io.opensphere.mantle.data.geom.style.ParameterHint;
import io.opensphere.mantle.data.geom.style.VisualizationStyle;
import io.opensphere.mantle.data.geom.style.VisualizationStyleParameter;
import io.opensphere.mantle.data.geom.style.VisualizationStyleParameterFlags;
import io.opensphere.mantle.data.geom.style.impl.ui.AbstractStyleParameterEditorPanel;
import io.opensphere.mantle.data.geom.style.impl.ui.ComboBoxStyleParameterEditorPanel;
import io.opensphere.mantle.data.geom.style.impl.ui.GroupedMiniStyleEditorPanel;
import io.opensphere.mantle.data.geom.style.impl.ui.GroupedStyleParameterEditorPanel;
import io.opensphere.mantle.data.geom.style.impl.ui.PanelBuilder;
import io.opensphere.mantle.data.geom.style.impl.ui.StyleParameterEditorGroupPanel;
import io.opensphere.mantle.data.impl.specialkey.HeadingKey;
import io.opensphere.mantle.data.impl.specialkey.LineOfBearingKey;
/**
* The Class DynamicEllipseFeatureVisualization.
*/
public class DynamicLOBFeatureVisualization extends AbstractLOBFeatureVisualizationStyle
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(DynamicLOBFeatureVisualization.class);

    /** The Constant ourPropertyKeyPrefix. */
    @SuppressWarnings("hiding")
    public static final String ourPropertyKeyPrefix = "DynamicLOBFeatureVisualization";

    /** The Constant ourLOBOrientationColumnKey. */
    public static final String ourLOBOrientationColumnKey = ourPropertyKeyPrefix + ".LOBOrientationColumnKey";

    /** The Constant ourDefaultLOBOrientationParameter. */
    public static final VisualizationStyleParameter ourDefaultLOBOrientationParameter = new VisualizationStyleParameter(
            ourLOBOrientationColumnKey, "Lob Orientation Column", null, String.class,
            new VisualizationStyleParameterFlags(true, true), ParameterHint.hint(false, true));

    /**
     * Instantiates a new dynamic LOB feature visualization.
     *
     * @param tb the {@link Toolbox}
     */
    public DynamicLOBFeatureVisualization(Toolbox tb)
    {
        super(tb);
    }

    /**
     * Instantiates a new dynamic LOB feature visualization.
     *
     * @param tb the {@link Toolbox}
     * @param dtiKey the dti key
     */
    public DynamicLOBFeatureVisualization(Toolbox tb, String dtiKey)
    {
        super(tb, dtiKey);
    }

    @Override
    public DynamicLOBFeatureVisualization clone()
    {
        return (DynamicLOBFeatureVisualization)super.clone();
    }

    @Override
    public DynamicLOBFeatureVisualization deriveForType(String dtiKey)
    {
        // Clone, retarget to the new data type, and re-derive type-specific defaults.
        DynamicLOBFeatureVisualization clone = clone();
        clone.setDTIKey(dtiKey);
        clone.initializeFromDataType();
        return clone;
    }

    @Override
    public AppliesTo getAppliesTo()
    {
        return AppliesTo.INDIVIDUAL_ELEMENT;
    }

    @Override
    public Class<? extends MapGeometrySupport> getConvertedClassType()
    {
        return MapLocationGeometrySupport.class;
    }

    /**
     * Gets the lOB orientation column key.
     *
     * @return the lOB orientation column key
     */
    public String getLOBOrientationColumnKey()
    {
        return (String)getStyleParameterValue(ourLOBOrientationColumnKey);
    }

    @Override
    public GroupedMiniStyleEditorPanel getMiniUIPanel()
    {
        // Prepend a "LOB Column" combo box to the inherited mini editor when the
        // data type exposes metadata columns to choose from.
        GroupedMiniStyleEditorPanel mUIPanel = super.getMiniUIPanel();
        List<AbstractStyleParameterEditorPanel> paramList = New.list();
        MutableVisualizationStyle style = mUIPanel.getChangedStyle();
        DataTypeInfo dti = StyleUtils.getDataTypeInfoFromKey(getToolbox(), style.getDTIKey());
        if (dti != null && dti.getMetaDataInfo() != null && dti.getMetaDataInfo().getKeyCount() > 0)
        {
            paramList.add(new ComboBoxStyleParameterEditorPanel(StyleUtils.createComboBoxMiniPanelBuilder("LOB Column"), style,
                    ourLOBOrientationColumnKey, false, false, true, dti.getMetaDataInfo().getKeyNames()));
            StyleParameterEditorGroupPanel paramGrp = new StyleParameterEditorGroupPanel(null, paramList, false, 1);
            mUIPanel.addGroupAtTop(paramGrp);
        }
        return mUIPanel;
    }

    @Override
    public MapVisualizationStyleCategory getStyleCategory()
    {
        return MapVisualizationStyleCategory.LOCATION_FEATURE;
    }

    @Override
    public String getStyleDescription()
    {
        return "Feature visualization controls for dynamic Line-of-bearing(LOB), where the LOB parameter"
                + " for orientation can be selected from meta-data column values";
    }

    @Override
    public String getStyleName()
    {
        return "Lines of Bearing (Dynamic)";
    }

    @Override
    public GroupedStyleParameterEditorPanel getUIPanel()
    {
        // Full editor: same combo box as the mini panel, but grouped under "Dynamic LOB".
        GroupedStyleParameterEditorPanel uiPanel = super.getUIPanel();
        if (getDTIKey() != null)
        {
            MutableVisualizationStyle style = uiPanel.getChangedStyle();
            DataTypeInfo dti = StyleUtils.getDataTypeInfoFromKey(getToolbox(), style.getDTIKey());
            if (dti != null && dti.getMetaDataInfo() != null && dti.getMetaDataInfo().getKeyCount() > 0)
            {
                List<AbstractStyleParameterEditorPanel> paramList = New.list();
                VisualizationStyleParameter param = style.getStyleParameter(ourLOBOrientationColumnKey);
                paramList.add(new ComboBoxStyleParameterEditorPanel(PanelBuilder.get(param.getName()), style,
                        ourLOBOrientationColumnKey, false, false, true, dti.getMetaDataInfo().getKeyNames()));
                StyleParameterEditorGroupPanel paramGrp = new StyleParameterEditorGroupPanel("Dynamic LOB", paramList);
                uiPanel.addGroup(paramGrp);
            }
        }
        return uiPanel;
    }

    @Override
    public void initialize()
    {
        super.initialize();
        setParameter(ourDefaultLOBOrientationParameter);
    }

    @Override
    public void initialize(Set<VisualizationStyleParameter> paramSet)
    {
        super.initialize(paramSet);
        // Adopt only the parameters that belong to this style's key namespace.
        paramSet.stream().filter(p -> p.getKey() != null && p.getKey().startsWith(ourPropertyKeyPrefix))
                .forEach(this::setParameter);
    }

    @Override
    public void initializeFromDataType()
    {
        // Default the orientation column to the data type's line-of-bearing
        // special key, falling back to its heading key when LOB is absent.
        super.initializeFromDataType();
        if (getDTIKey() != null)
        {
            DataTypeInfo dti = StyleUtils.getDataTypeInfoFromKey(getToolbox(), getDTIKey());
            if (dti != null && dti.getMetaDataInfo() != null)
            {
                String lobOrientKey = dti.getMetaDataInfo().getKeyForSpecialType(LineOfBearingKey.DEFAULT);
                if (lobOrientKey != null)
                {
                    setParameter(ourLOBOrientationColumnKey, lobOrientKey, NO_EVENT_SOURCE);
                }
                else
                {
                    String headingKey = dti.getMetaDataInfo().getKeyForSpecialType(HeadingKey.DEFAULT);
                    if (headingKey != null)
                    {
                        setParameter(ourLOBOrientationColumnKey, headingKey, NO_EVENT_SOURCE);
                    }
                }
            }
        }
    }

    @Override
    public VisualizationStyle newInstance(Toolbox tb)
    {
        VisualizationStyle vs = new DynamicLOBFeatureVisualization(tb);
        vs.initialize();
        return vs;
    }

    /**
     * Sets the lob orientation column key.
     *
     * @param key the key
     * @param source the source
     */
    public void setLOBOrientationColumnKey(String key, Object source)
    {
        StyleUtils.setMetaDataColumnKeyProperty(this, ourLOBOrientationColumnKey, key, source);
    }

    @Override
    public boolean supportsLabels()
    {
        return true;
    }

    @Override
    public Float getLobOrientation(long elementId, MapGeometrySupport mgs, MetaDataProvider mdi)
    {
        Float result = null;
        String ornKey = getLOBOrientationColumnKey();
        if (ornKey != null)
        {
            try
            {
                double ornVal = StyleUtils.convertValueToDouble(mdi.getValue(ornKey));
                result = Float.valueOf((float)ornVal);
            }
            catch (NumberFormatException e)
            {
                // BUGFIX: previously the exception was dropped and the message was
                // missing a space after "data type" ("data typeKEY..."). Include the
                // exception so the stack trace reaches the log. result is still null
                // here (the assignment above never ran), so no reset is needed.
                LOGGER.error("Error converting LOB ORN from data type " + getDTIKey() + " ElId[" + elementId
                        + "] Values: ORN[" + mdi.getValue(ornKey) + "]", e);
            }
        }
        return result;
    }
}
|
<!DOCTYPE html>
<html lang="en">
<head>
    <!-- Declare encoding explicitly; the original page had no doctype or charset. -->
    <meta charset="utf-8">
    <title>Customer Cart</title>
</head>
<body>
    <h1>Customer Cart</h1>
    <div>
        <table>
            <thead>
                <tr>
                    <th>Item</th>
                    <th>Quantity</th>
                    <th>Price</th>
                </tr>
            </thead>
            <tbody>
                <!-- Generate row for each item -->
            </tbody>
        </table>
    </div>
</body>
</html>
package com.example.myapplication;
import android.os.Bundle;
import android.app.Activity;
import android.view.Menu;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.ListView;
public class MainActivity extends Activity {
ArrayAdapter<String> adapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
ListView listView = (ListView) findViewById(R.id.listView);
// Create an ArrayAdapter from your list
adapter = new ArrayAdapter<String>(this,
android.R.layout.simple_list_item_1,
listItems);
listView.setAdapter(adapter);
// connect the searchbar and listview
EditText searchBar = (EditText) findViewById(R.id.searchBar);
listView.setTextFilterEnabled(true);
searchBar.addTextChangedListener(new TextWatcher(){
@Override
public void onTextChanged(CharSequence s, int start, int before, int count){
adapter.getFilter(). filter(s.toString());
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
} |
<gh_stars>1-10
package elasta.webutils.model;
import io.vertx.core.http.HttpMethod;
import lombok.Builder;
import lombok.Value;
import java.util.Objects;
/**
* Created by sohan on 5/10/2017.
*/
@Value
@Builder
public final class UriAndHttpMethodPair {
    // Immutable pair identifying a route: request URI plus its HTTP verb.
    // @Value makes the class value-based (equals/hashCode/getters generated).
    final String uri;
    final HttpMethod httpMethod;

    // Package-private constructor validates both components; Lombok's @Builder
    // routes construction through it, so built instances are never null-valued.
    UriAndHttpMethodPair(String uri, HttpMethod httpMethod) {
        Objects.requireNonNull(uri);
        Objects.requireNonNull(httpMethod);
        this.uri = uri;
        this.httpMethod = httpMethod;
    }
}
|
#!/bin/bash
# Script is brought to you by ATADA_Stakepool, Telegram @atada_stakepool
#load variables from common.sh
# socket Path to the node.socket (also exports socket to CARDANO_NODE_SOCKET_PATH)
# genesisfile Path to the genesis.json
# magicparam TestnetMagic parameter
# cardanocli Path to the cardano-cli executable
# cardanonode Path to the cardano-node executable
. "$(dirname "$0")"/00_common.sh
#Check token-metadata-creator tool if given path is ok, if not try to use the one in the scripts folder
if ! exists "${cardanometa}"; then
#Try the one in the scripts folder
if [[ -f "${scriptDir}/token-metadata-creator" ]]; then cardanometa="${scriptDir}/token-metadata-creator";
else majorError "Path ERROR - Path to the 'token-metadata-creator' binary is not correct or 'token-metadata-creator' binaryfile is missing!\nYou can find it here: https://github.com/input-output-hk/offchain-metadata-tools\nThis is needed to format and sign the NativeAsset Metadata Registry file. Please check your 00_common.sh or common.inc settings."; exit 1; fi
fi
case $# in
1 ) policyName="$(echo $1 | cut -d. -f 1)";
assetName="$(echo $1 | cut -d. -f 2-)"; assetName=$(basename "${assetName}" .asset); #assetName=${assetName//./};
;;
* ) cat >&2 <<EOF
Usage: $(basename $0) <PolicyName.AssetName>
EOF
exit 1;; esac
assetFileName="${policyName}.${assetName}.asset" #save the output assetfilename here, because at that state the assetName is with or without the {} brackets
# Check for needed input files
# The policy id, policy script and signing key are all produced by script 10;
# signing with hardware-wallet policy files (hwsfile) is explicitly unsupported here.
if [ ! -f "${policyName}.policy.id" ]; then echo -e "\n\e[35mERROR - \"${policyName}.policy.id\" id-file does not exist! Please create it first with script 10.\e[0m"; exit 1; fi
if [ ! -f "${policyName}.policy.script" ]; then echo -e "\n\e[35mERROR - \"${policyName}.policy.script\" scriptfile does not exist! Please create it first with script 10.\e[0m"; exit 1; fi
if [ -f "${policyName}.policy.hwsfile" ]; then echo -e "\n\e[35mERROR - \"${policyName}.policy.hwsfile\" - Signing with hardware wallet policies is currently not supported :-( \e[0m"; exit 1; fi
if [ ! -f "${policyName}.policy.skey" ]; then echo -e "\n\e[35mERROR - \"${policyName}.policy.skey\" signing key does not exist! Please create it first with script 10.\e[0m"; exit 1; fi
policyID=$(cat ${policyName}.policy.id)
#Check assetName for alphanummeric / hex
# Three accepted forms:
#   ".asset"          -> empty asset name
#   "{hexpairs}"      -> lowercased, braces stripped; must be full hex pairs
#   plain alnum ASCII -> converted to its hex representation
# In every branch assetName holds the HEX form afterwards.
if [[ "${assetName}" == ".asset" ]]; then assetName="";
elif [[ "${assetName,,}" =~ ^\{([[:xdigit:]][[:xdigit:]]){1,}\}$ ]]; then assetName=${assetName,,}; assetName=${assetName:1:-1}; assetHexName=${assetName} #store given hexname in own variable
elif [[ ! "${assetName}" == "${assetName//[^[:alnum:]]/}" ]]; then echo -e "\e[35mError - Your given AssetName '${assetName}' should only contain alphanummeric chars!
Otherwise you can use the binary hexformat like \"{8ac33ed560000eacce}\" as the assetName! Make sure to use full hex-pairs.\e[0m"; exit 1;
else assetName=$(convert_assetNameASCII2HEX ${assetName})
fi
#assetName is in HEX-Format after this point
if [[ ${#assetName} -gt 64 ]]; then echo -e "\e[35mError - Your given AssetName is too long, maximum of 32 bytes allowed!\e[0m"; exit 1; fi #checking for a length of 64 because a byte is two hexchars
# Bech32 name and registry subject are both derived from policyID + hex asset name.
assetNameBech=$(convert_tokenName2BECH "${policyID}${assetName}" "")
assetSubject="${policyID}${assetName}"
echo -e "\e[0mGenerating Metadata for the Asset \e[32m'${assetName}' -> '$(convert_assetNameHEX2ASCII_ifpossible ${assetName})'\e[0m with Policy \e[32m'${policyName}'\e[0m: ${assetNameBech}"
#set timetolife (inherent hereafter) to the currentTTL or to the value set in the policy.script for the "before" slot (limited policy lifespan)
# jq failing (no "before" script entry / parse error) falls back to "unlimited".
ttlFromScript=$(cat ${policyName}.policy.script | jq -r ".scripts[] | select(.type == \"before\") | .slot" 2> /dev/null || echo "unlimited")
if [[ ! ${ttlFromScript} == "unlimited" ]]; then ttl=${ttlFromScript}; else ttl=$(get_currentTTL); fi
echo
echo -e "\e[0mPolicy valid before Slot-Height:\e[33m ${ttlFromScript}\e[0m"
echo
#If there is no Asset-File, build up the skeleton and add some initial data
# First run for this asset: write a fresh JSON file with user-editable meta*
# fields on top and generated bookkeeping fields (policy, subject, sequence)
# below, then exit so the user can fill in the meta entries before rerunning.
if [ ! -f "${assetFileName}" ]; then
assetFileJSON="{}"
assetFileJSON=$(jq ". += {metaName: \"${assetName:0:50}\",
			metaDescription: \"\",
			\"---\": \"--- Optional additional info ---\",
			metaDecimals: \"\",
			metaTicker: \"\",
			metaUrl: \"\",
			metaLogoPNG: \"\",
			\"===\": \"--- DO NOT EDIT BELOW THIS LINE !!! ---\",
			minted: \"0\",
			name: \"${assetName}\",
			hexname: \"${assetHexName}\",
			bechName: \"${assetNameBech}\",
			policyID: \"${policyID}\",
			policyValidBeforeSlot: \"${ttlFromScript}\",
			subject: \"${assetSubject}\",
			sequenceNumber: \"0\",
			lastUpdate: \"$(date -R)\",
			lastAction: \"created Asset-File\"}" <<< ${assetFileJSON})
# The file is written unlocked on purpose: the user must edit it next.
file_unlock ${assetFileName}
echo -e "${assetFileJSON}" > ${assetFileName}
echo -e "\e[0mAsset-File: \e[32m ${assetFileName} \e[90m\n"
cat ${assetFileName}
echo
echo -e "\e[33mA new Asset-File \e[32m'${assetFileName}'\e[33m was created. Please edit the values for the meta-Entries\nto fit your needs. After that, save the file and rerun this script again!"
echo -e "\e[0m\n"
exit
fi
#Asset-File exists, lets read out the parameters and save them back in the order shown above
#so we have a better editing format in there
#Build Skeleton, all available entries in the real assetFileJSON will overwrite the skeleton entries
# The skeleton guarantees every expected key exists with a default; merging the
# real file over it (jq recursive merge below) both fills gaps and normalizes
# the key order for nicer manual editing.
assetFileSkeletonJSON=$(jq ". += {metaName: \"${assetName}\",
			metaDescription: \"\",
			\"---\": \"--- Optional additional info ---\",
			metaDecimals: \"0\",
			metaTicker: \"\",
			metaUrl: \"\",
			metaLogoPNG: \"\",
			\"===\": \"--- DO NOT EDIT BELOW THIS LINE !!! ---\",
			minted: \"0\",
			name: \"${assetName}\",
			hexname: \"\",
			bechName: \"${assetNameBech}\",
			policyID: \"${policyID}\",
			policyValidBeforeSlot: \"${ttlFromScript}\",
			subject: \"${assetSubject}\",
			sequenceNumber: \"0\",
			lastUpdate: \"$(date -R)\",
			lastAction: \"update Asset-File\"}" <<< "{}")
#Read in the current file
assetFileJSON=$(cat ${assetFileName})
#Combine the Skeleton with the real one and
# jq -s slurps both objects into an array; `reduce ... . * $item` deep-merges
# them left to right, so real values win over skeleton defaults.
assetFileJSON=$(echo "${assetFileSkeletonJSON} ${assetFileJSON}" | jq -rs 'reduce .[] as $item ({}; . * $item)')
#Write it out again and lock it
file_unlock ${assetFileName}
echo -e "${assetFileJSON}" > ${assetFileName}
file_lock ${assetFileName}
echo -e "\e[0mAsset-File: \e[32m ${assetFileName} \e[90m\n"
echo "${assetFileJSON}"
echo
#So, now we're at the point were we can work with a Full-Data JSON file, now lets check about each
#Metadata Registry parameter
# Each accepted parameter is validated and appended to creatorArray, which is
# later handed to the token-metadata-creator tool as its argument list.
assetSubject=$(jq -r ".subject" <<< ${assetFileJSON})
sequenceNumber=$(jq -r ".sequenceNumber" <<< ${assetFileJSON})
newSequenceNumber=$(( ${sequenceNumber} + 1 ))
echo -e "\e[0mGenerating Token-Registry-JSON for sequenceNumber ${newSequenceNumber}:\e[32m ${assetSubject}.json \e[0m\n"
creatorArray=("entry" "${assetSubject}" "--init")
#Check metaName - mandatory, 1-50 chars
echo -ne "Adding 'metaName' ... "
metaName=$(jq -r ".metaName" <<< ${assetFileJSON})
#if [[ ! "${metaName//[[:space:]]}" == "${metaName}" ]]; then echo -e "\e[35mERROR - The metaName '${metaName}' contains spaces, not allowed !\e[0m\n"; exit 1; fi
if [[ ${#metaName} -lt 1 || ${#metaName} -gt 50 ]]; then echo -e "\e[35mERROR - The metaName '${metaName}' is missing or too long. Max. 50chars allowed !\e[0m\n"; exit 1; fi
creatorArray+=("--name" "${metaName}")
echo -e "\e[32mOK\e[0m"
#Check metaDescription - mandatory (may be empty), max 500 chars
echo -ne "Adding 'metaDescription' ... "
metaDescription=$(jq -r ".metaDescription" <<< ${assetFileJSON})
if [[ ${#metaDescription} -gt 500 ]]; then echo -e "\e[35mERROR - The metaDescription is too long. Max. 500chars allowed !\e[0m\n"; exit 1; fi
creatorArray+=("--description" "${metaDescription}")
echo -e "\e[32mOK\e[0m"
#Add policy script
echo -ne "Adding 'policyScript' ... "
creatorArray+=("--policy" "${policyName}.policy.script")
echo -e "\e[32mOK\e[0m"
#Check metaTicker - optional, 2-5 chars when present
metaTicker=$(jq -r ".metaTicker" <<< ${assetFileJSON})
if [[ ! "${metaTicker}" == "" ]]; then
echo -ne "Adding 'metaTicker' ... "
#if [[ ! "${metaTicker//[[:space:]]}" == "${metaTicker}" ]]; then echo -e "\e[35mERROR - The metaTicker '${metaTicker}' contains spaces, not allowed !\e[0m\n"; exit 1; fi
# Fixed: the error message claimed "3-5 chars" but the condition enforces 2-5.
if [[ ${#metaTicker} -lt 2 || ${#metaTicker} -gt 5 ]]; then echo -e "\e[35mERROR - The metaTicker '${metaTicker}' must be between 2-5 chars!\e[0m\n"; exit 1; fi
creatorArray+=("--ticker" "${metaTicker}")
echo -e "\e[32mOK\e[0m"
fi
#Check metaUrl - optional, https only, max 250 chars
metaUrl=$(jq -r ".metaUrl" <<< ${assetFileJSON})
if [[ ! "${metaUrl}" == "" ]]; then
echo -ne "Adding 'metaUrl' ... "
if [[ ! "${metaUrl}" =~ https://.* || ${#metaUrl} -gt 250 ]]; then echo -e "\e[35mERROR - The metaUrl has an invalid URL format (must be starting with https://) or is too long. Max. 250 chars allowed !\e[0m\n"; exit 1; fi
creatorArray+=("--url" "${metaUrl}")
echo -e "\e[32mOK\e[0m"
fi
#Check metaDecimals - optional, 1-255 (an empty value evaluates as 0 and is skipped)
metaDecimals=$(jq -r ".metaDecimals" <<< ${assetFileJSON})
if [[ ${metaDecimals} -gt 0 ]]; then
echo -ne "Adding 'metaDecimals' ... "
if [[ ${metaDecimals} -gt 255 ]]; then echo -e "\e[35mERROR - The metaDecimals '${metaDecimals}' is too big. Max. value is 255 decimals !\e[0m\n"; exit 1; fi
# metaSubUnitName=$(jq -r ".metaSubUnitName" <<< ${assetFileJSON})
# if [[ ! "${metaSubUnitName//[[:space:]]}" == "${metaSubUnitName}" ]]; then echo -e "\e[35mERROR - The metaSubUnitName '${metaSubUnitName}' contains spaces, not allowed !\e[0m\n"; exit 1; fi
# if [[ ${#metaSubUnitName} -lt 1 || ${#metaSubUnitName} -gt 30 ]]; then echo -e "\e[35mERROR - The metaSubUnitName '${metaSubUnitName}' is too too long. Max. 30chars allowed !\e[0m\n"; exit 1; fi
creatorArray+=("--decimals" "${metaDecimals}")
echo -e "\e[32mOK\e[0m"
fi
#Check metaPNG - optional, must be an existing PNG file
metaLogoPNG=$(jq -r ".metaLogoPNG" <<< ${assetFileJSON})
if [[ ! "${metaLogoPNG}" == "" ]]; then
echo -ne "Adding 'metaLogoPNG' ... "
if [ ! -f "${metaLogoPNG}" ]; then echo -e "\e[35mERROR - The metaLogoPNG '${metaLogoPNG}' file was not found !\e[0m\n"; exit 1; fi
# `file -b` prints the magic-number description; require it to mention PNG.
if [[ $(file -b "${metaLogoPNG}" | grep "PNG" | wc -l) -eq 0 ]]; then echo -e "\e[35mERROR - The metaLogoPNG '${metaLogoPNG}' is not a valid PNG image file !\e[0m\n"; exit 1; fi
creatorArray+=("--logo" "${metaLogoPNG}")
echo -e "\e[32mOK\e[0m"
fi
echo
#Execute the file generation and add all the parameters
echo -ne "Create JSON draft and adding parameters ... "
#tmp=$(/bin/bash -c "${cardanometa} ${creatorArray}")
tmp=$(${cardanometa} "${creatorArray[@]}")
# NOTE(review): in the `if [ $? -ne 0 ]` branch, `$?` has been overwritten by
# the `[ ... ]` test itself (which succeeded), so `exit $?` always exits 0.
# This pattern relies on checkError aborting on error by itself — confirm.
checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
echo -e "\e[32mOK\e[90m (${tmp})\e[0m"
#Update the sequenceNumber to the next higher value
# Patch the draft file in place; `tmp` holds the draft's filename here.
echo -ne "Update sequenceNumber to ${newSequenceNumber} ... "
sed -i "s/\"sequenceNumber\":\ .*,/\"sequenceNumber\":\ ${newSequenceNumber},/g" ${tmp}
checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
echo -e "\e[32mOK\e[0m"
#Sign the metadata registry submission json draft file
echo -ne "Signing with '${policyName}.policy.skey' ... "
tmp=$(${cardanometa} entry ${assetSubject} -a "${policyName}.policy.skey")
checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
echo -e "\e[32mOK\e[0m"
#Finanlize the metadata registry submission json draft file
echo -ne "Finalizing the draft file ... "
tmp=$(${cardanometa} entry ${assetSubject} --finalize)
checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
echo -e "\e[32mOK\e[90m (${tmp})\e[0m"
metaFile=${tmp}
##Adding Creator-Credits
#tmpJSON=$(cat ${metaFile})
#tmpJSON=$(jq ". += {tool: {description: \"StakePoolOperator Scripts\", url: \"https://github.com/gitmachtl/scripts\"} } " <<< ${tmpJSON})
#echo -e "${tmpJSON}" > ${metaFile}
#Validating the metadata registry submission json file
echo -ne "Validating the final file ... "
tmp=$(${cardanometa} validate ${metaFile})
checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
echo -e "\e[32mOK\e[0m"
# Record the bumped sequence number and the action in the local Asset-File.
assetFileJSON=$(cat ${assetFileName})
assetFileJSON=$(jq ". += {sequenceNumber: \"${newSequenceNumber}\", lastUpdate: \"$(date -R)\", lastAction: \"created Token-Registry-JSON\"}" <<< ${assetFileJSON})
file_unlock ${assetFileName}
echo -e "${assetFileJSON}" > ${assetFileName}
file_lock ${assetFileName}
#Moving Submitter JSON into the same directory as the assetFile
assetDir=$(dirname ${assetFileName})
if [[ ! "${assetDir}" == "." ]]; then
echo -ne "Moving final JSON into '${assetDir}' Directory ... "
mv "${metaFile}" "${assetDir}"
checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
echo -e "\e[32mOK\e[0m"
fi
# Strip a leading "./" for a cleaner display path.
assetFileLocation="${assetDir}/${assetSubject}.json"; assetFileLocation=${assetFileLocation/#.\//}
echo
echo -e "\e[33mYour Token-Registry-JSON File is now ready to be submitted to: \e[32mhttps://github.com/cardano-foundation/cardano-token-registry"
echo -e "\e[33mas a Pull-Request...\n\nYou can find your file here: \e[32m${assetFileLocation}\e[0m";
echo -e "\e[0m\n"
|
#!/bin/bash
# Serve the resumerise "retro" theme with deno, rebuilding on file changes.
# NOTE(review): the absolute file:// path is machine-specific — parameterize it
# (e.g. "$(pwd)/mod.ts") before sharing this script.
deno run --unstable --import-map=local_import_maps.json --watch -A index.ts serve file:///Users/eltonmarku/resumerise/resumerise_flux/resumerise_theme_retro/mod.ts
package me.yamakaja.commanditems.data.action;
import com.fasterxml.jackson.annotation.JsonProperty;
import me.yamakaja.commanditems.data.ItemDefinition;
import me.yamakaja.commanditems.interpreter.InterpretationContext;
import java.util.*;
/**
 * Action that evaluates an infix arithmetic expression over the numeric locals
 * of the current interpretation context, publishes the result as a new local
 * named {@code target}, and then runs its child actions inside that frame.
 */
public class ActionMathExpr extends Action {
    // Name of the local variable that receives the evaluated result.
    @JsonProperty(required = true)
    private String target;
    // Expression source text; parsed once in init() into `ast`.
    @JsonProperty(required = true)
    private String expr;
    // When true, the stored result is rounded to a whole number.
    @JsonProperty(defaultValue = "false")
    private boolean round;
    // Child actions executed while the result is in scope.
    @JsonProperty(required = true)
    private Action[] actions;
    // Parsed expression tree; transient because it is rebuilt from `expr`.
    private transient Expression ast;

    public ActionMathExpr() {
        super(ActionType.MATH_EXPR);
    }

    /**
     * Appends a readable line ("target = [rounded] expr") for this action and
     * then the traces of all children, one level deeper.
     */
    @Override
    public void trace(List<ItemDefinition.ExecutionTrace> trace, int depth) {
        String line = String.format("%s = %s%s", target, this.round ? "(rounded) " : "", expr);
        trace.add(new ItemDefinition.ExecutionTrace(depth, line));
        for (Action action : this.actions) action.trace(trace, depth + 1);
    }

    /**
     * Parses {@code expr} into an AST and initializes all child actions.
     *
     * @throws RuntimeException if the expression cannot be parsed
     */
    @Override
    public void init() {
        try {
            this.ast = parse(this.expr);
            for (Action action : this.actions) action.init();
        } catch (RuntimeException e) {
            throw new RuntimeException("Failed to parse math expression: ", e);
        }
    }

    /** A compiled expression: evaluates to a double given named parameters. */
    @FunctionalInterface
    public interface Expression {
        double eval(Map<String, Double> params);
    }

    // ====================================================
    // Derived from: https://stackoverflow.com/a/26227947
    // ====================================================

    /**
     * Recursive-descent parser that compiles {@code str} into an
     * {@link Expression} tree of lambdas. Supports + - * / ^ %, parentheses,
     * numeric literals, named variables, the constants pi and e, and a fixed
     * set of functions (sqrt, trig, ceil/floor/abs, exp/log, round, min/max,
     * fmod, sign, rand, randn).
     */
    public static Expression parse(final String str) {
        return new Object() {
            int pos = -1, ch; // current position and current character (-1 = end of input)

            // Advances to the next character, or -1 past the end of input.
            void nextChar() {
                ch = (++pos < str.length()) ? str.charAt(pos) : -1;
            }

            // Skips spaces, then consumes charToEat if it is the next character.
            boolean eat(int charToEat) {
                while (ch == ' ') nextChar();
                if (ch == charToEat) {
                    nextChar();
                    return true;
                }
                return false;
            }

            // Entry point: parses the whole string and rejects trailing input.
            Expression parse() {
                nextChar();
                Expression x = parseExpression();
                if (pos < str.length()) throw new RuntimeException("Unexpected: " + (char) ch);
                return x;
            }

            // Grammar:
            // expression = term | expression `+` term | expression `-` term
            // term = factor | term `*` factor | term `/` factor
            // factor = `+` factor | `-` factor | `(` expression `)`
            // | number | functionName factor | factor `^` factor

            Expression parseExpression() {
                Expression x = parseTerm();
                for (; ; ) {
                    if (eat('+')) {
                        Expression a = x;
                        Expression b = parseTerm();
                        x = (params) -> (a.eval(params) + b.eval(params)); // addition
                    } else if (eat('-')) {
                        Expression a = x;
                        Expression b = parseTerm();
                        x = (params) -> (a.eval(params) - b.eval(params)); // subtraction
                    } else return x;
                }
            }

            Expression parseTerm() {
                Expression x = parseFactor();
                for (; ; ) {
                    if (eat('*')) {
                        Expression a = x;
                        Expression b = parseFactor();
                        x = (params) -> (a.eval(params) * b.eval(params));
                    } else if (eat('/')) {
                        Expression a = x;
                        Expression b = parseFactor();
                        x = (params) -> (a.eval(params) / b.eval(params));
                    } else return x;
                }
            }

            Expression parseFactor() {
                if (eat('+')) return parseFactor(); // unary plus
                if (eat('-')) {
                    Expression x = parseFactor();
                    return (params) -> -x.eval(params); // unary minus
                }
                Expression x;
                int startPos = this.pos;
                if (eat('(')) { // parentheses
                    x = parseExpression();
                    eat(')');
                } else if ((ch >= '0' && ch <= '9') || ch == '.') { // numbers
                    while ((ch >= '0' && ch <= '9') || ch == '.') nextChar();
                    double res = Double.parseDouble(str.substring(startPos, this.pos));
                    x = (params) -> res;
                } else if (ch >= 'a' && ch <= 'z' || ch >= 'A' && ch <= 'Z') { // symbols. May not start with a number or underscore
                    while (ch >= 'a' && ch <= 'z' || ch >= 'A' && ch <= 'Z' || ch >= '0' && ch <= '9' || ch == '_')
                        nextChar();
                    String symbolName = str.substring(startPos, this.pos);
                    // A symbol followed by '(' is a function call; otherwise it
                    // is a constant (pi, e) or a variable looked up at eval time.
                    if (eat('(')) {
                        switch (symbolName) {
                            case "sqrt": {
                                Expression a = parseExpression();
                                x = (params) -> Math.sqrt(a.eval(params));
                                break;
                            }
                            case "sin": {
                                Expression a = parseExpression();
                                x = (params) -> Math.sin(a.eval(params));
                                break;
                            }
                            case "asin": {
                                Expression a = parseExpression();
                                x = (params) -> Math.asin(a.eval(params));
                                break;
                            }
                            case "cos": {
                                Expression a = parseExpression();
                                x = (params) -> Math.cos(a.eval(params));
                                break;
                            }
                            case "acos": {
                                Expression a = parseExpression();
                                x = (params) -> Math.acos(a.eval(params));
                                break;
                            }
                            case "tan": {
                                Expression a = parseExpression();
                                x = (params) -> Math.tan(a.eval(params));
                                break;
                            }
                            case "atan": {
                                Expression a = parseExpression();
                                x = (params) -> Math.atan(a.eval(params));
                                break;
                            }
                            case "ceil": {
                                Expression a = parseExpression();
                                x = (params) -> Math.ceil(a.eval(params));
                                break;
                            }
                            case "floor": {
                                Expression a = parseExpression();
                                x = (params) -> Math.floor(a.eval(params));
                                break;
                            }
                            case "abs": {
                                Expression a = parseExpression();
                                x = (params) -> Math.abs(a.eval(params));
                                break;
                            }
                            case "exp": {
                                Expression a = parseExpression();
                                x = (params) -> Math.exp(a.eval(params));
                                break;
                            }
                            case "log": {
                                Expression a = parseExpression();
                                x = (params) -> Math.log(a.eval(params));
                                break;
                            }
                            case "round": {
                                Expression a = parseExpression();
                                x = (params) -> Math.round(a.eval(params));
                                break;
                            }
                            // min/max accept a comma-separated variadic argument list.
                            case "min": {
                                List<Expression> expressionList = new ArrayList<>();
                                do {
                                    Expression a = parseExpression();
                                    expressionList.add(a);
                                } while (eat(','));
                                x = (params) -> {
                                    double min = expressionList.get(0).eval(params);
                                    for (int i = 1; i < expressionList.size(); i++) {
                                        double v = expressionList.get(i).eval(params);
                                        if (v < min)
                                            min = v;
                                    }
                                    return min;
                                };
                                break;
                            }
                            case "max": {
                                List<Expression> expressionList = new ArrayList<>();
                                do {
                                    Expression a = parseExpression();
                                    expressionList.add(a);
                                } while (eat(','));
                                x = (params) -> {
                                    double max = expressionList.get(0).eval(params);
                                    for (int i = 1; i < expressionList.size(); i++) {
                                        double v = expressionList.get(i).eval(params);
                                        if (v > max)
                                            max = v;
                                    }
                                    return max;
                                };
                                break;
                            }
                            case "fmod": {
                                Expression a = parseExpression();
                                if (!eat(','))
                                    throw new RuntimeException("fmod requires two parameters!");
                                Expression b = parseExpression();
                                x = (params) -> a.eval(params) % b.eval(params);
                                break;
                            }
                            case "sign": {
                                Expression a = parseExpression();
                                x = (params) -> Math.signum(a.eval(params));
                                break;
                            }
                            // rand() is re-evaluated on every eval(); randn()
                            // shares one Random instance per parsed expression.
                            case "rand":
                                x = (params) -> Math.random();
                                break;
                            case "randn": {
                                Random random = new Random();
                                x = (params) -> random.nextGaussian();
                                break;
                            }
                            default:
                                throw new RuntimeException("Unknown function: " + symbolName);
                        }
                        if (!eat(')'))
                            throw new RuntimeException("Failed to find closing ')'.");
                    } else {
                        // Variable
                        if ("pi".equals(symbolName))
                            x = (params) -> Math.PI;
                        else if ("e".equals(symbolName))
                            x = (params) -> Math.E;
                        else x = (params) -> {
                            if (!params.containsKey(symbolName))
                                throw new RuntimeException("Tried to access undefined variable: " + symbolName);
                            return params.get(symbolName);
                        };
                    }
                } else {
                    throw new RuntimeException("Unexpected: " + (char) ch);
                }
                // Postfix binary operators bound tighter than * and /.
                if (eat('^')) {
                    Expression p = parseFactor();
                    return (params) -> Math.pow(x.eval(params), p.eval(params)); // exponentiation
                }
                if (eat('%')) {
                    Expression m = parseFactor();
                    return (params) -> x.eval(params) % m.eval(params); // fmod
                }
                return x;
            }
        }.parse();
    }

    // ====================================================

    /**
     * Evaluates the expression against all numeric locals of {@code context},
     * pushes the result as local {@code target} (rounded if configured), runs
     * the child actions, and pops the frame again.
     */
    @Override
    public void process(InterpretationContext context) {
        context.pushFrame();
        Map<String, Double> params = new HashMap<>();
        context.forEachNumericLocal(params::put);
        double rval = this.ast.eval(params);
        if (this.round)
            context.pushLocal(this.target, Long.toString(Math.round(rval)));
        else
            // NOTE(review): String.format("%f", ...) is locale-dependent
            // (decimal comma in some locales) — confirm this is intended.
            context.pushLocal(this.target, String.format("%f", rval));
        for (Action action : this.actions)
            action.process(context);
        context.popFrame();
    }
}
|
/* eslint-disable @typescript-eslint/no-explicit-any */
import {
LitElement,
html,
customElement,
property,
CSSResult,
TemplateResult,
css,
internalProperty,
} from 'lit-element';
import { Light } from './light';
import {
HomeAssistant,
LovelaceCardEditor,
getLovelace,
} from 'custom-card-helpers'; // This is a community maintained npm module with common helper functions/types
import './editor';
import './color-wheel';
import './light-editor';
import type { ColorWheelRowConfig } from './types';
import { CARD_VERSION } from './const';
import { localize } from './localize/localize';
/* eslint no-console: 0 */
console.info(
`%c COLOR-WHEEL-ROW \n%c ${localize('common.version')} ${CARD_VERSION} `,
'color: orange; font-weight: bold; background: black',
'color: white; font-weight: bold; background: dimgray',
);
@customElement('color-wheel-row')
export class ColorWheelRow extends LitElement {
  /** Returns the element used to configure this row in the Lovelace UI editor. */
  public static async getConfigElement(): Promise<LovelaceCardEditor> {
    return document.createElement('color-wheel-row-editor');
  }

  /** Minimal default configuration offered by the card picker. */
  public static getStubConfig(): object {
    return {};
  }

  @property({ attribute: false }) public hass!: HomeAssistant;

  @internalProperty() private config!: ColorWheelRowConfig;

  // Light wrappers built from config.entities on every render.
  @internalProperty() private lights: Light[] = [];

  // Currently selected light (if any); drives the selection button and wheel.
  @internalProperty() private selected?: Light;

  /** Validates and stores the row configuration. */
  public setConfig(config: ColorWheelRowConfig): void {
    if (!config) {
      throw new Error(localize('common.invalid_configuration'));
    }
    if (config.test_gui) {
      getLovelace().setEditMode(true);
    }
    this.config = config;
  }

  protected render(): TemplateResult | void {
    if (this.config.show_warning) {
      return this._showWarning(localize('common.show_warning'));
    }
    if (this.config.show_error) {
      return this._showError(localize('common.show_error'));
    }
    // Fixed: guard against a missing `entities` list instead of asserting with
    // `!`, so a misconfigured row renders empty rather than throwing.
    this.lights = (this.config.entities ?? []).map(eid => new Light(this.hass, eid));
    return html`
      <div class="color-wheel-row">
        ${this.selected
          ? html`
            <button class="selection" style="border-color: ${this._iconColor()}" @click=${this.toggleSelected}>
              <ha-icon style="color: ${this._iconColor()}" .icon=${this.selected.icon}></ha-icon>
              <span> ${this.selected?.name}</span>
            </button>`
          : ''}
        <color-wheel .lights=${this.lights} .selected=${this.selected} @select=${this.updateSelected} @color=${this.updateColor} @temperature=${this.updateTemperature}></color-wheel>
      </div>
    `;
  }

  /** Handles the wheel's `select` event and re-renders with the new light. */
  updateSelected({ detail: light }: CustomEvent) {
    this.selected = light;
    const map = new Map();
    map.set('selected', this.selected);
    this.update(map);
  }

  /** Cycles the selection to the next light (wrapping around at the end). */
  toggleSelected() {
    if (!this.selected) return;
    const selectedIdx = this.lights.findIndex(l => l.id === this.selected!.id);
    const nextIdx = selectedIdx >= this.lights.length - 1 ? 0 : selectedIdx + 1;
    this.updateSelected({ detail: this.lights[nextIdx] } as CustomEvent);
  }

  updateColor({ detail: rgb }: CustomEvent) {
    this.selected?.setColor(rgb);
  }

  updateTemperature({ detail: temp }: CustomEvent) {
    this.selected?.setTemperature(temp);
  }

  updateBrightness({ detail: brightness }: CustomEvent) {
    this.selected?.setBrightness(brightness);
  }

  // Icon/border color: gray when off, warm yellow for pure white, else the
  // light's own RGB color.
  private _iconColor() {
    if (!this.selected || !this.selected.on) return 'darkgray';
    if (this.selected.rgb.every(c => c === 255)) return 'rgb(253, 216, 53)';
    return `rgb(${this.selected.rgb.join(',')})`;
  }

  private _showWarning(warning: string): TemplateResult {
    return html`
      <hui-warning>${warning}</hui-warning>
    `;
  }

  private _showError(error: string): TemplateResult {
    const errorCard = document.createElement('hui-error-card');
    errorCard.setConfig({
      type: 'error',
      error,
      origConfig: this.config,
    });
    return html`
      ${errorCard}
    `;
  }

  static get styles(): CSSResult {
    // Fixed: the original declared `margin-bottom: 5px;` twice; the shorthand
    // `margin: 5px auto;` below supersedes it anyway.
    return css`
      .selection {
        background: white;
        outline: none;
        margin-bottom: 5px;
        display: table;
        margin: 5px auto;
        border: solid 1px;
        border-radius: 15px;
        padding: 5px 10px;
      }
    `;
  }
}
|
<gh_stars>0
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.android.deskclock.actionbarmenu;
import android.app.Activity;
import android.view.Menu;
import android.view.MenuItem;
/**
* {@link MenuItemController} for handling navigation up button in actionbar. It is a special
* menu item because it's not inflated through menu.xml, and has its own predefined id.
*/
public final class NavUpMenuItemController implements MenuItemController {

    /** Activity whose actionbar hosts the "up" affordance. */
    private final Activity hostActivity;

    public NavUpMenuItemController(Activity activity) {
        hostActivity = activity;
    }

    @Override
    public int getId() {
        // Fixed framework id assigned to the actionbar home/up button.
        return android.R.id.home;
    }

    @Override
    public void onCreateOptionsItem(Menu menu) {
        // Nothing to inflate: the Toolbar creates the "Home" item itself.
    }

    @Override
    public void onPrepareOptionsItem(MenuItem item) {
        // No per-display preparation required for the up button.
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Treat "up" as closing the current screen.
        hostActivity.finish();
        return true;
    }
}
|
<filename>AV/PIFLA/Regular/RegExFS.js
/*global PIFRAMES */
/* Written by ?? and <NAME> */
$(document).ready(function() {
  "use strict";
  var av_name = "RegExFS";
  var av = new JSAV(av_name);
  var Frames = PIFRAMES.init(av_name);

  // Emit one question frame per key, in order.
  function askAll(keys) {
    keys.forEach(function(key) {
      av.umsg(Frames.addQuestion(key));
      av.step();
    });
  }

  // Emit a plain informational frame.
  function tell(text) {
    av.umsg(text);
    av.step();
  }

  // Frame 1: introduction
  av.umsg("This frameset presents the definition and some examples for a RegEx.");
  av.displayInit();

  // Frames 2-10: warm-up questions
  askAll(["Operators", "Concat", "ConcatStr", "star", "alla", "ab", "abstrings", "evena", "onea"]);

  // Frame 11: formal definition of regular expressions
  tell("Here is the complete definition for regular expressions on some alphabet $\\Sigma$.<br/><br/>1. <b>Base Cases:</b>$\\lambda$ and $a$ $\\in$ $\\Sigma$ are RE.<br/>2. If $r$ and $s$ are RE, then $(r)$, $r + s$, $rs$, and $r^∗$ are RE.<br/>3. $r$ is a RE if and only if it can be derived from (1) with a finite number of applications of (2).");

  // Frames 12-15: questions on the definition
  askAll(["regexstrings", "definition", "buildregex", "moreex"]);

  // Frame 16: transition to languages of regular expressions
  tell("Now that we have a concrete definition for the regular expressions themselves, it is easy to define the set of languages that can be defined by regular expressions.");

  // Frame 17: definition of L(r)
  tell("<b>Definition:</b> $L(r)$ is the language denoted by regular expression $r$.<br/><b>Base Cases:</b> 1. $\\emptyset$, {$\\lambda$}, and $\\{a\\in \\Sigma\\}$ are each languages denoted by some RE.<br/>2. If $r$ and $s$ are RE, then<br/>$\\ \\ L(r + s) = L(r) \\cup L(s)$<br/>$\\ \\ L(rs) = L(r) \\cdot L(s)$<br/>$\\ \\ L((r)) = L(r)$<br/>$\\ \\ L((r)*) = L((r)^*)$");

  // Frames 18-26: questions on languages of REs
  askAll(["empty", "abcstar", "precedence", "plus", "oddaevenb", "hasempty", "has3as", "numbers", "finite"]);

  // Frame 27: closing frame
  av.umsg("Congratulations! Frameset completed.");
  av.recorded();
});
|
#!/bin/bash
# Build the model, then run the model-test binary; abort on the first failure.

# Step 1: build the model.
echo "Building the model..."
./build.sh || { echo "Error: Failed to build the model"; exit 1; }

# Step 2: run the test executable with its fixed argument set.
echo "Running model tests..."
./build/model_test --single_thread=false --single_instance=false --test_groups=0 \
    || { echo "Error: Failed to run model tests"; exit 1; }

echo "All tests completed successfully"
import java.util.ArrayList;
import java.util.HashMap;
// Function to find the shortest possible route
// Function to find the shortest possible route
/**
 * Attempts to find the shortest route over the given cities and returns its
 * total distance (the route itself is kept in a local list but not returned).
 *
 * NOTE(review): every iteration builds tempRoute from `cities` in the same
 * fixed order, so city1/city2 never change the candidate route — all pairs
 * evaluate the same base route and only calcDistance's start/end flag varies.
 * This does not enumerate permutations; confirm whether a real TSP search was
 * intended before relying on the result.
 */
public int shortestRoute(String[] cities, HashMap<String, Integer> distances) {
    // Create an ArrayList to store the result
    ArrayList<String> shortestRoute = new ArrayList<>();
    // Create a variable to store the minimum distance
    int minDistance = Integer.MAX_VALUE;
    for (String city1 : cities) {
        for (String city2 : cities) {
            // Create a temp array to store the current route
            // (always the input order — see NOTE above)
            ArrayList<String> tempRoute = new ArrayList<>(cities);
            // Calculate the route distance
            int distance = calcDistance(tempRoute, city1, city2, distances);
            if (distance < minDistance) {
                minDistance = distance;
                shortestRoute = tempRoute;
            }
        }
    }
    // Return the shortest route distance
    return minDistance;
}
// Function to calculate the route distance
// Function to calculate the route distance
/**
 * Sums the distances of consecutive city pairs along {@code route}, looking
 * each leg up in {@code distances} under the key "city1-city2".
 *
 * NOTE(review): lookups throw a NullPointerException (unboxing null) when a
 * pair is missing from the map — assumes every adjacent pair plus "end-start"
 * is present; confirm against the caller. The `returned` flag only fires when
 * the route happens to contain the start->end edge consecutively, which is an
 * odd condition for "route returned to start" — verify the intent.
 */
private int calcDistance(ArrayList<String> route, String start, String end, HashMap<String, Integer> distances) {
    int distance = 0;
    boolean returned = false;
    // Iterate over the route
    for (int i=0; i<route.size()-1; i++) {
        // Get the start and end cities of the current route
        String city1 = route.get(i);
        String city2 = route.get(i+1);
        if (city1.equals(start) && city2.equals(end)) {
            returned = true;
        }
        // Calculate the distance between the cities
        distance += distances.get(city1 + "-" + city2);
    }
    // Return to the start city if the route hasn't returned
    if (!returned) {
        distance += distances.get(end + "-" + start);
    }
    return distance;
}
# Store the scores in a dictionary
scores = {'Rafael': 30, 'Gaby': 40}


def check_winner(scores):
    """Print the player with the strictly highest positive score.

    Ties keep the first player encountered; if no score exceeds zero,
    a no-winner message is printed instead.
    """
    winner, best = None, 0
    # Keep the first name that strictly beats the running maximum.
    for player, score in scores.items():
        if score > best:
            winner, best = player, score
    if winner:
        print('The winner is {} with the score of {}!'.format(winner, best))
    else:
        print('No winner announced!')


# Announce the result for the stored scores
check_winner(scores)
<gh_stars>1-10
package string_handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
*
* @author minchoba
* 백준 4999번: 아!
*
* @see https://www.acmicpc.net/problem/4999/
*
*/
/**
 * Baekjoon 4999 "Ah!": the doctor says "go" when the patient's mouth
 * (first input line) is opened at least as wide as the sound (second line).
 */
public class Boj4999 {
    public static void main(String[] args) throws Exception {
        // Read both lines through a buffered reader on standard input.
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        int mouthWidth = in.readLine().length();
        int soundWidth = in.readLine().length();
        // Compare the two lengths and print the verdict.
        System.out.println(mouthWidth >= soundWidth ? "go" : "no");
    }
}
|
<gh_stars>1-10
import log from './log';
/**
 * Read or write the document scroll position relative to one edge.
 *
 * Read:  scrollBar('top' | 'bottom' | 'left' | 'right')
 *        -> { px, percent, view } distance of that edge from the viewport.
 * Write: scrollBar(edge, value) where value matches /^(\d+(\.\d+)?)(%|view)?$/
 *        (plain number = pixels, '%' = percent of scroll size, 'view' =
 *        multiples of the viewport size); logs an error on a malformed value.
 */
export default function() {
    var argv = arguments.length
      , body = document.body
      , doc = document.documentElement
      , curr = 0
      , total = 1
      , view = 1
      , regexp = /^(\d+(?:\.\d+)?)(%|view)?$/
      , temp
      ;
    if (argv === 1) { // read operation
        body = document.body;
        doc = document.documentElement;
        switch (arguments[0]) {
            case 'top':
                total = body.scrollHeight;
                curr = body.scrollTop;
                view = doc.clientHeight;
                break;
            case 'bottom':
                total = body.scrollHeight;
                view = doc.clientHeight;
                curr = total - body.scrollTop - view;
                break;
            case 'left':
                total = body.scrollWidth;
                curr = body.scrollLeft;
                view = doc.clientWidth;
                break;
            case 'right':
                total = body.scrollWidth;
                view = doc.clientWidth;
                curr = total - body.scrollLeft - view;
                break;
            default:
                break;
        }
        return {
            px: curr
          , percent: Math.floor(curr / total * 100)
          , view: parseFloat((curr / view).toFixed(1))
        };
    }
    else { // write operation
        temp = regexp.exec(arguments[1]);
        if (temp) {
            switch (arguments[0]) {
                case 'top':
                    curr = parseFloat(temp[1]);
                    if (temp[2] === '%') { // percentage
                        curr = curr * body.scrollHeight / 100;
                    }
                    else if (temp[2] === 'view') { // viewport multiples
                        curr = curr * doc.clientHeight;
                    }
                    body.scrollTop = curr;
                    break;
                case 'bottom':
                    curr = parseFloat(temp[1]);
                    if (temp[2] === '%') { // percentage
                        curr = Math.max(body.scrollHeight * (1 - curr / 100), 0);
                    }
                    else if (temp[2] === 'view') { // viewport multiples
                        curr = Math.max(body.scrollHeight - curr * doc.clientHeight, 0);
                    }
                    body.scrollTop = curr;
                    break;
                case 'left':
                    curr = parseFloat(temp[1]);
                    if (temp[2] === '%') { // percentage
                        curr = curr * body.scrollWidth / 100;
                    }
                    else if (temp[2] === 'view') { // viewport multiples
                        curr = curr * doc.clientWidth;
                    }
                    body.scrollLeft = curr;
                    break;
                case 'right':
                    curr = parseFloat(temp[1]);
                    if (temp[2] === '%') { // percentage
                        curr = Math.max(body.scrollWidth * (1 - curr / 100), 0);
                    }
                    else if (temp[2] === 'view') { // viewport multiples
                        curr = Math.max(body.scrollWidth - curr * doc.clientWidth, 0);
                    }
                    body.scrollLeft = curr;
                    break;
                default:
                    break;
            }
            // Fixed: removed a leftover tail that re-derived `total` and then
            // assigned `body[curr] = ...` — `curr` holds a pixel NUMBER here,
            // so that line only created a junk numeric property on <body>;
            // each case above already applies the scroll offset.
        }
        else {
            log('scrollBar 参数设置错误');
        }
    }
}
#!/bin/sh
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Codesigns the app bundle, then extracts every embedded signature (plus the
# CodeResources files) into a detached-signature tarball for deterministic
# (gitian-style) reattachment on other machines.
set -e
ROOTDIR=dist
BUNDLE="${ROOTDIR}/Monacoin-Qt.app"
CODESIGN=codesign
TEMPDIR=sign.temp
TEMPLIST=${TEMPDIR}/signatures.txt
OUT=signature-osx.tar.gz
OUTROOT=osx
# All arguments are forwarded verbatim to codesign (e.g. "-s MyIdentity").
if [ ! -n "$1" ]; then
  echo "usage: $0 <codesign args>"
  echo "example: $0 -s MyIdentity"
  exit 1
fi
rm -rf ${TEMPDIR} ${TEMPLIST}
mkdir -p ${TEMPDIR}
# --file-list records every file codesign touched into TEMPLIST.
${CODESIGN} -f --file-list ${TEMPLIST} "$@" "${BUNDLE}"
# Mach-O binaries: locate the signature blob via pagestuff (size + offset of
# the last load segment) and dd just those bytes into a ".sign" sidecar file.
grep -v CodeResources < "${TEMPLIST}" | while read i; do
  TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
  SIZE=`pagestuff "$i" -p | tail -2 | grep size | sed 's/[^0-9]*//g'`
  OFFSET=`pagestuff "$i" -p | tail -2 | grep offset | sed 's/[^0-9]*//g'`
  SIGNFILE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}.sign"
  DIRNAME="`dirname "${SIGNFILE}"`"
  mkdir -p "${DIRNAME}"
  echo "Adding detached signature for: ${TARGETFILE}. Size: ${SIZE}. Offset: ${OFFSET}"
  dd if="$i" of="${SIGNFILE}" bs=1 skip=${OFFSET} count=${SIZE} 2>/dev/null
done
# CodeResources plists are copied whole — they have no embedded signature blob.
grep CodeResources < "${TEMPLIST}" | while read i; do
  TARGETFILE="${BUNDLE}/`echo "${i}" | sed "s|.*${BUNDLE}/||"`"
  RESOURCE="${TEMPDIR}/${OUTROOT}/${TARGETFILE}"
  DIRNAME="`dirname "${RESOURCE}"`"
  mkdir -p "${DIRNAME}"
  echo "Adding resource for: \"${TARGETFILE}\""
  cp "${i}" "${RESOURCE}"
done
rm ${TEMPLIST}
tar -C "${TEMPDIR}" -czf "${OUT}" .
rm -rf "${TEMPDIR}"
echo "Created ${OUT}"
|
import java.util.*;
/**
 * Graph node with weighted edges plus bookkeeping fields used by traversal
 * algorithms: a visited flag and parent/distance for shortest-path searches,
 * and a {@link DisjointSet} handle for Kruskal-style algorithms.
 * Natural ordering is by tentative {@code distance} (smallest first).
 */
class WeightedNode implements Comparable<WeightedNode> {
    public String name;
    private ArrayList<WeightedNode> neighbors = new ArrayList<WeightedNode>();
    // Edge weight per neighbor.
    private HashMap<WeightedNode, Integer> weightMap = new HashMap<>();
    private boolean isVisited = false;
    private WeightedNode parent;
    // Tentative distance for shortest-path algorithms; starts "infinite".
    private int distance;
    private DisjointSet set; //used in DisjointSet Algorithm

    public WeightedNode(String name) {
        this.name = name;
        distance = Integer.MAX_VALUE;
    }

    public DisjointSet getSet() {
        return set;
    }

    public void setSet(DisjointSet set) { //used in DisjointSet Algorithm
        this.set = set;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public ArrayList<WeightedNode> getNeighbors() {
        return neighbors;
    }

    public void setNeighbors(ArrayList<WeightedNode> neighbors) {
        this.neighbors = neighbors;
    }

    public HashMap<WeightedNode, Integer> getWeightMap() {
        return weightMap;
    }

    public void setWeightMap(HashMap<WeightedNode, Integer> weightMap) {
        this.weightMap = weightMap;
    }

    public boolean isVisited() {
        return isVisited;
    }

    public void setVisited(boolean isVisited) {
        this.isVisited = isVisited;
    }

    public WeightedNode getParent() {
        return parent;
    }

    public void setParent(WeightedNode parent) {
        this.parent = parent;
    }

    public int getDistance() {
        return distance;
    }

    public void setDistance(int distance) {
        this.distance = distance;
    }

    @Override
    public String toString() {
        return name;
    }

    @Override
    public int compareTo(WeightedNode o) {
        // Fixed: `this.distance - o.distance` overflows — distance defaults to
        // Integer.MAX_VALUE, so comparing an unvisited node against a settled
        // one could wrap negative and break priority-queue ordering.
        return Integer.compare(this.distance, o.distance);
    }
}
/**
 * Disjoint-set (union-find) where each WeightedNode stores a direct
 * reference to its current DisjointSet, so findSet is O(1) and union
 * re-points every member of one set at the other ("union by size": the
 * smaller set is folded into the larger one).
 */
class DisjointSet {
    // Member nodes of this set; a node appears in exactly one live set.
    private ArrayList<WeightedNode> nodes = new ArrayList<>();

    /**
     * Demo driver: creates a singleton set per node, then walks adjacent
     * pairs and unions the two sets whenever they differ, printing the
     * state at every step.
     */
    public static void driver(ArrayList<WeightedNode> nodeList){
        makeSet(nodeList); //Create Disjoint Sets for each node in this list.
        for(int i= 0; i<nodeList.size()-1; i++) {
            WeightedNode firstNode = nodeList.get(i);
            WeightedNode secondNode = nodeList.get(i+1);
            System.out.println("Checking if node "+firstNode.getName()+" and "+secondNode.getName() +" belongs to different set, if yes, will Union them...");
            System.out.println("\nFirst Set name is: " + firstNode.getName());
            firstNode.getSet().printAllNodesOfThisSet();
            System.out.println("\nSecond Set name is: " + secondNode.getName());
            secondNode.getSet().printAllNodesOfThisSet();
            // Only union when the two nodes are currently in different sets;
            // this also guarantees union() below never returns null here.
            if(!findSet(firstNode).equals(findSet(secondNode))) {
                System.out.println("\nMaking union "+firstNode+" and "+secondNode );
                DisjointSet unionedSet = union(firstNode, secondNode);
                unionedSet.printAllNodesOfThisSet();
            }
            System.out.println("\n**************************************\n");
        }
    }

    /** Wraps each node of the list in its own fresh singleton set. */
    public static void makeSet(ArrayList<WeightedNode> nodeList) {
        for(WeightedNode node: nodeList) {
            DisjointSet set = new DisjointSet();
            set.getNodes().add(node);
            node.setSet(set);//Storing the reference of this Disjoint set in Node class
        }
    }

    /** Alias of findSet: returns the set the node currently belongs to. */
    public static DisjointSet getSet(WeightedNode node) {
        return node.getSet();
    }

    /** O(1) find: each node carries a direct reference to its set. */
    public static DisjointSet findSet(WeightedNode node) {
        return node.getSet();
    }

    /**
     * Merges the two nodes' sets and returns the surviving (larger) set,
     * or null when both nodes are already in the same set.
     *
     * Note: the losing set's own node list is not cleared; that object
     * simply becomes unreachable once no node references it any more.
     */
    public static DisjointSet union(WeightedNode node1, WeightedNode node2) {
        if(node1.getSet().equals(node2.getSet())) { //if two nodes are of same set then no union needed
            return null;
        }
        else {
            //get set object of two nodes
            DisjointSet set1 = node1.getSet();
            DisjointSet set2 = node2.getSet();
            // if first set is bigger then update each node of second set to merge to set1
            if(set1.getNodes().size()>set2.getNodes().size()) {
                ArrayList<WeightedNode> nodeSet2 = set2.getNodes();
                for(WeightedNode node: nodeSet2) { //update each node of second set to merge to set1
                    node.setSet(set1);
                    set1.getNodes().add(node);
                }
                return set1;
            }
            else {
                // if second set is bigger/equal then update each node of first set to merge to set2
                ArrayList<WeightedNode> nodeSet1 = set1.getNodes();
                for(WeightedNode node: nodeSet1) {//update each node of first set to merge to set2
                    node.setSet(set2);
                    set2.getNodes().add(node);
                }
                return set2;
            }
        }
    }

    /** Live (mutable) view of this set's member list. */
    public ArrayList<WeightedNode> getNodes() {
        return nodes;
    }

    public void setNodes(ArrayList<WeightedNode> nodes) {
        this.nodes = nodes;
    }

    /** Prints every member of this set on a single line. */
    public void printAllNodesOfThisSet() {
        System.out.println("Printing all nodes of the set: ");
        for(WeightedNode node: nodes) {
            System.out.print(node + " ");
        }
        System.out.println();
    }
}
/**
 * Demo entry point: builds ten nodes named "A" through "J" and runs the
 * disjoint-set driver over them.
 */
public class DisjointSetMain {
    public static void main(String[] args) {
        ArrayList<WeightedNode> nodeList = new ArrayList<>();
        // Ten consecutive letters starting at 'A' (same as "" + (char)(65+i)).
        for (char label = 'A'; label < 'A' + 10; label++) {
            nodeList.add(new WeightedNode(String.valueOf(label)));
        }
        DisjointSet.driver(nodeList);
    }
}
|
#include <iostream>
// Sequentially scan the first `size` elements of `arr` for `key`.
// Returns the index of the first match, or -1 when the key is absent.
int linearSearch(int arr[], int size, int key)
{
    int pos = 0;
    while (pos < size)
    {
        if (arr[pos] == key)
            return pos;
        ++pos;
    }
    return -1;
}
// Demo driver: search a small fixed array for one key and report the result.
int main()
{
    int values[] = {3, 5, -2, 8};
    const int target = 8;
    const int count = sizeof(values) / sizeof(values[0]);

    const int pos = linearSearch(values, count, target);
    if (pos < 0)
    {
        std::cout << "Element is not present!" << std::endl;
    }
    else
    {
        std::cout << "Element is present at index: " << pos << std::endl;
    }
    return 0;
}
<filename>komponent/example.stories.tsx
// Storybook story template.  "{{komponent}}" in the title is presumably a
// placeholder substituted when a component is scaffolded from this file --
// TODO confirm against the generator that consumes this template.
import React from 'react';
import Komponent from './src';
// Component Story Format (CSF) default export: Storybook metadata.
export default {
  component: Komponent,
  parameters: {
    // Norwegian: "Short text about the component" -- shown as the docs subtitle.
    componentSubtitle: 'Kort tekst om komponenten',
  },
  title: 'Komponenter/{{komponent}}',
};
// Default story: renders the component with no props.
export const standard = () => {
  return <Komponent />;
};
|
<filename>app/src/main/java/com/nanchen/rxjava2examples/practice/TestHttpActivity.java
package com.nanchen.rxjava2examples.practice;
import android.app.Activity;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import com.nanchen.rxjava2examples.R;
import com.nanchen.rxjava2examples.practice.bean.Result;
import io.reactivex.Observer;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;
/**
 * Demo activity for exercising a Retrofit + RxJava2 HTTP call: tapping the
 * button fires one request on the IO scheduler and logs the response (or
 * the error) on the main thread.
 *
 * Created by TR 105 on 2017/8/5.
 */
public class TestHttpActivity extends Activity {
    private Button btn_http; // fires the request when clicked

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.test_http_activity);
        btn_http = (Button) findViewById(R.id.btn_http);
        btn_http.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                http();
            }
        });
    }

    /**
     * Issues the network call; success and failure are only logged.
     */
    private void http() {
        new HttpClient()
                .getRectService()
                // assumes (page, pageSize, id)-style arguments -- TODO confirm
                // against the RectService interface declaration.
                .getManagerData(1, 10, 1024)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Consumer<BaseResponse<Result>>() {
                    @Override
                    public void accept(BaseResponse<Result> baseResponse) throws Exception {
                        // NOTE(review): NPE here if baseResponse.rest is null --
                        // confirm BaseResponse's contract.
                        Log.e("================", baseResponse.rest.toString());
                    }
                }, new Consumer<Throwable>() {
                    @Override
                    public void accept(Throwable throwable) throws Exception {
                        // NOTE(review): Throwable.getMessage() may be null, which
                        // android.util.Log rejects -- consider String.valueOf().
                        Log.e("================", throwable.getMessage());
                    }
                });
        // Alternative implementations kept for reference:
        // (1) plain Retrofit enqueue with a Callback:
        // new HttpClient()
        //         .getRectService()
        //         .getManagerData2(1, 10, 1024).enqueue(new Callback<BaseResponse<Result>>() {
        //     @Override
        //     public void onResponse(Call<BaseResponse<Result>> call, Response<BaseResponse<Result>> response) {
        //         Log.e("================", response.toString());
        //     }
        //
        //     @Override
        //     public void onFailure(Call<BaseResponse<Result>> call, Throwable t) {
        //         Log.e("================", t.getMessage());
        //     }
        // });
        // (2) the same RxJava call consumed through a full Observer:
        // new HttpClient()
        //         .getRectService()
        //         .getManagerData(1, 10, 1024)
        //         .subscribeOn(Schedulers.io())
        //         .observeOn(AndroidSchedulers.mainThread())
        //         .subscribe(new Observer<BaseResponse<Result>>() {
        //             @Override
        //             public void onSubscribe(Disposable d) {
        //
        //             }
        //
        //             @Override
        //             public void onNext(BaseResponse<Result> baseResponse) {
        //                 Log.e("================", baseResponse.toString());
        //             }
        //
        //             @Override
        //             public void onError(Throwable e) {
        //
        //             }
        //
        //             @Override
        //             public void onComplete() {
        //
        //             }
        //         });
    }
}
|
<reponame>PaerrePampula/SmartCan_sensor
/*A distance sensor, that is using sonar to measure distances. The recommended and the originally used component is HC-SR04.
Maximum possible distance measured : 400cm
The connections are:
Connect VCC to a 5V power supply, do not use 3.3V! It's not enough for the sensor, and it only gives almost nonexistent results or nothing at all!
Connect Ground to Ground.
Connect ECHO and TRIGGER to any available digital pin on the MCU eg. D9 and D10.
*/
/** Driver declaration for an ultrasonic (sonar) distance sensor such as
 *  the HC-SR04, built on mbed pin/timer primitives.  Implementations of
 *  the member functions are not in this file.
 */
class DistanceSensor {
public:
    /** Constructed with required pins..
     * @param echoP the pin, where the echo is connected (Output to MCU)
     * @param triggerP the pin, where the trigger is connected (Input from MCU)
     */
    DistanceSensor(PinName echoP, PinName triggerP);
    // Starts a measurement cycle -- presumably pulses the trigger pin and
    // arms the echo interrupt; implementation not visible, confirm there.
    void startMeasuring();
    // Returns the most recent measurement in centimetres; likely bounded by
    // minClamp/maxClamp below -- TODO confirm in the implementation.
    float getDistanceInCm();
private:
    InterruptIn echo;          // echo pulse input (edge-interrupt driven)
    DigitalOut trigger;        // trigger pulse output to the sensor
    Timer timer;               // times the echo pulse width
    float calculatedDistance;  // last computed distance result
    Timeout triggerTimeout, echoTimeout; // async callbacks for pulse/watchdog timing
    float minClamp = 2; //The minimum distance possible
    float maxClamp = 400; //The max distance possible, the highest correctly measured distance.
    void initializeSensor();
    void stopTrigger();
    void stopTimerAndMeasure();
    void startTimer();
};
import { withRouter } from 'react-router-dom'
import { actions } from '../../Actions/User'
import { connect } from 'react-redux'
import Register from './Register'

// Provides the Register form with an onRegister callback that dispatches
// a UserCreate action carrying the submitted credentials.
const mapDispatchToProps = dispatch => ({
  onRegister: ({ email, password, firstname, lastname }) =>
    dispatch(actions.UserCreate(email, password, firstname, lastname)),
})

// No state props are mapped; wrap with the router for navigation context.
export default withRouter(connect(null, mapDispatchToProps)(Register))
|
<gh_stars>1-10
package httpclient
import (
"github.com/fighthorse/redisAdmin/component/conf"
"github.com/mitchellh/mapstructure"
"github.com/prometheus/client_golang/prometheus"
)
// Prometheus collectors for outbound HTTP calls made by this package;
// all of them are registered with the default registry in init().
var (
	// remoteCallErrorCount counts failed remote calls, labeled by URL and status.
	remoteCallErrorCount = prometheus.NewCounterVec(
		prometheus.CounterOpts{Name: "remote_call_error_count", Help: "remote call error count"},
		[]string{"url", "status"})
	// circuitBreakerCount counts circuit-breaker events per URL.
	circuitBreakerCount = prometheus.NewCounterVec(
		prometheus.CounterOpts{Name: "circuit_breaker_count", Help: "circuit breaker count"},
		[]string{"url"})
	// remoteCallRequestCount counts every outbound request, labeled by name and URL.
	remoteCallRequestCount = prometheus.NewCounterVec(
		prometheus.CounterOpts{Name: "app_remote_call_request_totals", Help: "http remote call request count"},
		[]string{"name", "url"})
	// remoteCallCodeErrorCount counts API-level error codes in responses,
	// labeled by service, URL, code and message.
	remoteCallCodeErrorCount = prometheus.NewCounterVec(
		prometheus.CounterOpts{Name: "app_remote_call_api_code_err", Help: "http remote call api code request err count"},
		[]string{"service", "url", "code", "msg"})
	// httpGobreakGauge exposes per-URL circuit-breaker detail values.
	httpGobreakGauge = prometheus.NewGaugeVec(
		prometheus.GaugeOpts{Name: "circuit_breaker_details", Help: "http circuit breaker details"},
		[]string{"url"})
)
// HttpServer is the YAML-backed configuration for this package: the owning
// service's name, a switch to disable child-service circuit breaking, and
// free-form breaker / child-service settings decoded elsewhere.
type HttpServer struct {
	SelfServiceName string                 `yaml:"self_service_name"`
	CloseBreaker    bool                   `yaml:"close_breaker"`
	BreakerCfg      map[string]interface{} `yaml:"breaker_cfg"`
	ChildServer     map[string]interface{} `yaml:"child_server"`
}
// init registers every collector above with the default Prometheus
// registry.  MustRegister is variadic and panics on failure (e.g. a
// duplicate registration), which is the desired behavior at startup.
func init() {
	prometheus.MustRegister(
		remoteCallErrorCount,
		circuitBreakerCount,
		remoteCallRequestCount,
		remoteCallCodeErrorCount,
		httpGobreakGauge,
	)
}
// Init applies the HttpServer configuration to package-level state:
// this service's own name and the circuit-breaker disable switch.
func Init(cfg conf.HttpServer) {
	InitSelfService(cfg.SelfServiceName, cfg.CloseBreaker)
}
// InitSelfService stores this service's own name and the circuit-breaker
// switch in package-level variables.
//
// selfName: the name of this service itself.
// closeBreaker: child-service circuit breaking is enabled by default;
// passing true disables it.
func InitSelfService(selfName string, closeBreaker bool) {
	selfServerName = selfName
	isDisableCircuitBreaker = closeBreaker
}
// InitChildService decodes each raw child-service config entry into a
// Server and indexes it by name in the package-level childServer map.
// Entries that fail to decode are silently skipped (best effort).
func InitChildService(cfg []map[string]interface{}) {
	for _, raw := range cfg {
		srv := &Server{}
		if err := mapstructure.Decode(raw, srv); err != nil {
			continue // best effort: ignore undecodable entries
		}
		childServer[srv.Name] = srv
	}
}
|
#!/bin/bash
# Script to build all cross and native compilers supported by musl-libc.
# This isn't directly used by toybox, but is useful for testing.

# Must be run from a musl-cross-make checkout (identified by litecross/).
if [ ! -d litecross ]
then
  echo Run this script in musl-cross-make directory to make "ccc" directory.
  echo
  echo " "git clone https://github.com/richfelker/musl-cross-make
  echo " "cd musl-cross-make
  echo ' ~/toybox/scripts/mcm-buildall.sh'
  exit 1
fi

# All toolchains after the first are themselves cross compiled (so they
# can be statically linked against musl on the host, for binary portability.)
# static i686 binaries are basically "poor man's x32".
BOOTSTRAP=i686-linux-musl

# Toolchains are installed under ./ccc unless the caller overrides $OUTPUT.
[ -z "$OUTPUT" ] && OUTPUT="$PWD/ccc"

# "clean" wipes outputs, host bootstrap dirs, logs, and make's build state.
if [ "$1" == clean ]
then
  rm -rf "$OUTPUT" host-* *.log
  make clean
  exit
fi
# Build a single toolchain for $TARGET into $OUTPUT.
#
# Controlled by variables rather than arguments:
#   TARGET     - target triplet (e.g. sh2eb-linux-muslfdpic)
#   TYPE       - "" = host bootstrap compiler, "static" = musl-static cross
#                compiler, "native" = compiler that runs on the target
#   GCC_CONFIG - extra gcc configure flags;  RENAME - alternate arch name
# Skips the build when the output (or its squashfs image) already exists.
make_toolchain()
{
  # Set cross compiler path
  LP="$PATH"
  if [ -z "$TYPE" ]
  then
    # Bootstrap pass: installed to host-$TARGET, linked against host libc.
    OUTPUT="$PWD/host-$TARGET"
    EXTRASUB=y
  else
    if [ "$TYPE" == static ]
    then
      # Cross compiler: built with the bootstrap compiler so the result is
      # statically linked against musl; installed as <name>-cross.
      HOST=$BOOTSTRAP
      [ "$TARGET" = "$HOST" ] && LP="$PWD/host-$HOST/bin:$LP"
      TYPE=cross
      EXTRASUB=y
      LP="$OUTPUT/$HOST-cross/bin:$LP"
    else
      # Native compiler: cross compiled with this target's own cross
      # compiler; installed as <name>-native.
      HOST="$TARGET"
      export NATIVE=y
      LP="$OUTPUT/${RENAME:-$TARGET}-cross/bin:$LP"
    fi
    # Statically link the toolchain's own binaries.
    COMMON_CONFIG="CC=\"$HOST-gcc -static --static\" CXX=\"$HOST-g++ -static --static\""
    export -n HOST
    OUTPUT="$OUTPUT/${RENAME:-$TARGET}-$TYPE"
  fi

  # Already built?  (squashfs image, cross ld, or native ld present)
  if [ -e "$OUTPUT.sqf" ] || [ -e "$OUTPUT/bin/$TARGET-ld" ] ||
    [ -e "$OUTPUT/bin/ld" ]
  then
    return
  fi

  # Change title bar to say what we're currently building
  echo === building $TARGET-$TYPE
  echo -en "\033]2;$TARGET-$TYPE\007"

  rm -rf build/"$TARGET" "$OUTPUT" &&
  if [ -z "$CPUS" ]
  then
    # Default to one more job than processors (overlaps I/O with compute).
    CPUS="$(nproc)"
    [ "$CPUS" != 1 ] && CPUS=$(($CPUS+1))
  fi

  set -x &&
  PATH="$LP" make OUTPUT="$OUTPUT" TARGET="$TARGET" \
    GCC_CONFIG="--disable-nls --disable-libquadmath --disable-decimal-float --disable-multilib --enable-languages=c,c++ $GCC_CONFIG" \
    COMMON_CONFIG="CFLAGS=\"$CFLAGS -g0 -Os\" CXXFLAGS=\"$CXXFLAGS -g0 -Os\" LDFLAGS=\"$LDFLAGS -s\" $COMMON_CONFIG" \
    install -j$CPUS || exit 1
  set +x

  # Tag this toolchain's libc headers so code can #ifdef __MUSL__.
  echo -e '#ifndef __MUSL__\n#define __MUSL__ 1\n#endif' \
    >> "$OUTPUT/${EXTRASUB:+$TARGET/}include/features.h"

  # When renaming, add <rename>-* symlinks beside each <target>-* tool.
  # NOTE(review): the literal "output/" prefix here (rather than "$OUTPUT")
  # looks suspicious -- confirm against upstream musl-cross-make layout.
  if [ ! -z "$RENAME" ] && [ "$TYPE" == cross ]
  then
    CONTEXT="output/$RENAME-cross/bin"
    for i in "$CONTEXT/$TARGET-"*
    do
      X="$(echo $i | sed "s@.*/$TARGET-\([^-]*\)@\1@")"
      ln -sf "$TARGET-$X" "$CONTEXT/$RENAME-$X"
    done
  fi

  # Prevent cross compiler reusing dynamically linked host build files for
  # $BOOTSTRAP arch
  [ -z "$TYPE" ] && make clean

  if [ "$TYPE" == native ]
  then
    # gcc looks in "../usr/include" but not "/bin/../include" (relative to the
    # executable). That means /usr/bin/gcc looks in /usr/usr/include, so that's
    # not a fix either. So add a NOP symlink as a workaround for The Crazy.
    ln -s . "$OUTPUT/usr" || exit 1

    # If mksquashfs is available, pack the native toolchain into a squashfs
    # image and, unless $CLEANUP is set, delete the unpacked tree.
    [ ! -z "$(which mksquashfs 2>/dev/null)" ] &&
    mksquashfs "$OUTPUT" "$OUTPUT.sqf" -all-root &&
    [ -z "$CLEANUP" ] && rm -rf "$OUTPUT"
  fi
}
# Expand compressed target into binutils/gcc "tuple" and call make_toolchain
# Expand compressed target into binutils/gcc "tuple" and call make_toolchain
# Argument format: "arch[@rename]:muslsuffix:gcc-config", e.g.
#   "sh2eb:fdpic:--with-cpu=mj2"  -> TARGET=sh2eb-linux-muslfdpic
#   "x86_64@x32:x32:"             -> TARGET=x86_64-linux-muslx32, renamed x32
make_tuple()
{
  PART1=${1/:*/}  # before the first ':' -- arch, optionally "arch@rename"
  PART3=${1/*:/}  # after the last ':'   -- extra gcc configure options
  # Everything between the first and last ':' -- the musl ABI suffix.
  PART2=${1:$((${#PART1}+1)):$((${#1}-${#PART3}-${#PART1}-2))}

  # Do we need to rename this toolchain after building it?
  RENAME=${PART1/*@/}
  [ "$RENAME" == "$PART1" ] && RENAME=
  PART1=${PART1/@*/}
  TARGET=${PART1}-linux-musl${PART2}

  [ -z "$NOCLEAN" ] && rm -rf build

  # Build the static cross compiler, then the native one, logging each run.
  for TYPE in static native
  do
    TYPE=$TYPE TARGET=$TARGET GCC_CONFIG="$PART3" RENAME="$RENAME" \
      make_toolchain 2>&1 | tee "$OUTPUT"/log/${RENAME:-$PART1}-${TYPE}.log
  done
}
# Packages detect nommu via the absence of fork(). Musl provides a broken fork()
# on nommu builds that always returns -ENOSYS at runtime. Rip it out.
# (Currently only for superh/jcore.)
#
# Works by dropping patch files into musl-cross-make's patches/ directories
# so they get applied during the build.  The heredoc bodies below are
# literal patch content and must not be edited.
fix_nommu()
{
  # Rich won't merge this: make litecross also pass --enable-twoprocess
  # wherever it configures with --enable-fdpic.
  sed -i 's/--enable-fdpic$/& --enable-twoprocess/' litecross/Makefile

  # Read the musl version out of the top-level Makefile so the patch lands
  # in the matching patches/musl-$VERSION directory.
  PP=patches/musl-"$(sed -n 's/MUSL_VER[ \t]*=[ \t]*//p' Makefile)"
  mkdir -p "$PP" &&
  cat > "$PP"/0001-nommu.patch << 'EOF'
--- a/include/features.h
+++ b/include/features.h
@@ -3,2 +3,4 @@
+#define __MUSL__ 1
+
#if defined(_ALL_SOURCE) && !defined(_GNU_SOURCE)
--- a/src/legacy/daemon.c
+++ b/src/legacy/daemon.c
@@ -17,3 +17,3 @@
- switch(fork()) {
+ switch(vfork()) {
case 0: break;
@@ -25,3 +25,3 @@
- switch(fork()) {
+ switch(vfork()) {
case 0: break;
--- a/src/misc/forkpty.c
+++ b/src/misc/forkpty.c
@@ -8,2 +8,3 @@
+#ifndef __SH_FDPIC__
int forkpty(int *pm, char *name, const struct termios *tio, const struct winsize *ws)
@@ -57,1 +58,2 @@
}
+#endif
--- a/src/misc/wordexp.c
+++ b/src/misc/wordexp.c
@@ -25,2 +25,3 @@
+#ifndef __SH_FDPIC__
static int do_wordexp(const char *s, wordexp_t *we, int flags)
@@ -177,2 +178,3 @@
}
+#endif
--- a/src/process/fork.c
+++ b/src/process/fork.c
@@ -7,2 +7,3 @@
+#ifndef __SH_FDPIC__
static void dummy(int x)
@@ -37,1 +38,2 @@
}
+#endif
--- a/Makefile
+++ b/Makefile
@@ -100,3 +100,3 @@
cp $< $@
- sed -n -e s/__NR_/SYS_/p < $< >> $@
+ sed -e s/__NR_/SYS_/ < $< >> $@
--- a/arch/sh/bits/syscall.h.in
+++ b/arch/sh/bits/syscall.h.in
@@ -2,3 +2,5 @@
#define __NR_exit 1
+#ifndef __SH_FDPIC__
#define __NR_fork 2
+#endif
#define __NR_read 3
EOF
  # I won't sign the FSF's copyright assignment
  # (same fork->vfork substitution copied into every gcc version's patches).
  tee $(for i in patches/gcc-*; do echo $i/099-vfork.patch; done) > /dev/null << 'EOF'
--- gcc-8.3.0/fixincludes/procopen.c	2005-08-14 19:50:43.000000000 -0500
+++ gcc-bak/fixincludes/procopen.c	2020-02-06 23:27:15.408071708 -0600
@@ -116,3 +116,3 @@
*/
- ch_id = fork ();
+ ch_id = vfork ();
switch (ch_id)
EOF
}
# Apply the nommu patches before building anything.
fix_nommu || exit 1
mkdir -p "$OUTPUT"/log

# Make bootstrap compiler (no $TYPE, dynamically linked against host libc)
# We build the rest of the cross compilers with this so they're linked against
# musl-libc, because glibc doesn't fully support static linking and dynamic
# binaries aren't really portable between distributions
TARGET=$BOOTSTRAP make_toolchain 2>&1 | tee -a "$OUTPUT/log/$BOOTSTRAP"-host.log

# With arguments: build only the requested tuples.  Without: the full list.
if [ $# -gt 0 ]
then
  for i in "$@"
  do
    make_tuple "$i"
  done
else
  # Here's the list of cross compilers supported by this build script.
  # First target builds a proper version of the $BOOTSTRAP compiler above,
  # which is used to build the rest (in alphabetical order)
  for i in i686:: \
    aarch64:eabi: armv4l:eabihf:"--with-arch=armv5t --with-float=soft" \
    "armv5l:eabihf:--with-arch=armv5t --with-float=vfp" \
    "armv7l:eabihf:--with-arch=armv7-a --with-float=vfp" \
    "armv7m:eabi:--with-arch=armv7-m --with-mode=thumb --disable-libatomic --enable-default-pie" \
    armv7r:eabihf:"--with-arch=armv7-r --enable-default-pie" \
    i486:: m68k:: microblaze:: mips:: mips64:: mipsel:: powerpc:: \
    powerpc64:: powerpc64le:: s390x:: sh2eb:fdpic:--with-cpu=mj2 \
    sh4::--enable-incomplete-targets x86_64:: x86_64@x32:x32:
  do
    make_tuple "$i"
  done
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.