file_name
large_stringlengths
4
140
prefix
large_stringlengths
0
39k
suffix
large_stringlengths
0
36.1k
middle
large_stringlengths
0
29.4k
fim_type
large_stringclasses
4 values
cluster_feeder.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package input import ( "context" "fmt" "time" apiv1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/fields" "k8s.io/apimachinery/pkg/labels" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/apimachinery/pkg/watch" kube_client "k8s.io/client-go/kubernetes" corev1 "k8s.io/client-go/kubernetes/typed/core/v1" v1lister "k8s.io/client-go/listers/core/v1" "k8s.io/client-go/tools/cache" "k8s.io/klog/v2" vpa_types "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/apis/autoscaling.k8s.io/v1" vpa_api "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/client/clientset/versioned/typed/autoscaling.k8s.io/v1" vpa_lister "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/client/listers/autoscaling.k8s.io/v1" controllerfetcher "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/controller_fetcher" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/history" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/metrics" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/oom" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/spec" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/model" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/target" metrics_recommender "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/utils/metrics/recommender" ) const ( evictionWatchRetryWait = 10 * time.Second evictionWatchJitterFactor = 0.5 // 
DefaultRecommenderName recommender name explicitly (and so implicitly specify that the default recommender should handle them) DefaultRecommenderName = "default" ) // ClusterStateFeeder can update state of ClusterState object. type ClusterStateFeeder interface { // InitFromHistoryProvider loads historical pod spec into clusterState. InitFromHistoryProvider(historyProvider history.HistoryProvider) // InitFromCheckpoints loads historical checkpoints into clusterState. InitFromCheckpoints() // LoadVPAs updates clusterState with current state of VPAs. LoadVPAs() // LoadPods updates clusterState with current specification of Pods and their Containers. LoadPods() // LoadRealTimeMetrics updates clusterState with current usage metrics of containers. LoadRealTimeMetrics() // GarbageCollectCheckpoints removes historical checkpoints that don't have a matching VPA. GarbageCollectCheckpoints() } // ClusterStateFeederFactory makes instances of ClusterStateFeeder. type ClusterStateFeederFactory struct { ClusterState *model.ClusterState KubeClient kube_client.Interface MetricsClient metrics.MetricsClient VpaCheckpointClient vpa_api.VerticalPodAutoscalerCheckpointsGetter VpaLister vpa_lister.VerticalPodAutoscalerLister PodLister v1lister.PodLister OOMObserver oom.Observer SelectorFetcher target.VpaTargetSelectorFetcher MemorySaveMode bool ControllerFetcher controllerfetcher.ControllerFetcher RecommenderName string } // Make creates new ClusterStateFeeder with internal data providers, based on kube client. 
func (m ClusterStateFeederFactory) Make() *clusterStateFeeder { return &clusterStateFeeder{ coreClient: m.KubeClient.CoreV1(), metricsClient: m.MetricsClient, oomChan: m.OOMObserver.GetObservedOomsChannel(), vpaCheckpointClient: m.VpaCheckpointClient, vpaLister: m.VpaLister, clusterState: m.ClusterState, specClient: spec.NewSpecClient(m.PodLister), selectorFetcher: m.SelectorFetcher, memorySaveMode: m.MemorySaveMode, controllerFetcher: m.ControllerFetcher, recommenderName: m.RecommenderName, } } // WatchEvictionEventsWithRetries watches new Events with reason=Evicted and passes them to the observer. func WatchEvictionEventsWithRetries(kubeClient kube_client.Interface, observer oom.Observer, namespace string) { go func() { options := metav1.ListOptions{ FieldSelector: "reason=Evicted", } watchEvictionEventsOnce := func() { watchInterface, err := kubeClient.CoreV1().Events(namespace).Watch(context.TODO(), options) if err != nil { klog.Errorf("Cannot initialize watching events. Reason %v", err) return } watchEvictionEvents(watchInterface.ResultChan(), observer) } for { watchEvictionEventsOnce() // Wait between attempts, retrying too often breaks API server. waitTime := wait.Jitter(evictionWatchRetryWait, evictionWatchJitterFactor) klog.V(1).Infof("An attempt to watch eviction events finished. Waiting %v before the next one.", waitTime) time.Sleep(waitTime) } }() } func watchEvictionEvents(evictedEventChan <-chan watch.Event, observer oom.Observer) { for { evictedEvent, ok := <-evictedEventChan if !ok { klog.V(3).Infof("Eviction event chan closed") return } if evictedEvent.Type == watch.Added { evictedEvent, ok := evictedEvent.Object.(*apiv1.Event) if !ok { continue } observer.OnEvent(evictedEvent) } } } // Creates clients watching pods: PodLister (listing only not terminated pods). 
func newPodClients(kubeClient kube_client.Interface, resourceEventHandler cache.ResourceEventHandler, namespace string) v1lister.PodLister { // We are interested in pods which are Running or Unknown (in case the pod is // running but there are some transient errors we don't want to delete it from // our model). // We don't want to watch Pending pods because they didn't generate any usage // yet. // Succeeded and Failed failed pods don't generate any usage anymore but we // don't necessarily want to immediately delete them. selector := fields.ParseSelectorOrDie("status.phase!=" + string(apiv1.PodPending)) podListWatch := cache.NewListWatchFromClient(kubeClient.CoreV1().RESTClient(), "pods", namespace, selector) indexer, controller := cache.NewIndexerInformer( podListWatch, &apiv1.Pod{}, time.Hour, resourceEventHandler, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, ) podLister := v1lister.NewPodLister(indexer) stopCh := make(chan struct{}) go controller.Run(stopCh) return podLister } // NewPodListerAndOOMObserver creates pair of pod lister and OOM observer. 
func NewPodListerAndOOMObserver(kubeClient kube_client.Interface, namespace string) (v1lister.PodLister, oom.Observer) { oomObserver := oom.NewObserver() podLister := newPodClients(kubeClient, oomObserver, namespace) WatchEvictionEventsWithRetries(kubeClient, oomObserver, namespace) return podLister, oomObserver } type clusterStateFeeder struct { coreClient corev1.CoreV1Interface specClient spec.SpecClient metricsClient metrics.MetricsClient oomChan <-chan oom.OomInfo vpaCheckpointClient vpa_api.VerticalPodAutoscalerCheckpointsGetter vpaLister vpa_lister.VerticalPodAutoscalerLister clusterState *model.ClusterState selectorFetcher target.VpaTargetSelectorFetcher memorySaveMode bool controllerFetcher controllerfetcher.ControllerFetcher recommenderName string } func (feeder *clusterStateFeeder) InitFromHistoryProvider(historyProvider history.HistoryProvider) { klog.V(3).Info("Initializing VPA from history provider") clusterHistory, err := historyProvider.GetClusterHistory() if err != nil { klog.Errorf("Cannot get cluster history: %v", err) } for podID, podHistory := range clusterHistory { klog.V(4).Infof("Adding pod %v with labels %v", podID, podHistory.LastLabels) feeder.clusterState.AddOrUpdatePod(podID, podHistory.LastLabels, apiv1.PodUnknown) for containerName, sampleList := range podHistory.Samples { containerID := model.ContainerID{ PodID: podID, ContainerName: containerName, } if err = feeder.clusterState.AddOrUpdateContainer(containerID, nil); err != nil { klog.Warningf("Failed to add container %+v. 
Reason: %+v", containerID, err) } klog.V(4).Infof("Adding %d samples for container %v", len(sampleList), containerID) for _, sample := range sampleList { if err := feeder.clusterState.AddSample( &model.ContainerUsageSampleWithKey{ ContainerUsageSample: sample, Container: containerID, }); err != nil { klog.Warningf("Error adding metric sample for container %v: %v", containerID, err) } } } } } func (feeder *clusterStateFeeder) setVpaCheckpoint(checkpoint *vpa_types.VerticalPodAutoscalerCheckpoint) error { vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName} vpa, exists := feeder.clusterState.Vpas[vpaID] if !exists { return fmt.Errorf("cannot load checkpoint to missing VPA object %+v", vpaID) } cs := model.NewAggregateContainerState() err := cs.LoadFromCheckpoint(&checkpoint.Status) if err != nil { return fmt.Errorf("cannot load checkpoint for VPA %+v. Reason: %v", vpa.ID, err) } vpa.ContainersInitialAggregateState[checkpoint.Spec.ContainerName] = cs return nil } func (feeder *clusterStateFeeder) InitFromCheckpoints()
func (feeder *clusterStateFeeder) GarbageCollectCheckpoints() { klog.V(3).Info("Starting garbage collection of checkpoints") feeder.LoadVPAs() namespaceList, err := feeder.coreClient.Namespaces().List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list namespaces. Reason: %+v", err) return } for _, namespaceItem := range namespaceList.Items { namespace := namespaceItem.Name checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err) } for _, checkpoint := range checkpointList.Items { vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName} _, exists := feeder.clusterState.Vpas[vpaID] if !exists { err = feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).Delete(context.TODO(), checkpoint.Name, metav1.DeleteOptions{}) if err == nil { klog.V(3).Infof("Orphaned VPA checkpoint cleanup - deleting %v/%v.", namespace, checkpoint.Name) } else { klog.Errorf("Cannot delete VPA checkpoint %v/%v. 
Reason: %+v", namespace, checkpoint.Name, err) } } } } } func implicitDefaultRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector) bool { return len(selectors) == 0 } func selectsRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector, name *string) bool { for _, s := range selectors { if s.Name == *name { return true } } return false } // Filter VPA objects whose specified recommender names are not default func filterVPAs(feeder *clusterStateFeeder, allVpaCRDs []*vpa_types.VerticalPodAutoscaler) []*vpa_types.VerticalPodAutoscaler { klog.V(3).Infof("Start selecting the vpaCRDs.") var vpaCRDs []*vpa_types.VerticalPodAutoscaler for _, vpaCRD := range allVpaCRDs { if feeder.recommenderName == DefaultRecommenderName { if !implicitDefaultRecommender(vpaCRD.Spec.Recommenders) && !selectsRecommender(vpaCRD.Spec.Recommenders, &feeder.recommenderName) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as current recommender's name %v doesn't appear among its recommenders", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName) continue } } else { if implicitDefaultRecommender(vpaCRD.Spec.Recommenders) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as %v recommender doesn't process CRDs implicitly destined to %v recommender", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName, DefaultRecommenderName) continue } if !selectsRecommender(vpaCRD.Spec.Recommenders, &feeder.recommenderName) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as current recommender's name %v doesn't appear among its recommenders", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName) continue } } vpaCRDs = append(vpaCRDs, vpaCRD) } return vpaCRDs } // LoadVPAs fetches VPA objects and loads them into the cluster state. func (feeder *clusterStateFeeder) LoadVPAs() { // List VPA API objects. allVpaCRDs, err := feeder.vpaLister.List(labels.Everything()) if err != nil { klog.Errorf("Cannot list VPAs. 
Reason: %+v", err) return } // Filter out VPAs that specified recommenders with names not equal to "default" vpaCRDs := filterVPAs(feeder, allVpaCRDs) klog.V(3).Infof("Fetched %d VPAs.", len(vpaCRDs)) // Add or update existing VPAs in the model. vpaKeys := make(map[model.VpaID]bool) for _, vpaCRD := range vpaCRDs { vpaID := model.VpaID{ Namespace: vpaCRD.Namespace, VpaName: vpaCRD.Name, } selector, conditions := feeder.getSelector(vpaCRD) klog.V(4).Infof("Using selector %s for VPA %s/%s", selector.String(), vpaCRD.Namespace, vpaCRD.Name) if feeder.clusterState.AddOrUpdateVpa(vpaCRD, selector) == nil { // Successfully added VPA to the model. vpaKeys[vpaID] = true for _, condition := range conditions { if condition.delete { delete(feeder.clusterState.Vpas[vpaID].Conditions, condition.conditionType) } else { feeder.clusterState.Vpas[vpaID].Conditions.Set(condition.conditionType, true, "", condition.message) } } } } // Delete non-existent VPAs from the model. for vpaID := range feeder.clusterState.Vpas { if _, exists := vpaKeys[vpaID]; !exists { klog.V(3).Infof("Deleting VPA %v", vpaID) if err := feeder.clusterState.DeleteVpa(vpaID); err != nil { klog.Errorf("Deleting VPA %v failed: %v", vpaID, err) } } } feeder.clusterState.ObservedVpas = vpaCRDs } // LoadPods loads pod into the cluster state. func (feeder *clusterStateFeeder) LoadPods() { podSpecs, err := feeder.specClient.GetPodSpecs() if err != nil { klog.Errorf("Cannot get SimplePodSpecs. 
Reason: %+v", err) } pods := make(map[model.PodID]*spec.BasicPodSpec) for _, spec := range podSpecs { pods[spec.ID] = spec } for key := range feeder.clusterState.Pods { if _, exists := pods[key]; !exists { klog.V(3).Infof("Deleting Pod %v", key) feeder.clusterState.DeletePod(key) } } for _, pod := range pods { if feeder.memorySaveMode && !feeder.matchesVPA(pod) { continue } feeder.clusterState.AddOrUpdatePod(pod.ID, pod.PodLabels, pod.Phase) for _, container := range pod.Containers { if err = feeder.clusterState.AddOrUpdateContainer(container.ID, container.Request); err != nil { klog.Warningf("Failed to add container %+v. Reason: %+v", container.ID, err) } } } } func (feeder *clusterStateFeeder) LoadRealTimeMetrics() { containersMetrics, err := feeder.metricsClient.GetContainersMetrics() if err != nil { klog.Errorf("Cannot get ContainerMetricsSnapshot from MetricsClient. Reason: %+v", err) } sampleCount := 0 droppedSampleCount := 0 for _, containerMetrics := range containersMetrics { for _, sample := range newContainerUsageSamplesWithKey(containerMetrics) { if err := feeder.clusterState.AddSample(sample); err != nil { // Not all pod states are tracked in memory saver mode if _, isKeyError := err.(model.KeyError); isKeyError && feeder.memorySaveMode { continue } klog.Warningf("Error adding metric sample for container %v: %v", sample.Container, err) droppedSampleCount++ } else { sampleCount++ } } } klog.V(3).Infof("ClusterSpec fed with #%v ContainerUsageSamples for #%v containers. Dropped #%v samples.", sampleCount, len(containersMetrics), droppedSampleCount) Loop: for { select { case oomInfo := <-feeder.oomChan: klog.V(3).Infof("OOM detected %+v", oomInfo) if err = feeder.clusterState.RecordOOM(oomInfo.ContainerID, oomInfo.Timestamp, oomInfo.Memory); err != nil { klog.Warningf("Failed to record OOM %+v. 
Reason: %+v", oomInfo, err) } default: break Loop } } metrics_recommender.RecordAggregateContainerStatesCount(feeder.clusterState.StateMapSize()) } func (feeder *clusterStateFeeder) matchesVPA(pod *spec.BasicPodSpec) bool { for vpaKey, vpa := range feeder.clusterState.Vpas { podLabels := labels.Set(pod.PodLabels) if vpaKey.Namespace == pod.ID.Namespace && vpa.PodSelector.Matches(podLabels) { return true } } return false } func newContainerUsageSamplesWithKey(metrics *metrics.ContainerMetricsSnapshot) []*model.ContainerUsageSampleWithKey { var samples []*model.ContainerUsageSampleWithKey for metricName, resourceAmount := range metrics.Usage { sample := &model.ContainerUsageSampleWithKey{ Container: metrics.ID, ContainerUsageSample: model.ContainerUsageSample{ MeasureStart: metrics.SnapshotTime, Resource: metricName, Usage: resourceAmount, }, } samples = append(samples, sample) } return samples } type condition struct { conditionType vpa_types.VerticalPodAutoscalerConditionType delete bool message string } func (feeder *clusterStateFeeder) validateTargetRef(vpa *vpa_types.VerticalPodAutoscaler) (bool, condition) { // if vpa.Spec.TargetRef == nil { return false, condition{} } k := controllerfetcher.ControllerKeyWithAPIVersion{ ControllerKey: controllerfetcher.ControllerKey{ Namespace: vpa.Namespace, Kind: vpa.Spec.TargetRef.Kind, Name: vpa.Spec.TargetRef.Name, }, ApiVersion: vpa.Spec.TargetRef.APIVersion, } top, err := feeder.controllerFetcher.FindTopMostWellKnownOrScalable(&k) if err != nil { return false, condition{conditionType: vpa_types.ConfigUnsupported, delete: false, message: fmt.Sprintf("Error checking if target is a topmost well-known or scalable controller: %s", err)} } if top == nil { return false, condition{conditionType: vpa_types.ConfigUnsupported, delete: false, message: fmt.Sprintf("Unknown error during checking if target is a topmost well-known or scalable controller: %s", err)} } if *top != k { return false, condition{conditionType: 
vpa_types.ConfigUnsupported, delete: false, message: "The targetRef controller has a parent but it should point to a topmost well-known or scalable controller"} } return true, condition{} } func (feeder *clusterStateFeeder) getSelector(vpa *vpa_types.VerticalPodAutoscaler) (labels.Selector, []condition) { selector, fetchErr := feeder.selectorFetcher.Fetch(vpa) if selector != nil { validTargetRef, unsupportedCondition := feeder.validateTargetRef(vpa) if !validTargetRef { return labels.Nothing(), []condition{ unsupportedCondition, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } } return selector, []condition{ {conditionType: vpa_types.ConfigUnsupported, delete: true}, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } } msg := "Cannot read targetRef" if fetchErr != nil { klog.Errorf("Cannot get target selector from VPA's targetRef. Reason: %+v", fetchErr) msg = fmt.Sprintf("Cannot read targetRef. Reason: %s", fetchErr.Error()) } return labels.Nothing(), []condition{ {conditionType: vpa_types.ConfigUnsupported, delete: false, message: msg}, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } }
{ klog.V(3).Info("Initializing VPA from checkpoints") feeder.LoadVPAs() namespaces := make(map[string]bool) for _, v := range feeder.clusterState.Vpas { namespaces[v.ID.Namespace] = true } for namespace := range namespaces { klog.V(3).Infof("Fetching checkpoints from namespace %s", namespace) checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err) } for _, checkpoint := range checkpointList.Items { klog.V(3).Infof("Loading VPA %s/%s checkpoint for %s", checkpoint.ObjectMeta.Namespace, checkpoint.Spec.VPAObjectName, checkpoint.Spec.ContainerName) err = feeder.setVpaCheckpoint(&checkpoint) if err != nil { klog.Errorf("Error while loading checkpoint. Reason: %+v", err) } } } }
identifier_body
cluster_feeder.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package input import ( "context" "fmt" "time" apiv1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/fields" "k8s.io/apimachinery/pkg/labels" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/apimachinery/pkg/watch" kube_client "k8s.io/client-go/kubernetes" corev1 "k8s.io/client-go/kubernetes/typed/core/v1" v1lister "k8s.io/client-go/listers/core/v1" "k8s.io/client-go/tools/cache" "k8s.io/klog/v2" vpa_types "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/apis/autoscaling.k8s.io/v1" vpa_api "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/client/clientset/versioned/typed/autoscaling.k8s.io/v1" vpa_lister "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/client/listers/autoscaling.k8s.io/v1" controllerfetcher "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/controller_fetcher" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/history" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/metrics" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/oom" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/spec" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/model" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/target" metrics_recommender "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/utils/metrics/recommender" ) const ( evictionWatchRetryWait = 10 * time.Second evictionWatchJitterFactor = 0.5 // 
DefaultRecommenderName recommender name explicitly (and so implicitly specify that the default recommender should handle them) DefaultRecommenderName = "default" ) // ClusterStateFeeder can update state of ClusterState object. type ClusterStateFeeder interface { // InitFromHistoryProvider loads historical pod spec into clusterState. InitFromHistoryProvider(historyProvider history.HistoryProvider) // InitFromCheckpoints loads historical checkpoints into clusterState. InitFromCheckpoints() // LoadVPAs updates clusterState with current state of VPAs. LoadVPAs() // LoadPods updates clusterState with current specification of Pods and their Containers. LoadPods() // LoadRealTimeMetrics updates clusterState with current usage metrics of containers. LoadRealTimeMetrics() // GarbageCollectCheckpoints removes historical checkpoints that don't have a matching VPA. GarbageCollectCheckpoints() } // ClusterStateFeederFactory makes instances of ClusterStateFeeder. type ClusterStateFeederFactory struct { ClusterState *model.ClusterState KubeClient kube_client.Interface MetricsClient metrics.MetricsClient VpaCheckpointClient vpa_api.VerticalPodAutoscalerCheckpointsGetter VpaLister vpa_lister.VerticalPodAutoscalerLister PodLister v1lister.PodLister OOMObserver oom.Observer SelectorFetcher target.VpaTargetSelectorFetcher MemorySaveMode bool ControllerFetcher controllerfetcher.ControllerFetcher RecommenderName string } // Make creates new ClusterStateFeeder with internal data providers, based on kube client. 
func (m ClusterStateFeederFactory) Make() *clusterStateFeeder { return &clusterStateFeeder{ coreClient: m.KubeClient.CoreV1(), metricsClient: m.MetricsClient, oomChan: m.OOMObserver.GetObservedOomsChannel(), vpaCheckpointClient: m.VpaCheckpointClient, vpaLister: m.VpaLister, clusterState: m.ClusterState, specClient: spec.NewSpecClient(m.PodLister), selectorFetcher: m.SelectorFetcher, memorySaveMode: m.MemorySaveMode, controllerFetcher: m.ControllerFetcher, recommenderName: m.RecommenderName, } } // WatchEvictionEventsWithRetries watches new Events with reason=Evicted and passes them to the observer. func WatchEvictionEventsWithRetries(kubeClient kube_client.Interface, observer oom.Observer, namespace string) { go func() { options := metav1.ListOptions{ FieldSelector: "reason=Evicted", } watchEvictionEventsOnce := func() { watchInterface, err := kubeClient.CoreV1().Events(namespace).Watch(context.TODO(), options) if err != nil { klog.Errorf("Cannot initialize watching events. Reason %v", err) return } watchEvictionEvents(watchInterface.ResultChan(), observer) } for { watchEvictionEventsOnce() // Wait between attempts, retrying too often breaks API server. waitTime := wait.Jitter(evictionWatchRetryWait, evictionWatchJitterFactor) klog.V(1).Infof("An attempt to watch eviction events finished. Waiting %v before the next one.", waitTime) time.Sleep(waitTime) } }() } func watchEvictionEvents(evictedEventChan <-chan watch.Event, observer oom.Observer) { for { evictedEvent, ok := <-evictedEventChan if !ok { klog.V(3).Infof("Eviction event chan closed") return } if evictedEvent.Type == watch.Added { evictedEvent, ok := evictedEvent.Object.(*apiv1.Event) if !ok { continue } observer.OnEvent(evictedEvent) } } } // Creates clients watching pods: PodLister (listing only not terminated pods). 
func newPodClients(kubeClient kube_client.Interface, resourceEventHandler cache.ResourceEventHandler, namespace string) v1lister.PodLister { // We are interested in pods which are Running or Unknown (in case the pod is // running but there are some transient errors we don't want to delete it from // our model). // We don't want to watch Pending pods because they didn't generate any usage // yet. // Succeeded and Failed failed pods don't generate any usage anymore but we // don't necessarily want to immediately delete them. selector := fields.ParseSelectorOrDie("status.phase!=" + string(apiv1.PodPending)) podListWatch := cache.NewListWatchFromClient(kubeClient.CoreV1().RESTClient(), "pods", namespace, selector) indexer, controller := cache.NewIndexerInformer( podListWatch, &apiv1.Pod{}, time.Hour, resourceEventHandler, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, ) podLister := v1lister.NewPodLister(indexer) stopCh := make(chan struct{}) go controller.Run(stopCh) return podLister } // NewPodListerAndOOMObserver creates pair of pod lister and OOM observer. 
func NewPodListerAndOOMObserver(kubeClient kube_client.Interface, namespace string) (v1lister.PodLister, oom.Observer) { oomObserver := oom.NewObserver() podLister := newPodClients(kubeClient, oomObserver, namespace) WatchEvictionEventsWithRetries(kubeClient, oomObserver, namespace) return podLister, oomObserver } type clusterStateFeeder struct { coreClient corev1.CoreV1Interface specClient spec.SpecClient metricsClient metrics.MetricsClient oomChan <-chan oom.OomInfo vpaCheckpointClient vpa_api.VerticalPodAutoscalerCheckpointsGetter vpaLister vpa_lister.VerticalPodAutoscalerLister clusterState *model.ClusterState selectorFetcher target.VpaTargetSelectorFetcher memorySaveMode bool controllerFetcher controllerfetcher.ControllerFetcher recommenderName string } func (feeder *clusterStateFeeder) InitFromHistoryProvider(historyProvider history.HistoryProvider) { klog.V(3).Info("Initializing VPA from history provider") clusterHistory, err := historyProvider.GetClusterHistory() if err != nil { klog.Errorf("Cannot get cluster history: %v", err) } for podID, podHistory := range clusterHistory { klog.V(4).Infof("Adding pod %v with labels %v", podID, podHistory.LastLabels) feeder.clusterState.AddOrUpdatePod(podID, podHistory.LastLabels, apiv1.PodUnknown) for containerName, sampleList := range podHistory.Samples { containerID := model.ContainerID{ PodID: podID, ContainerName: containerName, } if err = feeder.clusterState.AddOrUpdateContainer(containerID, nil); err != nil { klog.Warningf("Failed to add container %+v. 
Reason: %+v", containerID, err) } klog.V(4).Infof("Adding %d samples for container %v", len(sampleList), containerID) for _, sample := range sampleList { if err := feeder.clusterState.AddSample( &model.ContainerUsageSampleWithKey{ ContainerUsageSample: sample, Container: containerID, }); err != nil { klog.Warningf("Error adding metric sample for container %v: %v", containerID, err) } } } } } func (feeder *clusterStateFeeder) setVpaCheckpoint(checkpoint *vpa_types.VerticalPodAutoscalerCheckpoint) error { vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName} vpa, exists := feeder.clusterState.Vpas[vpaID] if !exists { return fmt.Errorf("cannot load checkpoint to missing VPA object %+v", vpaID) } cs := model.NewAggregateContainerState() err := cs.LoadFromCheckpoint(&checkpoint.Status) if err != nil { return fmt.Errorf("cannot load checkpoint for VPA %+v. Reason: %v", vpa.ID, err) } vpa.ContainersInitialAggregateState[checkpoint.Spec.ContainerName] = cs return nil } func (feeder *clusterStateFeeder) InitFromCheckpoints() { klog.V(3).Info("Initializing VPA from checkpoints") feeder.LoadVPAs() namespaces := make(map[string]bool) for _, v := range feeder.clusterState.Vpas { namespaces[v.ID.Namespace] = true } for namespace := range namespaces { klog.V(3).Infof("Fetching checkpoints from namespace %s", namespace) checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err) } for _, checkpoint := range checkpointList.Items { klog.V(3).Infof("Loading VPA %s/%s checkpoint for %s", checkpoint.ObjectMeta.Namespace, checkpoint.Spec.VPAObjectName, checkpoint.Spec.ContainerName) err = feeder.setVpaCheckpoint(&checkpoint) if err != nil { klog.Errorf("Error while loading checkpoint. 
Reason: %+v", err) } } } } func (feeder *clusterStateFeeder) GarbageCollectCheckpoints() { klog.V(3).Info("Starting garbage collection of checkpoints") feeder.LoadVPAs() namespaceList, err := feeder.coreClient.Namespaces().List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list namespaces. Reason: %+v", err) return } for _, namespaceItem := range namespaceList.Items { namespace := namespaceItem.Name checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err) } for _, checkpoint := range checkpointList.Items { vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName} _, exists := feeder.clusterState.Vpas[vpaID] if !exists { err = feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).Delete(context.TODO(), checkpoint.Name, metav1.DeleteOptions{}) if err == nil { klog.V(3).Infof("Orphaned VPA checkpoint cleanup - deleting %v/%v.", namespace, checkpoint.Name) } else { klog.Errorf("Cannot delete VPA checkpoint %v/%v. 
Reason: %+v", namespace, checkpoint.Name, err) } } } } } func implicitDefaultRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector) bool { return len(selectors) == 0 } func selectsRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector, name *string) bool { for _, s := range selectors { if s.Name == *name { return true } } return false } // Filter VPA objects whose specified recommender names are not default func filterVPAs(feeder *clusterStateFeeder, allVpaCRDs []*vpa_types.VerticalPodAutoscaler) []*vpa_types.VerticalPodAutoscaler { klog.V(3).Infof("Start selecting the vpaCRDs.") var vpaCRDs []*vpa_types.VerticalPodAutoscaler for _, vpaCRD := range allVpaCRDs { if feeder.recommenderName == DefaultRecommenderName { if !implicitDefaultRecommender(vpaCRD.Spec.Recommenders) && !selectsRecommender(vpaCRD.Spec.Recommenders, &feeder.recommenderName) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as current recommender's name %v doesn't appear among its recommenders", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName) continue } } else { if implicitDefaultRecommender(vpaCRD.Spec.Recommenders) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as %v recommender doesn't process CRDs implicitly destined to %v recommender", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName, DefaultRecommenderName) continue } if !selectsRecommender(vpaCRD.Spec.Recommenders, &feeder.recommenderName) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as current recommender's name %v doesn't appear among its recommenders", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName) continue } } vpaCRDs = append(vpaCRDs, vpaCRD) } return vpaCRDs } // LoadVPAs fetches VPA objects and loads them into the cluster state. func (feeder *clusterStateFeeder) LoadVPAs() { // List VPA API objects. allVpaCRDs, err := feeder.vpaLister.List(labels.Everything()) if err != nil { klog.Errorf("Cannot list VPAs. 
Reason: %+v", err) return } // Filter out VPAs that specified recommenders with names not equal to "default" vpaCRDs := filterVPAs(feeder, allVpaCRDs) klog.V(3).Infof("Fetched %d VPAs.", len(vpaCRDs)) // Add or update existing VPAs in the model. vpaKeys := make(map[model.VpaID]bool) for _, vpaCRD := range vpaCRDs { vpaID := model.VpaID{ Namespace: vpaCRD.Namespace,
} selector, conditions := feeder.getSelector(vpaCRD) klog.V(4).Infof("Using selector %s for VPA %s/%s", selector.String(), vpaCRD.Namespace, vpaCRD.Name) if feeder.clusterState.AddOrUpdateVpa(vpaCRD, selector) == nil { // Successfully added VPA to the model. vpaKeys[vpaID] = true for _, condition := range conditions { if condition.delete { delete(feeder.clusterState.Vpas[vpaID].Conditions, condition.conditionType) } else { feeder.clusterState.Vpas[vpaID].Conditions.Set(condition.conditionType, true, "", condition.message) } } } } // Delete non-existent VPAs from the model. for vpaID := range feeder.clusterState.Vpas { if _, exists := vpaKeys[vpaID]; !exists { klog.V(3).Infof("Deleting VPA %v", vpaID) if err := feeder.clusterState.DeleteVpa(vpaID); err != nil { klog.Errorf("Deleting VPA %v failed: %v", vpaID, err) } } } feeder.clusterState.ObservedVpas = vpaCRDs } // LoadPods loads pod into the cluster state. func (feeder *clusterStateFeeder) LoadPods() { podSpecs, err := feeder.specClient.GetPodSpecs() if err != nil { klog.Errorf("Cannot get SimplePodSpecs. Reason: %+v", err) } pods := make(map[model.PodID]*spec.BasicPodSpec) for _, spec := range podSpecs { pods[spec.ID] = spec } for key := range feeder.clusterState.Pods { if _, exists := pods[key]; !exists { klog.V(3).Infof("Deleting Pod %v", key) feeder.clusterState.DeletePod(key) } } for _, pod := range pods { if feeder.memorySaveMode && !feeder.matchesVPA(pod) { continue } feeder.clusterState.AddOrUpdatePod(pod.ID, pod.PodLabels, pod.Phase) for _, container := range pod.Containers { if err = feeder.clusterState.AddOrUpdateContainer(container.ID, container.Request); err != nil { klog.Warningf("Failed to add container %+v. Reason: %+v", container.ID, err) } } } } func (feeder *clusterStateFeeder) LoadRealTimeMetrics() { containersMetrics, err := feeder.metricsClient.GetContainersMetrics() if err != nil { klog.Errorf("Cannot get ContainerMetricsSnapshot from MetricsClient. 
Reason: %+v", err) } sampleCount := 0 droppedSampleCount := 0 for _, containerMetrics := range containersMetrics { for _, sample := range newContainerUsageSamplesWithKey(containerMetrics) { if err := feeder.clusterState.AddSample(sample); err != nil { // Not all pod states are tracked in memory saver mode if _, isKeyError := err.(model.KeyError); isKeyError && feeder.memorySaveMode { continue } klog.Warningf("Error adding metric sample for container %v: %v", sample.Container, err) droppedSampleCount++ } else { sampleCount++ } } } klog.V(3).Infof("ClusterSpec fed with #%v ContainerUsageSamples for #%v containers. Dropped #%v samples.", sampleCount, len(containersMetrics), droppedSampleCount) Loop: for { select { case oomInfo := <-feeder.oomChan: klog.V(3).Infof("OOM detected %+v", oomInfo) if err = feeder.clusterState.RecordOOM(oomInfo.ContainerID, oomInfo.Timestamp, oomInfo.Memory); err != nil { klog.Warningf("Failed to record OOM %+v. Reason: %+v", oomInfo, err) } default: break Loop } } metrics_recommender.RecordAggregateContainerStatesCount(feeder.clusterState.StateMapSize()) } func (feeder *clusterStateFeeder) matchesVPA(pod *spec.BasicPodSpec) bool { for vpaKey, vpa := range feeder.clusterState.Vpas { podLabels := labels.Set(pod.PodLabels) if vpaKey.Namespace == pod.ID.Namespace && vpa.PodSelector.Matches(podLabels) { return true } } return false } func newContainerUsageSamplesWithKey(metrics *metrics.ContainerMetricsSnapshot) []*model.ContainerUsageSampleWithKey { var samples []*model.ContainerUsageSampleWithKey for metricName, resourceAmount := range metrics.Usage { sample := &model.ContainerUsageSampleWithKey{ Container: metrics.ID, ContainerUsageSample: model.ContainerUsageSample{ MeasureStart: metrics.SnapshotTime, Resource: metricName, Usage: resourceAmount, }, } samples = append(samples, sample) } return samples } type condition struct { conditionType vpa_types.VerticalPodAutoscalerConditionType delete bool message string } func (feeder 
*clusterStateFeeder) validateTargetRef(vpa *vpa_types.VerticalPodAutoscaler) (bool, condition) { // if vpa.Spec.TargetRef == nil { return false, condition{} } k := controllerfetcher.ControllerKeyWithAPIVersion{ ControllerKey: controllerfetcher.ControllerKey{ Namespace: vpa.Namespace, Kind: vpa.Spec.TargetRef.Kind, Name: vpa.Spec.TargetRef.Name, }, ApiVersion: vpa.Spec.TargetRef.APIVersion, } top, err := feeder.controllerFetcher.FindTopMostWellKnownOrScalable(&k) if err != nil { return false, condition{conditionType: vpa_types.ConfigUnsupported, delete: false, message: fmt.Sprintf("Error checking if target is a topmost well-known or scalable controller: %s", err)} } if top == nil { return false, condition{conditionType: vpa_types.ConfigUnsupported, delete: false, message: fmt.Sprintf("Unknown error during checking if target is a topmost well-known or scalable controller: %s", err)} } if *top != k { return false, condition{conditionType: vpa_types.ConfigUnsupported, delete: false, message: "The targetRef controller has a parent but it should point to a topmost well-known or scalable controller"} } return true, condition{} } func (feeder *clusterStateFeeder) getSelector(vpa *vpa_types.VerticalPodAutoscaler) (labels.Selector, []condition) { selector, fetchErr := feeder.selectorFetcher.Fetch(vpa) if selector != nil { validTargetRef, unsupportedCondition := feeder.validateTargetRef(vpa) if !validTargetRef { return labels.Nothing(), []condition{ unsupportedCondition, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } } return selector, []condition{ {conditionType: vpa_types.ConfigUnsupported, delete: true}, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } } msg := "Cannot read targetRef" if fetchErr != nil { klog.Errorf("Cannot get target selector from VPA's targetRef. Reason: %+v", fetchErr) msg = fmt.Sprintf("Cannot read targetRef. 
Reason: %s", fetchErr.Error()) } return labels.Nothing(), []condition{ {conditionType: vpa_types.ConfigUnsupported, delete: false, message: msg}, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } }
VpaName: vpaCRD.Name,
random_line_split
cluster_feeder.go
/* Copyright 2018 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package input import ( "context" "fmt" "time" apiv1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/fields" "k8s.io/apimachinery/pkg/labels" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/apimachinery/pkg/watch" kube_client "k8s.io/client-go/kubernetes" corev1 "k8s.io/client-go/kubernetes/typed/core/v1" v1lister "k8s.io/client-go/listers/core/v1" "k8s.io/client-go/tools/cache" "k8s.io/klog/v2" vpa_types "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/apis/autoscaling.k8s.io/v1" vpa_api "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/client/clientset/versioned/typed/autoscaling.k8s.io/v1" vpa_lister "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/client/listers/autoscaling.k8s.io/v1" controllerfetcher "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/controller_fetcher" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/history" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/metrics" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/oom" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/input/spec" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/model" "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/target" metrics_recommender "k8s.io/autoscaler/vertical-pod-autoscaler/pkg/utils/metrics/recommender" ) const ( evictionWatchRetryWait = 10 * time.Second evictionWatchJitterFactor = 0.5 // 
DefaultRecommenderName recommender name explicitly (and so implicitly specify that the default recommender should handle them) DefaultRecommenderName = "default" ) // ClusterStateFeeder can update state of ClusterState object. type ClusterStateFeeder interface { // InitFromHistoryProvider loads historical pod spec into clusterState. InitFromHistoryProvider(historyProvider history.HistoryProvider) // InitFromCheckpoints loads historical checkpoints into clusterState. InitFromCheckpoints() // LoadVPAs updates clusterState with current state of VPAs. LoadVPAs() // LoadPods updates clusterState with current specification of Pods and their Containers. LoadPods() // LoadRealTimeMetrics updates clusterState with current usage metrics of containers. LoadRealTimeMetrics() // GarbageCollectCheckpoints removes historical checkpoints that don't have a matching VPA. GarbageCollectCheckpoints() } // ClusterStateFeederFactory makes instances of ClusterStateFeeder. type ClusterStateFeederFactory struct { ClusterState *model.ClusterState KubeClient kube_client.Interface MetricsClient metrics.MetricsClient VpaCheckpointClient vpa_api.VerticalPodAutoscalerCheckpointsGetter VpaLister vpa_lister.VerticalPodAutoscalerLister PodLister v1lister.PodLister OOMObserver oom.Observer SelectorFetcher target.VpaTargetSelectorFetcher MemorySaveMode bool ControllerFetcher controllerfetcher.ControllerFetcher RecommenderName string } // Make creates new ClusterStateFeeder with internal data providers, based on kube client. 
func (m ClusterStateFeederFactory) Make() *clusterStateFeeder { return &clusterStateFeeder{ coreClient: m.KubeClient.CoreV1(), metricsClient: m.MetricsClient, oomChan: m.OOMObserver.GetObservedOomsChannel(), vpaCheckpointClient: m.VpaCheckpointClient, vpaLister: m.VpaLister, clusterState: m.ClusterState, specClient: spec.NewSpecClient(m.PodLister), selectorFetcher: m.SelectorFetcher, memorySaveMode: m.MemorySaveMode, controllerFetcher: m.ControllerFetcher, recommenderName: m.RecommenderName, } } // WatchEvictionEventsWithRetries watches new Events with reason=Evicted and passes them to the observer. func WatchEvictionEventsWithRetries(kubeClient kube_client.Interface, observer oom.Observer, namespace string) { go func() { options := metav1.ListOptions{ FieldSelector: "reason=Evicted", } watchEvictionEventsOnce := func() { watchInterface, err := kubeClient.CoreV1().Events(namespace).Watch(context.TODO(), options) if err != nil { klog.Errorf("Cannot initialize watching events. Reason %v", err) return } watchEvictionEvents(watchInterface.ResultChan(), observer) } for { watchEvictionEventsOnce() // Wait between attempts, retrying too often breaks API server. waitTime := wait.Jitter(evictionWatchRetryWait, evictionWatchJitterFactor) klog.V(1).Infof("An attempt to watch eviction events finished. Waiting %v before the next one.", waitTime) time.Sleep(waitTime) } }() } func watchEvictionEvents(evictedEventChan <-chan watch.Event, observer oom.Observer) { for { evictedEvent, ok := <-evictedEventChan if !ok { klog.V(3).Infof("Eviction event chan closed") return } if evictedEvent.Type == watch.Added { evictedEvent, ok := evictedEvent.Object.(*apiv1.Event) if !ok { continue } observer.OnEvent(evictedEvent) } } } // Creates clients watching pods: PodLister (listing only not terminated pods). 
func newPodClients(kubeClient kube_client.Interface, resourceEventHandler cache.ResourceEventHandler, namespace string) v1lister.PodLister { // We are interested in pods which are Running or Unknown (in case the pod is // running but there are some transient errors we don't want to delete it from // our model). // We don't want to watch Pending pods because they didn't generate any usage // yet. // Succeeded and Failed failed pods don't generate any usage anymore but we // don't necessarily want to immediately delete them. selector := fields.ParseSelectorOrDie("status.phase!=" + string(apiv1.PodPending)) podListWatch := cache.NewListWatchFromClient(kubeClient.CoreV1().RESTClient(), "pods", namespace, selector) indexer, controller := cache.NewIndexerInformer( podListWatch, &apiv1.Pod{}, time.Hour, resourceEventHandler, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, ) podLister := v1lister.NewPodLister(indexer) stopCh := make(chan struct{}) go controller.Run(stopCh) return podLister } // NewPodListerAndOOMObserver creates pair of pod lister and OOM observer. 
func NewPodListerAndOOMObserver(kubeClient kube_client.Interface, namespace string) (v1lister.PodLister, oom.Observer) { oomObserver := oom.NewObserver() podLister := newPodClients(kubeClient, oomObserver, namespace) WatchEvictionEventsWithRetries(kubeClient, oomObserver, namespace) return podLister, oomObserver } type clusterStateFeeder struct { coreClient corev1.CoreV1Interface specClient spec.SpecClient metricsClient metrics.MetricsClient oomChan <-chan oom.OomInfo vpaCheckpointClient vpa_api.VerticalPodAutoscalerCheckpointsGetter vpaLister vpa_lister.VerticalPodAutoscalerLister clusterState *model.ClusterState selectorFetcher target.VpaTargetSelectorFetcher memorySaveMode bool controllerFetcher controllerfetcher.ControllerFetcher recommenderName string } func (feeder *clusterStateFeeder) InitFromHistoryProvider(historyProvider history.HistoryProvider) { klog.V(3).Info("Initializing VPA from history provider") clusterHistory, err := historyProvider.GetClusterHistory() if err != nil { klog.Errorf("Cannot get cluster history: %v", err) } for podID, podHistory := range clusterHistory { klog.V(4).Infof("Adding pod %v with labels %v", podID, podHistory.LastLabels) feeder.clusterState.AddOrUpdatePod(podID, podHistory.LastLabels, apiv1.PodUnknown) for containerName, sampleList := range podHistory.Samples { containerID := model.ContainerID{ PodID: podID, ContainerName: containerName, } if err = feeder.clusterState.AddOrUpdateContainer(containerID, nil); err != nil { klog.Warningf("Failed to add container %+v. Reason: %+v", containerID, err) } klog.V(4).Infof("Adding %d samples for container %v", len(sampleList), containerID) for _, sample := range sampleList { if err := feeder.clusterState.AddSample( &model.ContainerUsageSampleWithKey{ ContainerUsageSample: sample, Container: containerID, }); err != nil { klog.Warningf("Error adding metric sample for container %v: %v", containerID, err) } } } } } func (feeder *clusterStateFeeder)
(checkpoint *vpa_types.VerticalPodAutoscalerCheckpoint) error { vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName} vpa, exists := feeder.clusterState.Vpas[vpaID] if !exists { return fmt.Errorf("cannot load checkpoint to missing VPA object %+v", vpaID) } cs := model.NewAggregateContainerState() err := cs.LoadFromCheckpoint(&checkpoint.Status) if err != nil { return fmt.Errorf("cannot load checkpoint for VPA %+v. Reason: %v", vpa.ID, err) } vpa.ContainersInitialAggregateState[checkpoint.Spec.ContainerName] = cs return nil } func (feeder *clusterStateFeeder) InitFromCheckpoints() { klog.V(3).Info("Initializing VPA from checkpoints") feeder.LoadVPAs() namespaces := make(map[string]bool) for _, v := range feeder.clusterState.Vpas { namespaces[v.ID.Namespace] = true } for namespace := range namespaces { klog.V(3).Infof("Fetching checkpoints from namespace %s", namespace) checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err) } for _, checkpoint := range checkpointList.Items { klog.V(3).Infof("Loading VPA %s/%s checkpoint for %s", checkpoint.ObjectMeta.Namespace, checkpoint.Spec.VPAObjectName, checkpoint.Spec.ContainerName) err = feeder.setVpaCheckpoint(&checkpoint) if err != nil { klog.Errorf("Error while loading checkpoint. Reason: %+v", err) } } } } func (feeder *clusterStateFeeder) GarbageCollectCheckpoints() { klog.V(3).Info("Starting garbage collection of checkpoints") feeder.LoadVPAs() namespaceList, err := feeder.coreClient.Namespaces().List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list namespaces. 
Reason: %+v", err) return } for _, namespaceItem := range namespaceList.Items { namespace := namespaceItem.Name checkpointList, err := feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).List(context.TODO(), metav1.ListOptions{}) if err != nil { klog.Errorf("Cannot list VPA checkpoints from namespace %v. Reason: %+v", namespace, err) } for _, checkpoint := range checkpointList.Items { vpaID := model.VpaID{Namespace: checkpoint.Namespace, VpaName: checkpoint.Spec.VPAObjectName} _, exists := feeder.clusterState.Vpas[vpaID] if !exists { err = feeder.vpaCheckpointClient.VerticalPodAutoscalerCheckpoints(namespace).Delete(context.TODO(), checkpoint.Name, metav1.DeleteOptions{}) if err == nil { klog.V(3).Infof("Orphaned VPA checkpoint cleanup - deleting %v/%v.", namespace, checkpoint.Name) } else { klog.Errorf("Cannot delete VPA checkpoint %v/%v. Reason: %+v", namespace, checkpoint.Name, err) } } } } } func implicitDefaultRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector) bool { return len(selectors) == 0 } func selectsRecommender(selectors []*vpa_types.VerticalPodAutoscalerRecommenderSelector, name *string) bool { for _, s := range selectors { if s.Name == *name { return true } } return false } // Filter VPA objects whose specified recommender names are not default func filterVPAs(feeder *clusterStateFeeder, allVpaCRDs []*vpa_types.VerticalPodAutoscaler) []*vpa_types.VerticalPodAutoscaler { klog.V(3).Infof("Start selecting the vpaCRDs.") var vpaCRDs []*vpa_types.VerticalPodAutoscaler for _, vpaCRD := range allVpaCRDs { if feeder.recommenderName == DefaultRecommenderName { if !implicitDefaultRecommender(vpaCRD.Spec.Recommenders) && !selectsRecommender(vpaCRD.Spec.Recommenders, &feeder.recommenderName) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as current recommender's name %v doesn't appear among its recommenders", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName) continue } } else { if 
implicitDefaultRecommender(vpaCRD.Spec.Recommenders) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as %v recommender doesn't process CRDs implicitly destined to %v recommender", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName, DefaultRecommenderName) continue } if !selectsRecommender(vpaCRD.Spec.Recommenders, &feeder.recommenderName) { klog.V(6).Infof("Ignoring vpaCRD %s in namespace %s as current recommender's name %v doesn't appear among its recommenders", vpaCRD.Name, vpaCRD.Namespace, feeder.recommenderName) continue } } vpaCRDs = append(vpaCRDs, vpaCRD) } return vpaCRDs } // LoadVPAs fetches VPA objects and loads them into the cluster state. func (feeder *clusterStateFeeder) LoadVPAs() { // List VPA API objects. allVpaCRDs, err := feeder.vpaLister.List(labels.Everything()) if err != nil { klog.Errorf("Cannot list VPAs. Reason: %+v", err) return } // Filter out VPAs that specified recommenders with names not equal to "default" vpaCRDs := filterVPAs(feeder, allVpaCRDs) klog.V(3).Infof("Fetched %d VPAs.", len(vpaCRDs)) // Add or update existing VPAs in the model. vpaKeys := make(map[model.VpaID]bool) for _, vpaCRD := range vpaCRDs { vpaID := model.VpaID{ Namespace: vpaCRD.Namespace, VpaName: vpaCRD.Name, } selector, conditions := feeder.getSelector(vpaCRD) klog.V(4).Infof("Using selector %s for VPA %s/%s", selector.String(), vpaCRD.Namespace, vpaCRD.Name) if feeder.clusterState.AddOrUpdateVpa(vpaCRD, selector) == nil { // Successfully added VPA to the model. vpaKeys[vpaID] = true for _, condition := range conditions { if condition.delete { delete(feeder.clusterState.Vpas[vpaID].Conditions, condition.conditionType) } else { feeder.clusterState.Vpas[vpaID].Conditions.Set(condition.conditionType, true, "", condition.message) } } } } // Delete non-existent VPAs from the model. 
for vpaID := range feeder.clusterState.Vpas { if _, exists := vpaKeys[vpaID]; !exists { klog.V(3).Infof("Deleting VPA %v", vpaID) if err := feeder.clusterState.DeleteVpa(vpaID); err != nil { klog.Errorf("Deleting VPA %v failed: %v", vpaID, err) } } } feeder.clusterState.ObservedVpas = vpaCRDs } // LoadPods loads pod into the cluster state. func (feeder *clusterStateFeeder) LoadPods() { podSpecs, err := feeder.specClient.GetPodSpecs() if err != nil { klog.Errorf("Cannot get SimplePodSpecs. Reason: %+v", err) } pods := make(map[model.PodID]*spec.BasicPodSpec) for _, spec := range podSpecs { pods[spec.ID] = spec } for key := range feeder.clusterState.Pods { if _, exists := pods[key]; !exists { klog.V(3).Infof("Deleting Pod %v", key) feeder.clusterState.DeletePod(key) } } for _, pod := range pods { if feeder.memorySaveMode && !feeder.matchesVPA(pod) { continue } feeder.clusterState.AddOrUpdatePod(pod.ID, pod.PodLabels, pod.Phase) for _, container := range pod.Containers { if err = feeder.clusterState.AddOrUpdateContainer(container.ID, container.Request); err != nil { klog.Warningf("Failed to add container %+v. Reason: %+v", container.ID, err) } } } } func (feeder *clusterStateFeeder) LoadRealTimeMetrics() { containersMetrics, err := feeder.metricsClient.GetContainersMetrics() if err != nil { klog.Errorf("Cannot get ContainerMetricsSnapshot from MetricsClient. Reason: %+v", err) } sampleCount := 0 droppedSampleCount := 0 for _, containerMetrics := range containersMetrics { for _, sample := range newContainerUsageSamplesWithKey(containerMetrics) { if err := feeder.clusterState.AddSample(sample); err != nil { // Not all pod states are tracked in memory saver mode if _, isKeyError := err.(model.KeyError); isKeyError && feeder.memorySaveMode { continue } klog.Warningf("Error adding metric sample for container %v: %v", sample.Container, err) droppedSampleCount++ } else { sampleCount++ } } } klog.V(3).Infof("ClusterSpec fed with #%v ContainerUsageSamples for #%v containers. 
Dropped #%v samples.", sampleCount, len(containersMetrics), droppedSampleCount) Loop: for { select { case oomInfo := <-feeder.oomChan: klog.V(3).Infof("OOM detected %+v", oomInfo) if err = feeder.clusterState.RecordOOM(oomInfo.ContainerID, oomInfo.Timestamp, oomInfo.Memory); err != nil { klog.Warningf("Failed to record OOM %+v. Reason: %+v", oomInfo, err) } default: break Loop } } metrics_recommender.RecordAggregateContainerStatesCount(feeder.clusterState.StateMapSize()) } func (feeder *clusterStateFeeder) matchesVPA(pod *spec.BasicPodSpec) bool { for vpaKey, vpa := range feeder.clusterState.Vpas { podLabels := labels.Set(pod.PodLabels) if vpaKey.Namespace == pod.ID.Namespace && vpa.PodSelector.Matches(podLabels) { return true } } return false } func newContainerUsageSamplesWithKey(metrics *metrics.ContainerMetricsSnapshot) []*model.ContainerUsageSampleWithKey { var samples []*model.ContainerUsageSampleWithKey for metricName, resourceAmount := range metrics.Usage { sample := &model.ContainerUsageSampleWithKey{ Container: metrics.ID, ContainerUsageSample: model.ContainerUsageSample{ MeasureStart: metrics.SnapshotTime, Resource: metricName, Usage: resourceAmount, }, } samples = append(samples, sample) } return samples } type condition struct { conditionType vpa_types.VerticalPodAutoscalerConditionType delete bool message string } func (feeder *clusterStateFeeder) validateTargetRef(vpa *vpa_types.VerticalPodAutoscaler) (bool, condition) { // if vpa.Spec.TargetRef == nil { return false, condition{} } k := controllerfetcher.ControllerKeyWithAPIVersion{ ControllerKey: controllerfetcher.ControllerKey{ Namespace: vpa.Namespace, Kind: vpa.Spec.TargetRef.Kind, Name: vpa.Spec.TargetRef.Name, }, ApiVersion: vpa.Spec.TargetRef.APIVersion, } top, err := feeder.controllerFetcher.FindTopMostWellKnownOrScalable(&k) if err != nil { return false, condition{conditionType: vpa_types.ConfigUnsupported, delete: false, message: fmt.Sprintf("Error checking if target is a topmost well-known 
or scalable controller: %s", err)} } if top == nil { return false, condition{conditionType: vpa_types.ConfigUnsupported, delete: false, message: fmt.Sprintf("Unknown error during checking if target is a topmost well-known or scalable controller: %s", err)} } if *top != k { return false, condition{conditionType: vpa_types.ConfigUnsupported, delete: false, message: "The targetRef controller has a parent but it should point to a topmost well-known or scalable controller"} } return true, condition{} } func (feeder *clusterStateFeeder) getSelector(vpa *vpa_types.VerticalPodAutoscaler) (labels.Selector, []condition) { selector, fetchErr := feeder.selectorFetcher.Fetch(vpa) if selector != nil { validTargetRef, unsupportedCondition := feeder.validateTargetRef(vpa) if !validTargetRef { return labels.Nothing(), []condition{ unsupportedCondition, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } } return selector, []condition{ {conditionType: vpa_types.ConfigUnsupported, delete: true}, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } } msg := "Cannot read targetRef" if fetchErr != nil { klog.Errorf("Cannot get target selector from VPA's targetRef. Reason: %+v", fetchErr) msg = fmt.Sprintf("Cannot read targetRef. Reason: %s", fetchErr.Error()) } return labels.Nothing(), []condition{ {conditionType: vpa_types.ConfigUnsupported, delete: false, message: msg}, {conditionType: vpa_types.ConfigDeprecated, delete: true}, } }
setVpaCheckpoint
identifier_name
buffers_handler.ts
/** * Copyright 2015 CANAL+ Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { combineLatest as observableCombineLatest, concat as observableConcat, merge as observableMerge, Observable, of as observableOf, ReplaySubject, Subject, } from "rxjs"; import { catchError, distinctUntilChanged, exhaustMap, filter, ignoreElements, map, mapTo, mergeMap, share, startWith, switchMap, take, takeUntil, tap, } from "rxjs/operators"; import config from "../../config"; import { ICustomError, MediaError, } from "../../errors"; import log from "../../log"; import Manifest, { Adaptation, Period, } from "../../manifest"; import arrayIncludes from "../../utils/array-includes"; import InitializationSegmentCache from "../../utils/initialization_segment_cache"; import SortedList from "../../utils/sorted_list"; import WeakMapMemory from "../../utils/weak_map_memory"; import BufferManager, { createFakeBuffer, IAdaptationBufferEvent, IBufferClockTick, } from "../buffer"; import { IPipelineOptions, SegmentPipelinesManager, } from "../pipelines"; import SourceBufferManager, { getBufferTypes, IBufferType, ITextTrackSourceBufferOptions, QueuedSourceBuffer, } from "../source_buffers"; import ActivePeriodEmitter, { IPeriodBufferInfos, } from "./active_period_emitter"; import SegmentBookkeeper from "./segment_bookkeeper"; import EVENTS, { IActivePeriodChangedEvent, IAdaptationChangeEvent, ICompletedBufferEvent, IEndOfStreamEvent, IPeriodBufferClearedEvent, IPeriodBufferReadyEvent, 
IResumeStreamEvent, } from "./stream_events"; // Events coming from single PeriodBuffer type IPeriodBufferEvent = IAdaptationBufferEvent<any> | IAdaptationChangeEvent; // Events coming from function(s) managing multiple PeriodBuffers. type IMultiplePeriodBuffersEvent = IPeriodBufferEvent | IPeriodBufferReadyEvent | IPeriodBufferClearedEvent | ICompletedBufferEvent; // Every events sent by the BuffersHandler exported here. export type IBufferHandlerEvent = IActivePeriodChangedEvent | IMultiplePeriodBuffersEvent | IEndOfStreamEvent | IResumeStreamEvent; /** * Create and manage the various Buffer Observables needed for the content to * stream: * * - Create or dispose SourceBuffers depending on the chosen adaptations. * * - Concatenate Buffers for adaptation from separate Periods at the right * time, to allow smooth transitions between periods. * * - Emit events as Period or Adaptations change or as new Period are * prepared. * * Here multiple buffers can be created at the same time to allow smooth * transitions between periods. * To do this, we dynamically create or destroy buffers as they are needed. * * @param {Object} content - The content to play. Contains the following * properties: * - manifest {Manifest} * * - period {Period} - The first period to play in the content * * - clock$ {Observable} - Emit current informations about the content * being played. Also regulate the frequencies of the time the Buffer check * for new its status / new segments. * * - bufferManager {BufferManager} - Will be used to create new * AdaptationBuffers at will * * - sourceBufferManager {SourceBufferManager} - Will be used to lazily * create SourceBuffer instances associated with the current content. * * - segmentPipelinesManager {SegmentPipelinesManager} - Used to download * segments. 
* * - segmentBookkeeper {WeakMapMemory} - Allow to easily retrieve or create * a unique SegmentBookkeeper per SourceBuffer * * - garbageCollectors {WeakMapMemory} - Allow to easily retrieve or create * a unique Garbage Collector per SourceBuffer * * - options {Object} * * - errorStream {Subject} - Subject to emit minor errors * @returns {Observable} * * TODO Special case for image Buffer, where we want data for EVERY active * periods. * * TODO Special garbage collection for text and image buffers, as we want to * clean it for potentially very long sessions. */ export default function BuffersHandler( content : { manifest : Manifest; period : Period }, clock$ : Observable<IBufferClockTick>, wantedBufferAhead$ : Observable<number>, bufferManager : BufferManager, sourceBufferManager : SourceBufferManager, segmentPipelinesManager : SegmentPipelinesManager<any>, segmentBookkeepers : WeakMapMemory<QueuedSourceBuffer<any>, SegmentBookkeeper>, garbageCollectors : WeakMapMemory<QueuedSourceBuffer<any>, Observable<never>>, options: { maxRetry? : number; maxRetryOffline? : number; textTrackOptions? : ITextTrackSourceBufferOptions; }, errorStream : Subject<Error | ICustomError> ) : Observable<IBufferHandlerEvent> { const manifest = content.manifest; const firstPeriod = content.period; // Initialize all native source buffers from the first period at the same // time. // We cannot lazily create native sourcebuffers since the spec does not // allow adding them during playback. // // From https://w3c.github.io/media-source/#methods // For example, a user agent may throw a QuotaExceededError // exception if the media element has reached the HAVE_METADATA // readyState. This can occur if the user agent's media engine // does not support adding more tracks during playback. 
createNativeSourceBuffersForPeriod(sourceBufferManager, firstPeriod); const addPeriodBuffer$ = new Subject<IPeriodBufferInfos>(); const removePeriodBuffer$ = new Subject<IPeriodBufferInfos>(); const bufferTypes = getBufferTypes(); /** * Every PeriodBuffers for every possible types * @type {Array.<Observable>} */ const buffersArray = bufferTypes .map((bufferType) => { return manageEveryBuffers(bufferType, firstPeriod) .pipe( tap((evt) => { if (evt.type === "periodBufferReady") { addPeriodBuffer$.next(evt.value); } else if (evt.type === "periodBufferCleared") { removePeriodBuffer$.next(evt.value); } }), share() ); }); /** * Emits the active Period every time it changes * @type {Observable} */ const activePeriod$ : Observable<Period> = ActivePeriodEmitter(bufferTypes, addPeriodBuffer$, removePeriodBuffer$) .pipe(filter((period) : period is Period => !!period)); /** * Emits the activePeriodChanged events every time the active Period changes. * @type {Observable} */ const activePeriodChanged$ = activePeriod$ .pipe( tap((period : Period) => { log.info("new active period", period); }), map(period => EVENTS.activePeriodChanged(period)) ); /** * Emits an "end-of-stream" event once every PeriodBuffer are complete. * @type {Observable} */ const streamHasEnded$ = buffersAreComplete(...buffersArray) .pipe(map((areComplete) => areComplete ? EVENTS.endOfStream() : EVENTS.resumeStream() )); return observableMerge( activePeriodChanged$, ...buffersArray, streamHasEnded$ ); /** * Manage creation and removal of Buffers for every Periods. * * Works by creating consecutive buffers through the * manageConsecutivePeriodBuffers function, and restarting it when the clock * goes out of the bounds of these buffers. * @param {string} bufferType - e.g. "audio" or "video" * @param {Period} basePeriod - Initial Period downloaded. 
* @returns {Observable} */ function manageEveryBuffers( bufferType : IBufferType, basePeriod : Period ) : Observable<IMultiplePeriodBuffersEvent> { /** * Keep a PeriodList for cases such as seeking ahead/before the * buffers already created. * When that happens, interrupt the previous buffers and create one back * from the new initial period. * @type {ConsecutivePeriodList} */ const periodList = new SortedList<Period>((a, b) => a.start - b.start); /** * Returns true if the given time is either: * - less than the start of the chronologically first Period * - more than the end of the chronologically last Period * @param {number} time * @returns {boolean} */ function isOutOfPeriodList(time : number) : boolean { const head = periodList.head(); const last = periodList.last(); if (head == null || last == null) { // if no period return true; } return head.start > time || (last.end || Infinity) < time; } /** * Destroy the current set of consecutive buffers. * Used when the clocks goes out of the bounds of those, e.g. when the user * seeks. * We can then re-create consecutive buffers, from the new point in time. 
* @type {Subject} */ const destroyCurrentBuffers = new Subject<void>(); const restartBuffers$ = clock$.pipe( filter(({ currentTime, wantedTimeOffset }) => { if (!manifest.getPeriodForTime(wantedTimeOffset + currentTime)) { // TODO Manage out-of-manifest situations return false; } return isOutOfPeriodList(wantedTimeOffset + currentTime); }), take(1), tap(({ currentTime, wantedTimeOffset }) => { log.info("Current position out of the bounds of the active periods," + "re-creating buffers.", bufferType, currentTime + wantedTimeOffset); destroyCurrentBuffers.next(); }), mergeMap(({ currentTime, wantedTimeOffset }) => { const newInitialPeriod = manifest .getPeriodForTime(currentTime + wantedTimeOffset); if (newInitialPeriod == null) { throw new MediaError("MEDIA_TIME_NOT_FOUND", null, true); } else { // Note: For this to work, manageEveryBuffers should always emit the // "periodBufferReady" event for the new InitialPeriod synchronously return manageEveryBuffers(bufferType, newInitialPeriod); } }) ); const currentBuffers$ = manageConsecutivePeriodBuffers( bufferType, basePeriod, destroyCurrentBuffers ).pipe( tap((message) => { if (message.type === "periodBufferReady") { periodList.add(message.value.period); } else if (message.type === "periodBufferCleared") { periodList.removeFirst(message.value.period); } }), share() // as always, with side-effects ); return observableMerge(currentBuffers$, restartBuffers$); } /** * Manage creation and removal of Buffers for consecutive Periods. * * This function is called recursively for each successive Periods as needed. * * This function does not guarantee creation/destruction of the right Buffers * when the user seeks or rewind in the content. * It only manages regular playback, another layer should be used to manage * those cases. * * You can know about buffers creation and destruction respectively through * the "periodBufferReady" and "periodBufferCleared" events. 
* * The "periodBufferReady" related to the given period should be sent synchronously * on subscription. * Further "periodBufferReady" for further Periods should be sent each time the * Buffer for the previous Buffer is full. * * Buffers for each Period are cleared ("periodBufferCleared" event) either: * - when it has finished to play (currentTime is after it) * - when one of the older Buffers becomes active again, in which case the * Buffers coming after will be cleared from the newest to the oldest. * - when the destroy$ observable emits, in which case every created Buffer * here will be cleared from the newest to the oldest. * * TODO The code here can surely be greatly simplified. * @param {string} bufferType - e.g. "audio" or "video" * @param {Period} basePeriod - Initial Period downloaded. * @param {Observable} destroy$ - Emit when/if all created Buffer from this * point should be destroyed. * @returns {Observable} */ function manageConsecutivePeriodBuffers( bufferType : IBufferType, basePeriod : Period, destroy$ : Observable<void> ) : Observable<IMultiplePeriodBuffersEvent> { log.info("creating new Buffer for", bufferType, basePeriod); /** * Emits the chosen adaptation for the current type. * @type {ReplaySubject} */ const adaptation$ = new ReplaySubject<Adaptation|null>(1); /** * Emits the Period of the next Period Buffer when it can be created. * @type {Subject} */ const createNextPeriodBuffer$ = new Subject<Period>(); /** * Emits when the Buffers for the next Periods should be destroyed, if * created. * @type {Subject} */ const destroyNextBuffers$ = new Subject<void>(); /** * Emits when the current position goes over the end of the current buffer. * @type {Subject} */ const endOfCurrentBuffer$ = clock$ .pipe(filter(({ currentTime, wantedTimeOffset }) => !!basePeriod.end && (currentTime + wantedTimeOffset) >= basePeriod.end )); /** * Create Period Buffer for the next Period. 
* @type {Observable} */ const nextPeriodBuffer$ = createNextPeriodBuffer$ .pipe(exhaustMap((nextPeriod) => { return manageConsecutivePeriodBuffers( bufferType, nextPeriod, destroyNextBuffers$); })); /** * Allows to destroy each created Buffer, from the newest to the oldest, * once destroy$ emits. * @type {Observable} */ const destroyAll$ = destroy$.pipe( take(1), tap(() => { // first complete createNextBuffer$ to allow completion of the // nextPeriodBuffer$ observable once every further Buffers have been // cleared. createNextPeriodBuffer$.complete(); // emit destruction signal to the next Buffer first destroyNextBuffers$.next(); destroyNextBuffers$.complete(); // we do not need it anymore }), share() // share side-effects ); /** * Will emit when the current buffer should be destroyed. * @type {Observable} */ const killCurrentBuffer$ = observableMerge(endOfCurrentBuffer$, destroyAll$); const periodBuffer$ = createPeriodBuffer(bufferType, basePeriod, adaptation$).pipe( mergeMap(( evt : IPeriodBufferEvent ) : Observable<IMultiplePeriodBuffersEvent> => { const { type } = evt; if (type === "full-buffer") { /** * The Period coming just after the current one. * @type {Period|undefined} */ const nextPeriod = manifest.getPeriodAfter(basePeriod); if (nextPeriod == null) { // no more period, emits event return observableOf(EVENTS.bufferComplete(bufferType)); } else { // current buffer is full, create the next one if not createNextPeriodBuffer$.next(nextPeriod); } } else if (type === "active-buffer") { // current buffer is active, destroy next buffer if created destroyNextBuffers$.next(); } return observableOf(evt); }), share() ); /** * Buffer for the current Period. 
* @type {Observable} */ const currentBuffer$ : Observable<IMultiplePeriodBuffersEvent> = observableConcat( observableOf(EVENTS.periodBufferReady(bufferType, basePeriod, adaptation$)), periodBuffer$.pipe(takeUntil(killCurrentBuffer$)), observableOf(EVENTS.periodBufferCleared(bufferType, basePeriod)) .pipe(tap(() => { log.info("destroying buffer for", bufferType, basePeriod); })) ); return observableMerge( currentBuffer$, nextPeriodBuffer$, destroyAll$.pipe(ignoreElements()) ); } /** * Create single PeriodBuffer Observable: * - Lazily create (or reuse) a SourceBuffer for the given type. * - Create a Buffer linked to an Adaptation each time it changes, to * download and append the corresponding Segments in the SourceBuffer. * - Announce when the Buffer is full or is awaiting new Segments through * events * * /!\ This Observable has multiple side-effects (creation of SourceBuffers, * downloading and appending of Segments etc.) on subscription. * * @param {string} bufferType * @param {Period} period - The period concerned * @param {Observable} adaptation$ - Emit the chosen adaptation. 
* Emit null to deactivate a type of adaptation * @returns {Observable} */ function createPeriodBuffer( bufferType : IBufferType, period: Period, adaptation$ : Observable<Adaptation|null> ) : Observable<IPeriodBufferEvent> { return adaptation$.pipe(switchMap((adaptation) => { if (adaptation == null) { log.info(`set no ${bufferType} Adaptation`, period); let cleanBuffer$ : Observable<null>; if (sourceBufferManager.has(bufferType)) { log.info(`clearing previous ${bufferType} SourceBuffer`); const _queuedSourceBuffer = sourceBufferManager.get(bufferType); cleanBuffer$ = _queuedSourceBuffer .removeBuffer({ start: period.start, end: period.end || Infinity }) .pipe(mapTo(null)); } else { cleanBuffer$ = observableOf(null); } return observableConcat( cleanBuffer$.pipe(mapTo(EVENTS.adaptationChange(bufferType, null, period))), createFakeBuffer(clock$, wantedBufferAhead$, bufferType, { manifest, period }) ); } log.info(`updating ${bufferType} adaptation`, adaptation, period); // 1 - create or reuse the SourceBuffer let queuedSourceBuffer : QueuedSourceBuffer<any>; if (sourceBufferManager.has(bufferType)) { log.info("reusing a previous SourceBuffer for the type", bufferType); queuedSourceBuffer = sourceBufferManager.get(bufferType); } else { const codec = getFirstDeclaredMimeType(adaptation); const sourceBufferOptions = bufferType === "text" ? 
options.textTrackOptions : undefined; queuedSourceBuffer = sourceBufferManager .createSourceBuffer(bufferType, codec, sourceBufferOptions); } // 2 - create or reuse the associated BufferGarbageCollector and // SegmentBookkeeper const bufferGarbageCollector$ = garbageCollectors.get(queuedSourceBuffer); const segmentBookkeeper = segmentBookkeepers.get(queuedSourceBuffer); // TODO Clean previous QueuedSourceBuffer for previous content in the period // // 3 - Clean possible content from a precedent adaptation in this period // // (take the clock into account to avoid removing "now" for native sourceBuffers) // // like: // return clock$.pluck("currentTime").take(1).mergeMap(currentTime => { // }) // 3 - create the pipeline const pipelineOptions = getPipelineOptions( bufferType, options.maxRetry, options.maxRetryOffline); const pipeline = segmentPipelinesManager .createPipeline(bufferType, pipelineOptions); // 4 - create the Buffer const adaptationBuffer$ = bufferManager.createBuffer( clock$, queuedSourceBuffer, segmentBookkeeper, pipeline, wantedBufferAhead$, { manifest, period, adaptation } ).pipe(catchError<IAdaptationBufferEvent<any>, never>((error : Error) => { // non native buffer should not impact the stability of the // player. ie: if a text buffer sends an error, we want to // continue streaming without any subtitles if (!SourceBufferManager.isNative(bufferType)) { log.error("custom buffer: ", bufferType, "has crashed. Aborting it.", error); sourceBufferManager.disposeSourceBuffer(bufferType); errorStream.next(error); return createFakeBuffer( clock$, wantedBufferAhead$, bufferType, { manifest, period }); } log.error( "native buffer: ", bufferType, "has crashed. 
Stopping playback.", error); throw error; // else, throw })); // 5 - Return the buffer and send right events return observableConcat( observableOf(EVENTS.adaptationChange(bufferType, adaptation, period)), observableMerge(adaptationBuffer$, bufferGarbageCollector$) ); })); } } /** * @param {string} bufferType * @param {number} retry * @param {number} offlineRetry * @returns {Object} - Options to give to the Pipeline */ function getPipelineOptions( bufferType : string, retry? : number, offlineRetry? : number ) : IPipelineOptions<any, any> { const cache = arrayIncludes(["audio", "video"], bufferType) ? new InitializationSegmentCache<any>() : undefined; let maxRetry : number; let maxRetryOffline : number; if (bufferType === "image") { maxRetry = 0; // Deactivate BIF fetching if it fails } else { maxRetry = retry != null ? retry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_ERROR; } maxRetryOffline = offlineRetry != null ? offlineRetry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_OFFLINE; return { cache, maxRetry, maxRetryOffline, }; } /** * Returns an Observable which emits ``undefined`` and complete when all * buffers given are _complete_. * * A PeriodBuffer for a given type is considered _complete_ when both of these * conditions are true: * - it is the last PeriodBuffer in the content for the given type * - it has finished downloading segments (it is _full_) * * Simply put a _complete_ PeriodBuffer for a given type means that every * segments needed for this Buffer have been downloaded. * * When the Observable returned here emits, every Buffer are finished. * @param {...Observable} buffers * @returns {Observable} */ function buffersAreComplete( ...buffers : Array<Observable<IMultiplePeriodBuffersEvent>> ) : Observable<boolean> { /** * Array of Observables linked to the Array of Buffers which emit: * - true when the corresponding buffer is considered _complete_. * - false when the corresponding buffer is considered _active_. 
* @type {Array.<Observable>} */ const isCompleteArray : Array<Observable<boolean>> = buffers .map((buffer) => { return buffer.pipe( filter((evt) => { return evt.type === "complete-buffer" || evt.type === "active-buffer"; }), map((evt) => evt.type === "complete-buffer"), startWith(false), distinctUntilChanged() ); }); return observableCombineLatest(...isCompleteArray) .pipe( map((areComplete) => areComplete.every((isComplete) => isComplete)), distinctUntilChanged() ); } /** * Get mimetype string of the first representation declared in the given * adaptation. * @param {Adaptation} adaptation * @returns {string} */ function g
adaptation : Adaptation) : string { const { representations } = adaptation; return ( representations[0] && representations[0].getMimeTypeString() ) || ""; } /** * Create all native SourceBuffers needed for a given Period. * * Native Buffers have the particulary to need to be created at the beginning of * the content. * Custom source buffers (entirely managed in JS) can generally be created and * disposed at will during the lifecycle of the content. * @param {SourceBufferManager} sourceBufferManager * @param {Period} period */ function createNativeSourceBuffersForPeriod( sourceBufferManager : SourceBufferManager, period : Period ) : void { Object.keys(period.adaptations).forEach(bufferType => { if (SourceBufferManager.isNative(bufferType)) { const adaptations = period.adaptations[bufferType] || []; const representations = adaptations ? adaptations[0].representations : []; if (representations.length) { const codec = representations[0].getMimeTypeString(); sourceBufferManager.createSourceBuffer(bufferType, codec); } } }); }
etFirstDeclaredMimeType(
identifier_name
buffers_handler.ts
/** * Copyright 2015 CANAL+ Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { combineLatest as observableCombineLatest, concat as observableConcat, merge as observableMerge, Observable, of as observableOf, ReplaySubject, Subject, } from "rxjs"; import { catchError, distinctUntilChanged, exhaustMap, filter, ignoreElements, map, mapTo, mergeMap, share, startWith, switchMap, take, takeUntil, tap, } from "rxjs/operators"; import config from "../../config"; import { ICustomError, MediaError, } from "../../errors"; import log from "../../log"; import Manifest, { Adaptation, Period, } from "../../manifest"; import arrayIncludes from "../../utils/array-includes"; import InitializationSegmentCache from "../../utils/initialization_segment_cache"; import SortedList from "../../utils/sorted_list"; import WeakMapMemory from "../../utils/weak_map_memory"; import BufferManager, { createFakeBuffer, IAdaptationBufferEvent, IBufferClockTick, } from "../buffer"; import { IPipelineOptions, SegmentPipelinesManager, } from "../pipelines"; import SourceBufferManager, { getBufferTypes, IBufferType, ITextTrackSourceBufferOptions, QueuedSourceBuffer, } from "../source_buffers"; import ActivePeriodEmitter, { IPeriodBufferInfos, } from "./active_period_emitter"; import SegmentBookkeeper from "./segment_bookkeeper"; import EVENTS, { IActivePeriodChangedEvent, IAdaptationChangeEvent, ICompletedBufferEvent, IEndOfStreamEvent, IPeriodBufferClearedEvent, IPeriodBufferReadyEvent, 
IResumeStreamEvent, } from "./stream_events"; // Events coming from single PeriodBuffer type IPeriodBufferEvent = IAdaptationBufferEvent<any> | IAdaptationChangeEvent; // Events coming from function(s) managing multiple PeriodBuffers. type IMultiplePeriodBuffersEvent = IPeriodBufferEvent | IPeriodBufferReadyEvent | IPeriodBufferClearedEvent | ICompletedBufferEvent; // Every events sent by the BuffersHandler exported here. export type IBufferHandlerEvent = IActivePeriodChangedEvent | IMultiplePeriodBuffersEvent | IEndOfStreamEvent | IResumeStreamEvent; /** * Create and manage the various Buffer Observables needed for the content to * stream: * * - Create or dispose SourceBuffers depending on the chosen adaptations. * * - Concatenate Buffers for adaptation from separate Periods at the right * time, to allow smooth transitions between periods. * * - Emit events as Period or Adaptations change or as new Period are * prepared. * * Here multiple buffers can be created at the same time to allow smooth * transitions between periods. * To do this, we dynamically create or destroy buffers as they are needed. * * @param {Object} content - The content to play. Contains the following * properties: * - manifest {Manifest} * * - period {Period} - The first period to play in the content * * - clock$ {Observable} - Emit current informations about the content * being played. Also regulate the frequencies of the time the Buffer check * for new its status / new segments. * * - bufferManager {BufferManager} - Will be used to create new * AdaptationBuffers at will * * - sourceBufferManager {SourceBufferManager} - Will be used to lazily * create SourceBuffer instances associated with the current content. * * - segmentPipelinesManager {SegmentPipelinesManager} - Used to download * segments. 
* * - segmentBookkeeper {WeakMapMemory} - Allow to easily retrieve or create * a unique SegmentBookkeeper per SourceBuffer * * - garbageCollectors {WeakMapMemory} - Allow to easily retrieve or create * a unique Garbage Collector per SourceBuffer * * - options {Object} * * - errorStream {Subject} - Subject to emit minor errors * @returns {Observable} * * TODO Special case for image Buffer, where we want data for EVERY active * periods. * * TODO Special garbage collection for text and image buffers, as we want to * clean it for potentially very long sessions. */ export default function BuffersHandler( content : { manifest : Manifest; period : Period }, clock$ : Observable<IBufferClockTick>, wantedBufferAhead$ : Observable<number>, bufferManager : BufferManager, sourceBufferManager : SourceBufferManager, segmentPipelinesManager : SegmentPipelinesManager<any>, segmentBookkeepers : WeakMapMemory<QueuedSourceBuffer<any>, SegmentBookkeeper>, garbageCollectors : WeakMapMemory<QueuedSourceBuffer<any>, Observable<never>>, options: { maxRetry? : number; maxRetryOffline? : number; textTrackOptions? : ITextTrackSourceBufferOptions; }, errorStream : Subject<Error | ICustomError> ) : Observable<IBufferHandlerEvent> { const manifest = content.manifest; const firstPeriod = content.period; // Initialize all native source buffers from the first period at the same // time. // We cannot lazily create native sourcebuffers since the spec does not // allow adding them during playback. // // From https://w3c.github.io/media-source/#methods // For example, a user agent may throw a QuotaExceededError // exception if the media element has reached the HAVE_METADATA // readyState. This can occur if the user agent's media engine // does not support adding more tracks during playback. 
createNativeSourceBuffersForPeriod(sourceBufferManager, firstPeriod); const addPeriodBuffer$ = new Subject<IPeriodBufferInfos>(); const removePeriodBuffer$ = new Subject<IPeriodBufferInfos>(); const bufferTypes = getBufferTypes(); /** * Every PeriodBuffers for every possible types * @type {Array.<Observable>} */ const buffersArray = bufferTypes .map((bufferType) => { return manageEveryBuffers(bufferType, firstPeriod) .pipe( tap((evt) => { if (evt.type === "periodBufferReady") { addPeriodBuffer$.next(evt.value); } else if (evt.type === "periodBufferCleared") { removePeriodBuffer$.next(evt.value); } }), share() ); }); /** * Emits the active Period every time it changes * @type {Observable} */ const activePeriod$ : Observable<Period> = ActivePeriodEmitter(bufferTypes, addPeriodBuffer$, removePeriodBuffer$) .pipe(filter((period) : period is Period => !!period)); /** * Emits the activePeriodChanged events every time the active Period changes. * @type {Observable} */ const activePeriodChanged$ = activePeriod$ .pipe( tap((period : Period) => { log.info("new active period", period); }), map(period => EVENTS.activePeriodChanged(period)) ); /** * Emits an "end-of-stream" event once every PeriodBuffer are complete. * @type {Observable} */ const streamHasEnded$ = buffersAreComplete(...buffersArray) .pipe(map((areComplete) => areComplete ? EVENTS.endOfStream() : EVENTS.resumeStream() )); return observableMerge( activePeriodChanged$, ...buffersArray, streamHasEnded$ ); /** * Manage creation and removal of Buffers for every Periods. * * Works by creating consecutive buffers through the * manageConsecutivePeriodBuffers function, and restarting it when the clock * goes out of the bounds of these buffers. * @param {string} bufferType - e.g. "audio" or "video" * @param {Period} basePeriod - Initial Period downloaded. 
* @returns {Observable} */ function manageEveryBuffers( bufferType : IBufferType, basePeriod : Period ) : Observable<IMultiplePeriodBuffersEvent> { /** * Keep a PeriodList for cases such as seeking ahead/before the * buffers already created. * When that happens, interrupt the previous buffers and create one back * from the new initial period. * @type {ConsecutivePeriodList} */ const periodList = new SortedList<Period>((a, b) => a.start - b.start); /** * Returns true if the given time is either: * - less than the start of the chronologically first Period * - more than the end of the chronologically last Period * @param {number} time * @returns {boolean} */ function isOutOfPeriodList(time : number) : boolean { const head = periodList.head(); const last = periodList.last(); if (head == null || last == null) { // if no period return true; } return head.start > time || (last.end || Infinity) < time; } /** * Destroy the current set of consecutive buffers. * Used when the clocks goes out of the bounds of those, e.g. when the user * seeks. * We can then re-create consecutive buffers, from the new point in time. 
* @type {Subject} */ const destroyCurrentBuffers = new Subject<void>(); const restartBuffers$ = clock$.pipe( filter(({ currentTime, wantedTimeOffset }) => { if (!manifest.getPeriodForTime(wantedTimeOffset + currentTime)) { // TODO Manage out-of-manifest situations return false; } return isOutOfPeriodList(wantedTimeOffset + currentTime); }), take(1), tap(({ currentTime, wantedTimeOffset }) => { log.info("Current position out of the bounds of the active periods," + "re-creating buffers.", bufferType, currentTime + wantedTimeOffset); destroyCurrentBuffers.next(); }), mergeMap(({ currentTime, wantedTimeOffset }) => { const newInitialPeriod = manifest .getPeriodForTime(currentTime + wantedTimeOffset); if (newInitialPeriod == null) { throw new MediaError("MEDIA_TIME_NOT_FOUND", null, true); } else { // Note: For this to work, manageEveryBuffers should always emit the // "periodBufferReady" event for the new InitialPeriod synchronously return manageEveryBuffers(bufferType, newInitialPeriod); } }) ); const currentBuffers$ = manageConsecutivePeriodBuffers( bufferType, basePeriod, destroyCurrentBuffers ).pipe( tap((message) => { if (message.type === "periodBufferReady") { periodList.add(message.value.period); } else if (message.type === "periodBufferCleared") { periodList.removeFirst(message.value.period); } }), share() // as always, with side-effects ); return observableMerge(currentBuffers$, restartBuffers$); } /** * Manage creation and removal of Buffers for consecutive Periods. * * This function is called recursively for each successive Periods as needed. * * This function does not guarantee creation/destruction of the right Buffers * when the user seeks or rewind in the content. * It only manages regular playback, another layer should be used to manage * those cases. * * You can know about buffers creation and destruction respectively through * the "periodBufferReady" and "periodBufferCleared" events. 
* * The "periodBufferReady" related to the given period should be sent synchronously * on subscription. * Further "periodBufferReady" for further Periods should be sent each time the * Buffer for the previous Buffer is full. * * Buffers for each Period are cleared ("periodBufferCleared" event) either: * - when it has finished to play (currentTime is after it) * - when one of the older Buffers becomes active again, in which case the * Buffers coming after will be cleared from the newest to the oldest. * - when the destroy$ observable emits, in which case every created Buffer * here will be cleared from the newest to the oldest. * * TODO The code here can surely be greatly simplified. * @param {string} bufferType - e.g. "audio" or "video" * @param {Period} basePeriod - Initial Period downloaded. * @param {Observable} destroy$ - Emit when/if all created Buffer from this * point should be destroyed. * @returns {Observable}
function manageConsecutivePeriodBuffers( bufferType : IBufferType, basePeriod : Period, destroy$ : Observable<void> ) : Observable<IMultiplePeriodBuffersEvent> { log.info("creating new Buffer for", bufferType, basePeriod); /** * Emits the chosen adaptation for the current type. * @type {ReplaySubject} */ const adaptation$ = new ReplaySubject<Adaptation|null>(1); /** * Emits the Period of the next Period Buffer when it can be created. * @type {Subject} */ const createNextPeriodBuffer$ = new Subject<Period>(); /** * Emits when the Buffers for the next Periods should be destroyed, if * created. * @type {Subject} */ const destroyNextBuffers$ = new Subject<void>(); /** * Emits when the current position goes over the end of the current buffer. * @type {Subject} */ const endOfCurrentBuffer$ = clock$ .pipe(filter(({ currentTime, wantedTimeOffset }) => !!basePeriod.end && (currentTime + wantedTimeOffset) >= basePeriod.end )); /** * Create Period Buffer for the next Period. * @type {Observable} */ const nextPeriodBuffer$ = createNextPeriodBuffer$ .pipe(exhaustMap((nextPeriod) => { return manageConsecutivePeriodBuffers( bufferType, nextPeriod, destroyNextBuffers$); })); /** * Allows to destroy each created Buffer, from the newest to the oldest, * once destroy$ emits. * @type {Observable} */ const destroyAll$ = destroy$.pipe( take(1), tap(() => { // first complete createNextBuffer$ to allow completion of the // nextPeriodBuffer$ observable once every further Buffers have been // cleared. createNextPeriodBuffer$.complete(); // emit destruction signal to the next Buffer first destroyNextBuffers$.next(); destroyNextBuffers$.complete(); // we do not need it anymore }), share() // share side-effects ); /** * Will emit when the current buffer should be destroyed. 
* @type {Observable} */ const killCurrentBuffer$ = observableMerge(endOfCurrentBuffer$, destroyAll$); const periodBuffer$ = createPeriodBuffer(bufferType, basePeriod, adaptation$).pipe( mergeMap(( evt : IPeriodBufferEvent ) : Observable<IMultiplePeriodBuffersEvent> => { const { type } = evt; if (type === "full-buffer") { /** * The Period coming just after the current one. * @type {Period|undefined} */ const nextPeriod = manifest.getPeriodAfter(basePeriod); if (nextPeriod == null) { // no more period, emits event return observableOf(EVENTS.bufferComplete(bufferType)); } else { // current buffer is full, create the next one if not createNextPeriodBuffer$.next(nextPeriod); } } else if (type === "active-buffer") { // current buffer is active, destroy next buffer if created destroyNextBuffers$.next(); } return observableOf(evt); }), share() ); /** * Buffer for the current Period. * @type {Observable} */ const currentBuffer$ : Observable<IMultiplePeriodBuffersEvent> = observableConcat( observableOf(EVENTS.periodBufferReady(bufferType, basePeriod, adaptation$)), periodBuffer$.pipe(takeUntil(killCurrentBuffer$)), observableOf(EVENTS.periodBufferCleared(bufferType, basePeriod)) .pipe(tap(() => { log.info("destroying buffer for", bufferType, basePeriod); })) ); return observableMerge( currentBuffer$, nextPeriodBuffer$, destroyAll$.pipe(ignoreElements()) ); } /** * Create single PeriodBuffer Observable: * - Lazily create (or reuse) a SourceBuffer for the given type. * - Create a Buffer linked to an Adaptation each time it changes, to * download and append the corresponding Segments in the SourceBuffer. * - Announce when the Buffer is full or is awaiting new Segments through * events * * /!\ This Observable has multiple side-effects (creation of SourceBuffers, * downloading and appending of Segments etc.) on subscription. * * @param {string} bufferType * @param {Period} period - The period concerned * @param {Observable} adaptation$ - Emit the chosen adaptation. 
* Emit null to deactivate a type of adaptation * @returns {Observable} */ function createPeriodBuffer( bufferType : IBufferType, period: Period, adaptation$ : Observable<Adaptation|null> ) : Observable<IPeriodBufferEvent> { return adaptation$.pipe(switchMap((adaptation) => { if (adaptation == null) { log.info(`set no ${bufferType} Adaptation`, period); let cleanBuffer$ : Observable<null>; if (sourceBufferManager.has(bufferType)) { log.info(`clearing previous ${bufferType} SourceBuffer`); const _queuedSourceBuffer = sourceBufferManager.get(bufferType); cleanBuffer$ = _queuedSourceBuffer .removeBuffer({ start: period.start, end: period.end || Infinity }) .pipe(mapTo(null)); } else { cleanBuffer$ = observableOf(null); } return observableConcat( cleanBuffer$.pipe(mapTo(EVENTS.adaptationChange(bufferType, null, period))), createFakeBuffer(clock$, wantedBufferAhead$, bufferType, { manifest, period }) ); } log.info(`updating ${bufferType} adaptation`, adaptation, period); // 1 - create or reuse the SourceBuffer let queuedSourceBuffer : QueuedSourceBuffer<any>; if (sourceBufferManager.has(bufferType)) { log.info("reusing a previous SourceBuffer for the type", bufferType); queuedSourceBuffer = sourceBufferManager.get(bufferType); } else { const codec = getFirstDeclaredMimeType(adaptation); const sourceBufferOptions = bufferType === "text" ? 
options.textTrackOptions : undefined; queuedSourceBuffer = sourceBufferManager .createSourceBuffer(bufferType, codec, sourceBufferOptions); } // 2 - create or reuse the associated BufferGarbageCollector and // SegmentBookkeeper const bufferGarbageCollector$ = garbageCollectors.get(queuedSourceBuffer); const segmentBookkeeper = segmentBookkeepers.get(queuedSourceBuffer); // TODO Clean previous QueuedSourceBuffer for previous content in the period // // 3 - Clean possible content from a precedent adaptation in this period // // (take the clock into account to avoid removing "now" for native sourceBuffers) // // like: // return clock$.pluck("currentTime").take(1).mergeMap(currentTime => { // }) // 3 - create the pipeline const pipelineOptions = getPipelineOptions( bufferType, options.maxRetry, options.maxRetryOffline); const pipeline = segmentPipelinesManager .createPipeline(bufferType, pipelineOptions); // 4 - create the Buffer const adaptationBuffer$ = bufferManager.createBuffer( clock$, queuedSourceBuffer, segmentBookkeeper, pipeline, wantedBufferAhead$, { manifest, period, adaptation } ).pipe(catchError<IAdaptationBufferEvent<any>, never>((error : Error) => { // non native buffer should not impact the stability of the // player. ie: if a text buffer sends an error, we want to // continue streaming without any subtitles if (!SourceBufferManager.isNative(bufferType)) { log.error("custom buffer: ", bufferType, "has crashed. Aborting it.", error); sourceBufferManager.disposeSourceBuffer(bufferType); errorStream.next(error); return createFakeBuffer( clock$, wantedBufferAhead$, bufferType, { manifest, period }); } log.error( "native buffer: ", bufferType, "has crashed. 
Stopping playback.", error); throw error; // else, throw })); // 5 - Return the buffer and send right events return observableConcat( observableOf(EVENTS.adaptationChange(bufferType, adaptation, period)), observableMerge(adaptationBuffer$, bufferGarbageCollector$) ); })); } } /** * @param {string} bufferType * @param {number} retry * @param {number} offlineRetry * @returns {Object} - Options to give to the Pipeline */ function getPipelineOptions( bufferType : string, retry? : number, offlineRetry? : number ) : IPipelineOptions<any, any> { const cache = arrayIncludes(["audio", "video"], bufferType) ? new InitializationSegmentCache<any>() : undefined; let maxRetry : number; let maxRetryOffline : number; if (bufferType === "image") { maxRetry = 0; // Deactivate BIF fetching if it fails } else { maxRetry = retry != null ? retry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_ERROR; } maxRetryOffline = offlineRetry != null ? offlineRetry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_OFFLINE; return { cache, maxRetry, maxRetryOffline, }; } /** * Returns an Observable which emits ``undefined`` and complete when all * buffers given are _complete_. * * A PeriodBuffer for a given type is considered _complete_ when both of these * conditions are true: * - it is the last PeriodBuffer in the content for the given type * - it has finished downloading segments (it is _full_) * * Simply put a _complete_ PeriodBuffer for a given type means that every * segments needed for this Buffer have been downloaded. * * When the Observable returned here emits, every Buffer are finished. * @param {...Observable} buffers * @returns {Observable} */ function buffersAreComplete( ...buffers : Array<Observable<IMultiplePeriodBuffersEvent>> ) : Observable<boolean> { /** * Array of Observables linked to the Array of Buffers which emit: * - true when the corresponding buffer is considered _complete_. * - false when the corresponding buffer is considered _active_. 
* @type {Array.<Observable>} */ const isCompleteArray : Array<Observable<boolean>> = buffers .map((buffer) => { return buffer.pipe( filter((evt) => { return evt.type === "complete-buffer" || evt.type === "active-buffer"; }), map((evt) => evt.type === "complete-buffer"), startWith(false), distinctUntilChanged() ); }); return observableCombineLatest(...isCompleteArray) .pipe( map((areComplete) => areComplete.every((isComplete) => isComplete)), distinctUntilChanged() ); } /** * Get mimetype string of the first representation declared in the given * adaptation. * @param {Adaptation} adaptation * @returns {string} */ function getFirstDeclaredMimeType(adaptation : Adaptation) : string { const { representations } = adaptation; return ( representations[0] && representations[0].getMimeTypeString() ) || ""; } /** * Create all native SourceBuffers needed for a given Period. * * Native Buffers have the particulary to need to be created at the beginning of * the content. * Custom source buffers (entirely managed in JS) can generally be created and * disposed at will during the lifecycle of the content. * @param {SourceBufferManager} sourceBufferManager * @param {Period} period */ function createNativeSourceBuffersForPeriod( sourceBufferManager : SourceBufferManager, period : Period ) : void { Object.keys(period.adaptations).forEach(bufferType => { if (SourceBufferManager.isNative(bufferType)) { const adaptations = period.adaptations[bufferType] || []; const representations = adaptations ? adaptations[0].representations : []; if (representations.length) { const codec = representations[0].getMimeTypeString(); sourceBufferManager.createSourceBuffer(bufferType, codec); } } }); }
*/
random_line_split
buffers_handler.ts
/** * Copyright 2015 CANAL+ Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { combineLatest as observableCombineLatest, concat as observableConcat, merge as observableMerge, Observable, of as observableOf, ReplaySubject, Subject, } from "rxjs"; import { catchError, distinctUntilChanged, exhaustMap, filter, ignoreElements, map, mapTo, mergeMap, share, startWith, switchMap, take, takeUntil, tap, } from "rxjs/operators"; import config from "../../config"; import { ICustomError, MediaError, } from "../../errors"; import log from "../../log"; import Manifest, { Adaptation, Period, } from "../../manifest"; import arrayIncludes from "../../utils/array-includes"; import InitializationSegmentCache from "../../utils/initialization_segment_cache"; import SortedList from "../../utils/sorted_list"; import WeakMapMemory from "../../utils/weak_map_memory"; import BufferManager, { createFakeBuffer, IAdaptationBufferEvent, IBufferClockTick, } from "../buffer"; import { IPipelineOptions, SegmentPipelinesManager, } from "../pipelines"; import SourceBufferManager, { getBufferTypes, IBufferType, ITextTrackSourceBufferOptions, QueuedSourceBuffer, } from "../source_buffers"; import ActivePeriodEmitter, { IPeriodBufferInfos, } from "./active_period_emitter"; import SegmentBookkeeper from "./segment_bookkeeper"; import EVENTS, { IActivePeriodChangedEvent, IAdaptationChangeEvent, ICompletedBufferEvent, IEndOfStreamEvent, IPeriodBufferClearedEvent, IPeriodBufferReadyEvent, 
IResumeStreamEvent, } from "./stream_events"; // Events coming from single PeriodBuffer type IPeriodBufferEvent = IAdaptationBufferEvent<any> | IAdaptationChangeEvent; // Events coming from function(s) managing multiple PeriodBuffers. type IMultiplePeriodBuffersEvent = IPeriodBufferEvent | IPeriodBufferReadyEvent | IPeriodBufferClearedEvent | ICompletedBufferEvent; // Every events sent by the BuffersHandler exported here. export type IBufferHandlerEvent = IActivePeriodChangedEvent | IMultiplePeriodBuffersEvent | IEndOfStreamEvent | IResumeStreamEvent; /** * Create and manage the various Buffer Observables needed for the content to * stream: * * - Create or dispose SourceBuffers depending on the chosen adaptations. * * - Concatenate Buffers for adaptation from separate Periods at the right * time, to allow smooth transitions between periods. * * - Emit events as Period or Adaptations change or as new Period are * prepared. * * Here multiple buffers can be created at the same time to allow smooth * transitions between periods. * To do this, we dynamically create or destroy buffers as they are needed. * * @param {Object} content - The content to play. Contains the following * properties: * - manifest {Manifest} * * - period {Period} - The first period to play in the content * * - clock$ {Observable} - Emit current informations about the content * being played. Also regulate the frequencies of the time the Buffer check * for new its status / new segments. * * - bufferManager {BufferManager} - Will be used to create new * AdaptationBuffers at will * * - sourceBufferManager {SourceBufferManager} - Will be used to lazily * create SourceBuffer instances associated with the current content. * * - segmentPipelinesManager {SegmentPipelinesManager} - Used to download * segments. 
* * - segmentBookkeeper {WeakMapMemory} - Allow to easily retrieve or create * a unique SegmentBookkeeper per SourceBuffer * * - garbageCollectors {WeakMapMemory} - Allow to easily retrieve or create * a unique Garbage Collector per SourceBuffer * * - options {Object} * * - errorStream {Subject} - Subject to emit minor errors * @returns {Observable} * * TODO Special case for image Buffer, where we want data for EVERY active * periods. * * TODO Special garbage collection for text and image buffers, as we want to * clean it for potentially very long sessions. */ export default function BuffersHandler( content : { manifest : Manifest; period : Period }, clock$ : Observable<IBufferClockTick>, wantedBufferAhead$ : Observable<number>, bufferManager : BufferManager, sourceBufferManager : SourceBufferManager, segmentPipelinesManager : SegmentPipelinesManager<any>, segmentBookkeepers : WeakMapMemory<QueuedSourceBuffer<any>, SegmentBookkeeper>, garbageCollectors : WeakMapMemory<QueuedSourceBuffer<any>, Observable<never>>, options: { maxRetry? : number; maxRetryOffline? : number; textTrackOptions? : ITextTrackSourceBufferOptions; }, errorStream : Subject<Error | ICustomError> ) : Observable<IBufferHandlerEvent> { const manifest = content.manifest; const firstPeriod = content.period; // Initialize all native source buffers from the first period at the same // time. // We cannot lazily create native sourcebuffers since the spec does not // allow adding them during playback. // // From https://w3c.github.io/media-source/#methods // For example, a user agent may throw a QuotaExceededError // exception if the media element has reached the HAVE_METADATA // readyState. This can occur if the user agent's media engine // does not support adding more tracks during playback. 
createNativeSourceBuffersForPeriod(sourceBufferManager, firstPeriod); const addPeriodBuffer$ = new Subject<IPeriodBufferInfos>(); const removePeriodBuffer$ = new Subject<IPeriodBufferInfos>(); const bufferTypes = getBufferTypes(); /** * Every PeriodBuffers for every possible types * @type {Array.<Observable>} */ const buffersArray = bufferTypes .map((bufferType) => { return manageEveryBuffers(bufferType, firstPeriod) .pipe( tap((evt) => { if (evt.type === "periodBufferReady") { addPeriodBuffer$.next(evt.value); } else if (evt.type === "periodBufferCleared") { removePeriodBuffer$.next(evt.value); } }), share() ); }); /** * Emits the active Period every time it changes * @type {Observable} */ const activePeriod$ : Observable<Period> = ActivePeriodEmitter(bufferTypes, addPeriodBuffer$, removePeriodBuffer$) .pipe(filter((period) : period is Period => !!period)); /** * Emits the activePeriodChanged events every time the active Period changes. * @type {Observable} */ const activePeriodChanged$ = activePeriod$ .pipe( tap((period : Period) => { log.info("new active period", period); }), map(period => EVENTS.activePeriodChanged(period)) ); /** * Emits an "end-of-stream" event once every PeriodBuffer are complete. * @type {Observable} */ const streamHasEnded$ = buffersAreComplete(...buffersArray) .pipe(map((areComplete) => areComplete ? EVENTS.endOfStream() : EVENTS.resumeStream() )); return observableMerge( activePeriodChanged$, ...buffersArray, streamHasEnded$ ); /** * Manage creation and removal of Buffers for every Periods. * * Works by creating consecutive buffers through the * manageConsecutivePeriodBuffers function, and restarting it when the clock * goes out of the bounds of these buffers. * @param {string} bufferType - e.g. "audio" or "video" * @param {Period} basePeriod - Initial Period downloaded. 
* @returns {Observable} */ function manageEveryBuffers( bufferType : IBufferType, basePeriod : Period ) : Observable<IMultiplePeriodBuffersEvent> { /** * Keep a PeriodList for cases such as seeking ahead/before the * buffers already created. * When that happens, interrupt the previous buffers and create one back * from the new initial period. * @type {ConsecutivePeriodList} */ const periodList = new SortedList<Period>((a, b) => a.start - b.start); /** * Returns true if the given time is either: * - less than the start of the chronologically first Period * - more than the end of the chronologically last Period * @param {number} time * @returns {boolean} */ function isOutOfPeriodList(time : number) : boolean { const head = periodList.head(); const last = periodList.last(); if (head == null || last == null) { // if no period return true; } return head.start > time || (last.end || Infinity) < time; } /** * Destroy the current set of consecutive buffers. * Used when the clocks goes out of the bounds of those, e.g. when the user * seeks. * We can then re-create consecutive buffers, from the new point in time. 
* @type {Subject} */ const destroyCurrentBuffers = new Subject<void>(); const restartBuffers$ = clock$.pipe( filter(({ currentTime, wantedTimeOffset }) => { if (!manifest.getPeriodForTime(wantedTimeOffset + currentTime)) { // TODO Manage out-of-manifest situations return false; } return isOutOfPeriodList(wantedTimeOffset + currentTime); }), take(1), tap(({ currentTime, wantedTimeOffset }) => { log.info("Current position out of the bounds of the active periods," + "re-creating buffers.", bufferType, currentTime + wantedTimeOffset); destroyCurrentBuffers.next(); }), mergeMap(({ currentTime, wantedTimeOffset }) => { const newInitialPeriod = manifest .getPeriodForTime(currentTime + wantedTimeOffset); if (newInitialPeriod == null) { throw new MediaError("MEDIA_TIME_NOT_FOUND", null, true); } else { // Note: For this to work, manageEveryBuffers should always emit the // "periodBufferReady" event for the new InitialPeriod synchronously return manageEveryBuffers(bufferType, newInitialPeriod); } }) ); const currentBuffers$ = manageConsecutivePeriodBuffers( bufferType, basePeriod, destroyCurrentBuffers ).pipe( tap((message) => { if (message.type === "periodBufferReady") { periodList.add(message.value.period); } else if (message.type === "periodBufferCleared") { periodList.removeFirst(message.value.period); } }), share() // as always, with side-effects ); return observableMerge(currentBuffers$, restartBuffers$); } /** * Manage creation and removal of Buffers for consecutive Periods. * * This function is called recursively for each successive Periods as needed. * * This function does not guarantee creation/destruction of the right Buffers * when the user seeks or rewind in the content. * It only manages regular playback, another layer should be used to manage * those cases. * * You can know about buffers creation and destruction respectively through * the "periodBufferReady" and "periodBufferCleared" events. 
* * The "periodBufferReady" related to the given period should be sent synchronously * on subscription. * Further "periodBufferReady" for further Periods should be sent each time the * Buffer for the previous Buffer is full. * * Buffers for each Period are cleared ("periodBufferCleared" event) either: * - when it has finished to play (currentTime is after it) * - when one of the older Buffers becomes active again, in which case the * Buffers coming after will be cleared from the newest to the oldest. * - when the destroy$ observable emits, in which case every created Buffer * here will be cleared from the newest to the oldest. * * TODO The code here can surely be greatly simplified. * @param {string} bufferType - e.g. "audio" or "video" * @param {Period} basePeriod - Initial Period downloaded. * @param {Observable} destroy$ - Emit when/if all created Buffer from this * point should be destroyed. * @returns {Observable} */ function manageConsecutivePeriodBuffers( bufferType : IBufferType, basePeriod : Period, destroy$ : Observable<void> ) : Observable<IMultiplePeriodBuffersEvent> { log.info("creating new Buffer for", bufferType, basePeriod); /** * Emits the chosen adaptation for the current type. * @type {ReplaySubject} */ const adaptation$ = new ReplaySubject<Adaptation|null>(1); /** * Emits the Period of the next Period Buffer when it can be created. * @type {Subject} */ const createNextPeriodBuffer$ = new Subject<Period>(); /** * Emits when the Buffers for the next Periods should be destroyed, if * created. * @type {Subject} */ const destroyNextBuffers$ = new Subject<void>(); /** * Emits when the current position goes over the end of the current buffer. * @type {Subject} */ const endOfCurrentBuffer$ = clock$ .pipe(filter(({ currentTime, wantedTimeOffset }) => !!basePeriod.end && (currentTime + wantedTimeOffset) >= basePeriod.end )); /** * Create Period Buffer for the next Period. 
* @type {Observable} */ const nextPeriodBuffer$ = createNextPeriodBuffer$ .pipe(exhaustMap((nextPeriod) => { return manageConsecutivePeriodBuffers( bufferType, nextPeriod, destroyNextBuffers$); })); /** * Allows to destroy each created Buffer, from the newest to the oldest, * once destroy$ emits. * @type {Observable} */ const destroyAll$ = destroy$.pipe( take(1), tap(() => { // first complete createNextBuffer$ to allow completion of the // nextPeriodBuffer$ observable once every further Buffers have been // cleared. createNextPeriodBuffer$.complete(); // emit destruction signal to the next Buffer first destroyNextBuffers$.next(); destroyNextBuffers$.complete(); // we do not need it anymore }), share() // share side-effects ); /** * Will emit when the current buffer should be destroyed. * @type {Observable} */ const killCurrentBuffer$ = observableMerge(endOfCurrentBuffer$, destroyAll$); const periodBuffer$ = createPeriodBuffer(bufferType, basePeriod, adaptation$).pipe( mergeMap(( evt : IPeriodBufferEvent ) : Observable<IMultiplePeriodBuffersEvent> => { const { type } = evt; if (type === "full-buffer") { /** * The Period coming just after the current one. * @type {Period|undefined} */ const nextPeriod = manifest.getPeriodAfter(basePeriod); if (nextPeriod == null) { // no more period, emits event return observableOf(EVENTS.bufferComplete(bufferType)); } else { // current buffer is full, create the next one if not createNextPeriodBuffer$.next(nextPeriod); } } else if (type === "active-buffer") { // current buffer is active, destroy next buffer if created destroyNextBuffers$.next(); } return observableOf(evt); }), share() ); /** * Buffer for the current Period. 
* @type {Observable} */ const currentBuffer$ : Observable<IMultiplePeriodBuffersEvent> = observableConcat( observableOf(EVENTS.periodBufferReady(bufferType, basePeriod, adaptation$)), periodBuffer$.pipe(takeUntil(killCurrentBuffer$)), observableOf(EVENTS.periodBufferCleared(bufferType, basePeriod)) .pipe(tap(() => { log.info("destroying buffer for", bufferType, basePeriod); })) ); return observableMerge( currentBuffer$, nextPeriodBuffer$, destroyAll$.pipe(ignoreElements()) ); } /** * Create single PeriodBuffer Observable: * - Lazily create (or reuse) a SourceBuffer for the given type. * - Create a Buffer linked to an Adaptation each time it changes, to * download and append the corresponding Segments in the SourceBuffer. * - Announce when the Buffer is full or is awaiting new Segments through * events * * /!\ This Observable has multiple side-effects (creation of SourceBuffers, * downloading and appending of Segments etc.) on subscription. * * @param {string} bufferType * @param {Period} period - The period concerned * @param {Observable} adaptation$ - Emit the chosen adaptation. 
* Emit null to deactivate a type of adaptation * @returns {Observable} */ function createPeriodBuffer( bufferType : IBufferType, period: Period, adaptation$ : Observable<Adaptation|null> ) : Observable<IPeriodBufferEvent> { return adaptation$.pipe(switchMap((adaptation) => { if (adaptation == null) { log.info(`set no ${bufferType} Adaptation`, period); let cleanBuffer$ : Observable<null>; if (sourceBufferManager.has(bufferType)) { log.info(`clearing previous ${bufferType} SourceBuffer`); const _queuedSourceBuffer = sourceBufferManager.get(bufferType); cleanBuffer$ = _queuedSourceBuffer .removeBuffer({ start: period.start, end: period.end || Infinity }) .pipe(mapTo(null)); } else { cleanBuffer$ = observableOf(null); } return observableConcat( cleanBuffer$.pipe(mapTo(EVENTS.adaptationChange(bufferType, null, period))), createFakeBuffer(clock$, wantedBufferAhead$, bufferType, { manifest, period }) ); } log.info(`updating ${bufferType} adaptation`, adaptation, period); // 1 - create or reuse the SourceBuffer let queuedSourceBuffer : QueuedSourceBuffer<any>; if (sourceBufferManager.has(bufferType)) { log.info("reusing a previous SourceBuffer for the type", bufferType); queuedSourceBuffer = sourceBufferManager.get(bufferType); } else { const codec = getFirstDeclaredMimeType(adaptation); const sourceBufferOptions = bufferType === "text" ? 
options.textTrackOptions : undefined; queuedSourceBuffer = sourceBufferManager .createSourceBuffer(bufferType, codec, sourceBufferOptions); } // 2 - create or reuse the associated BufferGarbageCollector and // SegmentBookkeeper const bufferGarbageCollector$ = garbageCollectors.get(queuedSourceBuffer); const segmentBookkeeper = segmentBookkeepers.get(queuedSourceBuffer); // TODO Clean previous QueuedSourceBuffer for previous content in the period // // 3 - Clean possible content from a precedent adaptation in this period // // (take the clock into account to avoid removing "now" for native sourceBuffers) // // like: // return clock$.pluck("currentTime").take(1).mergeMap(currentTime => { // }) // 3 - create the pipeline const pipelineOptions = getPipelineOptions( bufferType, options.maxRetry, options.maxRetryOffline); const pipeline = segmentPipelinesManager .createPipeline(bufferType, pipelineOptions); // 4 - create the Buffer const adaptationBuffer$ = bufferManager.createBuffer( clock$, queuedSourceBuffer, segmentBookkeeper, pipeline, wantedBufferAhead$, { manifest, period, adaptation } ).pipe(catchError<IAdaptationBufferEvent<any>, never>((error : Error) => { // non native buffer should not impact the stability of the // player. ie: if a text buffer sends an error, we want to // continue streaming without any subtitles if (!SourceBufferManager.isNative(bufferType)) { log.error("custom buffer: ", bufferType, "has crashed. Aborting it.", error); sourceBufferManager.disposeSourceBuffer(bufferType); errorStream.next(error); return createFakeBuffer( clock$, wantedBufferAhead$, bufferType, { manifest, period }); } log.error( "native buffer: ", bufferType, "has crashed. 
Stopping playback.", error); throw error; // else, throw })); // 5 - Return the buffer and send right events return observableConcat( observableOf(EVENTS.adaptationChange(bufferType, adaptation, period)), observableMerge(adaptationBuffer$, bufferGarbageCollector$) ); })); } } /** * @param {string} bufferType * @param {number} retry * @param {number} offlineRetry * @returns {Object} - Options to give to the Pipeline */ function getPipelineOptions( bufferType : string, retry? : number, offlineRetry? : number ) : IPipelineOptions<any, any> { const cache = arrayIncludes(["audio", "video"], bufferType) ? new InitializationSegmentCache<any>() : undefined; let maxRetry : number; let maxRetryOffline : number; if (bufferType === "image") { maxRetry = 0; // Deactivate BIF fetching if it fails } else { maxRetry = retry != null ? retry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_ERROR; } maxRetryOffline = offlineRetry != null ? offlineRetry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_OFFLINE; return { cache, maxRetry, maxRetryOffline, }; } /** * Returns an Observable which emits ``undefined`` and complete when all * buffers given are _complete_. * * A PeriodBuffer for a given type is considered _complete_ when both of these * conditions are true: * - it is the last PeriodBuffer in the content for the given type * - it has finished downloading segments (it is _full_) * * Simply put a _complete_ PeriodBuffer for a given type means that every * segments needed for this Buffer have been downloaded. * * When the Observable returned here emits, every Buffer are finished. * @param {...Observable} buffers * @returns {Observable} */ function buffersAreComplete( ...buffers : Array<Observable<IMultiplePeriodBuffersEvent>> ) : Observable<boolean> {
/** * Get mimetype string of the first representation declared in the given * adaptation. * @param {Adaptation} adaptation * @returns {string} */ function getFirstDeclaredMimeType(adaptation : Adaptation) : string { const { representations } = adaptation; return ( representations[0] && representations[0].getMimeTypeString() ) || ""; } /** * Create all native SourceBuffers needed for a given Period. * * Native Buffers have the particulary to need to be created at the beginning of * the content. * Custom source buffers (entirely managed in JS) can generally be created and * disposed at will during the lifecycle of the content. * @param {SourceBufferManager} sourceBufferManager * @param {Period} period */ function createNativeSourceBuffersForPeriod( sourceBufferManager : SourceBufferManager, period : Period ) : void { Object.keys(period.adaptations).forEach(bufferType => { if (SourceBufferManager.isNative(bufferType)) { const adaptations = period.adaptations[bufferType] || []; const representations = adaptations ? adaptations[0].representations : []; if (representations.length) { const codec = representations[0].getMimeTypeString(); sourceBufferManager.createSourceBuffer(bufferType, codec); } } }); }
/** * Array of Observables linked to the Array of Buffers which emit: * - true when the corresponding buffer is considered _complete_. * - false when the corresponding buffer is considered _active_. * @type {Array.<Observable>} */ const isCompleteArray : Array<Observable<boolean>> = buffers .map((buffer) => { return buffer.pipe( filter((evt) => { return evt.type === "complete-buffer" || evt.type === "active-buffer"; }), map((evt) => evt.type === "complete-buffer"), startWith(false), distinctUntilChanged() ); }); return observableCombineLatest(...isCompleteArray) .pipe( map((areComplete) => areComplete.every((isComplete) => isComplete)), distinctUntilChanged() ); }
identifier_body
buffers_handler.ts
/** * Copyright 2015 CANAL+ Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { combineLatest as observableCombineLatest, concat as observableConcat, merge as observableMerge, Observable, of as observableOf, ReplaySubject, Subject, } from "rxjs"; import { catchError, distinctUntilChanged, exhaustMap, filter, ignoreElements, map, mapTo, mergeMap, share, startWith, switchMap, take, takeUntil, tap, } from "rxjs/operators"; import config from "../../config"; import { ICustomError, MediaError, } from "../../errors"; import log from "../../log"; import Manifest, { Adaptation, Period, } from "../../manifest"; import arrayIncludes from "../../utils/array-includes"; import InitializationSegmentCache from "../../utils/initialization_segment_cache"; import SortedList from "../../utils/sorted_list"; import WeakMapMemory from "../../utils/weak_map_memory"; import BufferManager, { createFakeBuffer, IAdaptationBufferEvent, IBufferClockTick, } from "../buffer"; import { IPipelineOptions, SegmentPipelinesManager, } from "../pipelines"; import SourceBufferManager, { getBufferTypes, IBufferType, ITextTrackSourceBufferOptions, QueuedSourceBuffer, } from "../source_buffers"; import ActivePeriodEmitter, { IPeriodBufferInfos, } from "./active_period_emitter"; import SegmentBookkeeper from "./segment_bookkeeper"; import EVENTS, { IActivePeriodChangedEvent, IAdaptationChangeEvent, ICompletedBufferEvent, IEndOfStreamEvent, IPeriodBufferClearedEvent, IPeriodBufferReadyEvent, 
IResumeStreamEvent, } from "./stream_events"; // Events coming from single PeriodBuffer type IPeriodBufferEvent = IAdaptationBufferEvent<any> | IAdaptationChangeEvent; // Events coming from function(s) managing multiple PeriodBuffers. type IMultiplePeriodBuffersEvent = IPeriodBufferEvent | IPeriodBufferReadyEvent | IPeriodBufferClearedEvent | ICompletedBufferEvent; // Every events sent by the BuffersHandler exported here. export type IBufferHandlerEvent = IActivePeriodChangedEvent | IMultiplePeriodBuffersEvent | IEndOfStreamEvent | IResumeStreamEvent; /** * Create and manage the various Buffer Observables needed for the content to * stream: * * - Create or dispose SourceBuffers depending on the chosen adaptations. * * - Concatenate Buffers for adaptation from separate Periods at the right * time, to allow smooth transitions between periods. * * - Emit events as Period or Adaptations change or as new Period are * prepared. * * Here multiple buffers can be created at the same time to allow smooth * transitions between periods. * To do this, we dynamically create or destroy buffers as they are needed. * * @param {Object} content - The content to play. Contains the following * properties: * - manifest {Manifest} * * - period {Period} - The first period to play in the content * * - clock$ {Observable} - Emit current informations about the content * being played. Also regulate the frequencies of the time the Buffer check * for new its status / new segments. * * - bufferManager {BufferManager} - Will be used to create new * AdaptationBuffers at will * * - sourceBufferManager {SourceBufferManager} - Will be used to lazily * create SourceBuffer instances associated with the current content. * * - segmentPipelinesManager {SegmentPipelinesManager} - Used to download * segments. 
* * - segmentBookkeeper {WeakMapMemory} - Allow to easily retrieve or create * a unique SegmentBookkeeper per SourceBuffer * * - garbageCollectors {WeakMapMemory} - Allow to easily retrieve or create * a unique Garbage Collector per SourceBuffer * * - options {Object} * * - errorStream {Subject} - Subject to emit minor errors * @returns {Observable} * * TODO Special case for image Buffer, where we want data for EVERY active * periods. * * TODO Special garbage collection for text and image buffers, as we want to * clean it for potentially very long sessions. */ export default function BuffersHandler( content : { manifest : Manifest; period : Period }, clock$ : Observable<IBufferClockTick>, wantedBufferAhead$ : Observable<number>, bufferManager : BufferManager, sourceBufferManager : SourceBufferManager, segmentPipelinesManager : SegmentPipelinesManager<any>, segmentBookkeepers : WeakMapMemory<QueuedSourceBuffer<any>, SegmentBookkeeper>, garbageCollectors : WeakMapMemory<QueuedSourceBuffer<any>, Observable<never>>, options: { maxRetry? : number; maxRetryOffline? : number; textTrackOptions? : ITextTrackSourceBufferOptions; }, errorStream : Subject<Error | ICustomError> ) : Observable<IBufferHandlerEvent> { const manifest = content.manifest; const firstPeriod = content.period; // Initialize all native source buffers from the first period at the same // time. // We cannot lazily create native sourcebuffers since the spec does not // allow adding them during playback. // // From https://w3c.github.io/media-source/#methods // For example, a user agent may throw a QuotaExceededError // exception if the media element has reached the HAVE_METADATA // readyState. This can occur if the user agent's media engine // does not support adding more tracks during playback. 
createNativeSourceBuffersForPeriod(sourceBufferManager, firstPeriod); const addPeriodBuffer$ = new Subject<IPeriodBufferInfos>(); const removePeriodBuffer$ = new Subject<IPeriodBufferInfos>(); const bufferTypes = getBufferTypes(); /** * Every PeriodBuffers for every possible types * @type {Array.<Observable>} */ const buffersArray = bufferTypes .map((bufferType) => { return manageEveryBuffers(bufferType, firstPeriod) .pipe( tap((evt) => { if (evt.type === "periodBufferReady") { addPeriodBuffer$.next(evt.value); } else if (evt.type === "periodBufferCleared") { removePeriodBuffer$.next(evt.value); } }), share() ); }); /** * Emits the active Period every time it changes * @type {Observable} */ const activePeriod$ : Observable<Period> = ActivePeriodEmitter(bufferTypes, addPeriodBuffer$, removePeriodBuffer$) .pipe(filter((period) : period is Period => !!period)); /** * Emits the activePeriodChanged events every time the active Period changes. * @type {Observable} */ const activePeriodChanged$ = activePeriod$ .pipe( tap((period : Period) => { log.info("new active period", period); }), map(period => EVENTS.activePeriodChanged(period)) ); /** * Emits an "end-of-stream" event once every PeriodBuffer are complete. * @type {Observable} */ const streamHasEnded$ = buffersAreComplete(...buffersArray) .pipe(map((areComplete) => areComplete ? EVENTS.endOfStream() : EVENTS.resumeStream() )); return observableMerge( activePeriodChanged$, ...buffersArray, streamHasEnded$ ); /** * Manage creation and removal of Buffers for every Periods. * * Works by creating consecutive buffers through the * manageConsecutivePeriodBuffers function, and restarting it when the clock * goes out of the bounds of these buffers. * @param {string} bufferType - e.g. "audio" or "video" * @param {Period} basePeriod - Initial Period downloaded. 
* @returns {Observable} */ function manageEveryBuffers( bufferType : IBufferType, basePeriod : Period ) : Observable<IMultiplePeriodBuffersEvent> { /** * Keep a PeriodList for cases such as seeking ahead/before the * buffers already created. * When that happens, interrupt the previous buffers and create one back * from the new initial period. * @type {ConsecutivePeriodList} */ const periodList = new SortedList<Period>((a, b) => a.start - b.start); /** * Returns true if the given time is either: * - less than the start of the chronologically first Period * - more than the end of the chronologically last Period * @param {number} time * @returns {boolean} */ function isOutOfPeriodList(time : number) : boolean { const head = periodList.head(); const last = periodList.last(); if (head == null || last == null) { // if no period return true; } return head.start > time || (last.end || Infinity) < time; } /** * Destroy the current set of consecutive buffers. * Used when the clocks goes out of the bounds of those, e.g. when the user * seeks. * We can then re-create consecutive buffers, from the new point in time. 
* @type {Subject} */ const destroyCurrentBuffers = new Subject<void>(); const restartBuffers$ = clock$.pipe( filter(({ currentTime, wantedTimeOffset }) => { if (!manifest.getPeriodForTime(wantedTimeOffset + currentTime)) { // TODO Manage out-of-manifest situations return false; } return isOutOfPeriodList(wantedTimeOffset + currentTime); }), take(1), tap(({ currentTime, wantedTimeOffset }) => { log.info("Current position out of the bounds of the active periods," + "re-creating buffers.", bufferType, currentTime + wantedTimeOffset); destroyCurrentBuffers.next(); }), mergeMap(({ currentTime, wantedTimeOffset }) => { const newInitialPeriod = manifest .getPeriodForTime(currentTime + wantedTimeOffset); if (newInitialPeriod == null) { throw new MediaError("MEDIA_TIME_NOT_FOUND", null, true); } else { // Note: For this to work, manageEveryBuffers should always emit the // "periodBufferReady" event for the new InitialPeriod synchronously return manageEveryBuffers(bufferType, newInitialPeriod); } }) ); const currentBuffers$ = manageConsecutivePeriodBuffers( bufferType, basePeriod, destroyCurrentBuffers ).pipe( tap((message) => { if (message.type === "periodBufferReady") { periodList.add(message.value.period); } else if (message.type === "periodBufferCleared") { periodList.removeFirst(message.value.period); } }), share() // as always, with side-effects ); return observableMerge(currentBuffers$, restartBuffers$); } /** * Manage creation and removal of Buffers for consecutive Periods. * * This function is called recursively for each successive Periods as needed. * * This function does not guarantee creation/destruction of the right Buffers * when the user seeks or rewind in the content. * It only manages regular playback, another layer should be used to manage * those cases. * * You can know about buffers creation and destruction respectively through * the "periodBufferReady" and "periodBufferCleared" events. 
* * The "periodBufferReady" related to the given period should be sent synchronously * on subscription. * Further "periodBufferReady" for further Periods should be sent each time the * Buffer for the previous Buffer is full. * * Buffers for each Period are cleared ("periodBufferCleared" event) either: * - when it has finished to play (currentTime is after it) * - when one of the older Buffers becomes active again, in which case the * Buffers coming after will be cleared from the newest to the oldest. * - when the destroy$ observable emits, in which case every created Buffer * here will be cleared from the newest to the oldest. * * TODO The code here can surely be greatly simplified. * @param {string} bufferType - e.g. "audio" or "video" * @param {Period} basePeriod - Initial Period downloaded. * @param {Observable} destroy$ - Emit when/if all created Buffer from this * point should be destroyed. * @returns {Observable} */ function manageConsecutivePeriodBuffers( bufferType : IBufferType, basePeriod : Period, destroy$ : Observable<void> ) : Observable<IMultiplePeriodBuffersEvent> { log.info("creating new Buffer for", bufferType, basePeriod); /** * Emits the chosen adaptation for the current type. * @type {ReplaySubject} */ const adaptation$ = new ReplaySubject<Adaptation|null>(1); /** * Emits the Period of the next Period Buffer when it can be created. * @type {Subject} */ const createNextPeriodBuffer$ = new Subject<Period>(); /** * Emits when the Buffers for the next Periods should be destroyed, if * created. * @type {Subject} */ const destroyNextBuffers$ = new Subject<void>(); /** * Emits when the current position goes over the end of the current buffer. * @type {Subject} */ const endOfCurrentBuffer$ = clock$ .pipe(filter(({ currentTime, wantedTimeOffset }) => !!basePeriod.end && (currentTime + wantedTimeOffset) >= basePeriod.end )); /** * Create Period Buffer for the next Period. 
* @type {Observable} */ const nextPeriodBuffer$ = createNextPeriodBuffer$ .pipe(exhaustMap((nextPeriod) => { return manageConsecutivePeriodBuffers( bufferType, nextPeriod, destroyNextBuffers$); })); /** * Allows to destroy each created Buffer, from the newest to the oldest, * once destroy$ emits. * @type {Observable} */ const destroyAll$ = destroy$.pipe( take(1), tap(() => { // first complete createNextBuffer$ to allow completion of the // nextPeriodBuffer$ observable once every further Buffers have been // cleared. createNextPeriodBuffer$.complete(); // emit destruction signal to the next Buffer first destroyNextBuffers$.next(); destroyNextBuffers$.complete(); // we do not need it anymore }), share() // share side-effects ); /** * Will emit when the current buffer should be destroyed. * @type {Observable} */ const killCurrentBuffer$ = observableMerge(endOfCurrentBuffer$, destroyAll$); const periodBuffer$ = createPeriodBuffer(bufferType, basePeriod, adaptation$).pipe( mergeMap(( evt : IPeriodBufferEvent ) : Observable<IMultiplePeriodBuffersEvent> => { const { type } = evt; if (type === "full-buffer") { /** * The Period coming just after the current one. * @type {Period|undefined} */ const nextPeriod = manifest.getPeriodAfter(basePeriod); if (nextPeriod == null) { // no more period, emits event return observableOf(EVENTS.bufferComplete(bufferType)); } else {
} else if (type === "active-buffer") { // current buffer is active, destroy next buffer if created destroyNextBuffers$.next(); } return observableOf(evt); }), share() ); /** * Buffer for the current Period. * @type {Observable} */ const currentBuffer$ : Observable<IMultiplePeriodBuffersEvent> = observableConcat( observableOf(EVENTS.periodBufferReady(bufferType, basePeriod, adaptation$)), periodBuffer$.pipe(takeUntil(killCurrentBuffer$)), observableOf(EVENTS.periodBufferCleared(bufferType, basePeriod)) .pipe(tap(() => { log.info("destroying buffer for", bufferType, basePeriod); })) ); return observableMerge( currentBuffer$, nextPeriodBuffer$, destroyAll$.pipe(ignoreElements()) ); } /** * Create single PeriodBuffer Observable: * - Lazily create (or reuse) a SourceBuffer for the given type. * - Create a Buffer linked to an Adaptation each time it changes, to * download and append the corresponding Segments in the SourceBuffer. * - Announce when the Buffer is full or is awaiting new Segments through * events * * /!\ This Observable has multiple side-effects (creation of SourceBuffers, * downloading and appending of Segments etc.) on subscription. * * @param {string} bufferType * @param {Period} period - The period concerned * @param {Observable} adaptation$ - Emit the chosen adaptation. 
* Emit null to deactivate a type of adaptation * @returns {Observable} */ function createPeriodBuffer( bufferType : IBufferType, period: Period, adaptation$ : Observable<Adaptation|null> ) : Observable<IPeriodBufferEvent> { return adaptation$.pipe(switchMap((adaptation) => { if (adaptation == null) { log.info(`set no ${bufferType} Adaptation`, period); let cleanBuffer$ : Observable<null>; if (sourceBufferManager.has(bufferType)) { log.info(`clearing previous ${bufferType} SourceBuffer`); const _queuedSourceBuffer = sourceBufferManager.get(bufferType); cleanBuffer$ = _queuedSourceBuffer .removeBuffer({ start: period.start, end: period.end || Infinity }) .pipe(mapTo(null)); } else { cleanBuffer$ = observableOf(null); } return observableConcat( cleanBuffer$.pipe(mapTo(EVENTS.adaptationChange(bufferType, null, period))), createFakeBuffer(clock$, wantedBufferAhead$, bufferType, { manifest, period }) ); } log.info(`updating ${bufferType} adaptation`, adaptation, period); // 1 - create or reuse the SourceBuffer let queuedSourceBuffer : QueuedSourceBuffer<any>; if (sourceBufferManager.has(bufferType)) { log.info("reusing a previous SourceBuffer for the type", bufferType); queuedSourceBuffer = sourceBufferManager.get(bufferType); } else { const codec = getFirstDeclaredMimeType(adaptation); const sourceBufferOptions = bufferType === "text" ? 
options.textTrackOptions : undefined; queuedSourceBuffer = sourceBufferManager .createSourceBuffer(bufferType, codec, sourceBufferOptions); } // 2 - create or reuse the associated BufferGarbageCollector and // SegmentBookkeeper const bufferGarbageCollector$ = garbageCollectors.get(queuedSourceBuffer); const segmentBookkeeper = segmentBookkeepers.get(queuedSourceBuffer); // TODO Clean previous QueuedSourceBuffer for previous content in the period // // 3 - Clean possible content from a precedent adaptation in this period // // (take the clock into account to avoid removing "now" for native sourceBuffers) // // like: // return clock$.pluck("currentTime").take(1).mergeMap(currentTime => { // }) // 3 - create the pipeline const pipelineOptions = getPipelineOptions( bufferType, options.maxRetry, options.maxRetryOffline); const pipeline = segmentPipelinesManager .createPipeline(bufferType, pipelineOptions); // 4 - create the Buffer const adaptationBuffer$ = bufferManager.createBuffer( clock$, queuedSourceBuffer, segmentBookkeeper, pipeline, wantedBufferAhead$, { manifest, period, adaptation } ).pipe(catchError<IAdaptationBufferEvent<any>, never>((error : Error) => { // non native buffer should not impact the stability of the // player. ie: if a text buffer sends an error, we want to // continue streaming without any subtitles if (!SourceBufferManager.isNative(bufferType)) { log.error("custom buffer: ", bufferType, "has crashed. Aborting it.", error); sourceBufferManager.disposeSourceBuffer(bufferType); errorStream.next(error); return createFakeBuffer( clock$, wantedBufferAhead$, bufferType, { manifest, period }); } log.error( "native buffer: ", bufferType, "has crashed. 
Stopping playback.", error); throw error; // else, throw })); // 5 - Return the buffer and send right events return observableConcat( observableOf(EVENTS.adaptationChange(bufferType, adaptation, period)), observableMerge(adaptationBuffer$, bufferGarbageCollector$) ); })); } } /** * @param {string} bufferType * @param {number} retry * @param {number} offlineRetry * @returns {Object} - Options to give to the Pipeline */ function getPipelineOptions( bufferType : string, retry? : number, offlineRetry? : number ) : IPipelineOptions<any, any> { const cache = arrayIncludes(["audio", "video"], bufferType) ? new InitializationSegmentCache<any>() : undefined; let maxRetry : number; let maxRetryOffline : number; if (bufferType === "image") { maxRetry = 0; // Deactivate BIF fetching if it fails } else { maxRetry = retry != null ? retry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_ERROR; } maxRetryOffline = offlineRetry != null ? offlineRetry : config.DEFAULT_MAX_PIPELINES_RETRY_ON_OFFLINE; return { cache, maxRetry, maxRetryOffline, }; } /** * Returns an Observable which emits ``undefined`` and complete when all * buffers given are _complete_. * * A PeriodBuffer for a given type is considered _complete_ when both of these * conditions are true: * - it is the last PeriodBuffer in the content for the given type * - it has finished downloading segments (it is _full_) * * Simply put a _complete_ PeriodBuffer for a given type means that every * segments needed for this Buffer have been downloaded. * * When the Observable returned here emits, every Buffer are finished. * @param {...Observable} buffers * @returns {Observable} */ function buffersAreComplete( ...buffers : Array<Observable<IMultiplePeriodBuffersEvent>> ) : Observable<boolean> { /** * Array of Observables linked to the Array of Buffers which emit: * - true when the corresponding buffer is considered _complete_. * - false when the corresponding buffer is considered _active_. 
* @type {Array.<Observable>} */ const isCompleteArray : Array<Observable<boolean>> = buffers .map((buffer) => { return buffer.pipe( filter((evt) => { return evt.type === "complete-buffer" || evt.type === "active-buffer"; }), map((evt) => evt.type === "complete-buffer"), startWith(false), distinctUntilChanged() ); }); return observableCombineLatest(...isCompleteArray) .pipe( map((areComplete) => areComplete.every((isComplete) => isComplete)), distinctUntilChanged() ); } /** * Get mimetype string of the first representation declared in the given * adaptation. * @param {Adaptation} adaptation * @returns {string} */ function getFirstDeclaredMimeType(adaptation : Adaptation) : string { const { representations } = adaptation; return ( representations[0] && representations[0].getMimeTypeString() ) || ""; } /** * Create all native SourceBuffers needed for a given Period. * * Native Buffers have the particulary to need to be created at the beginning of * the content. * Custom source buffers (entirely managed in JS) can generally be created and * disposed at will during the lifecycle of the content. * @param {SourceBufferManager} sourceBufferManager * @param {Period} period */ function createNativeSourceBuffersForPeriod( sourceBufferManager : SourceBufferManager, period : Period ) : void { Object.keys(period.adaptations).forEach(bufferType => { if (SourceBufferManager.isNative(bufferType)) { const adaptations = period.adaptations[bufferType] || []; const representations = adaptations ? adaptations[0].representations : []; if (representations.length) { const codec = representations[0].getMimeTypeString(); sourceBufferManager.createSourceBuffer(bufferType, codec); } } }); }
// current buffer is full, create the next one if not createNextPeriodBuffer$.next(nextPeriod); }
conditional_block
BrowseButton.js
Ext.namespace('Ext.ux.form'); /** * License: public domain (i.e. use it however you like without any restrictions). * * @class Ext.ux.form.BrowseButton * @extends Ext.Button Ext.Button that provides a customizable file browse button. Clicking this button, pops up a file * dialog box for a user to select the file to upload. This is accomplished by having a transparent <input * type="file"> box above the Ext.Button. When a user thinks he or she is clicking the Ext.Button, they're * actually clicking the hidden input "Browse..." box. Note: this class can be instantiated explicitly or with * xtypes anywhere a regular Ext.Button can be except in 2 scenarios: - Panel.addButton method both as an * instantiated object or as an xtype config object. - Panel.buttons config object as an xtype config object. * These scenarios fail because Ext explicitly creates an Ext.Button in these cases. Browser compatibility: * Internet Explorer 6: - no issues Internet Explorer 7: - no issues Firefox 2 - Windows: - pointer cursor * doesn't display when hovering over the button. Safari 3 - Windows: - no issues. * @author loeppky - based on the work done by MaximGB in Ext.ux.UploadDialog * (http://extjs.com/forum/showthread.php?t=21558) The follow the curosr float div idea also came from MaximGB. * @see http://extjs.com/forum/showthread.php?t=29032 * @constructor Create a new BrowseButton. * @param {Object} * config Configuration options */ Ext.ux.form.BrowseButton = Ext.extend(Ext.Button, { /* * Config options: */ /** * @cfg {String} inputFileName Name to use for the hidden input file DOM element. Deaults to "file". */ inputFileName : 'file', /** * @cfg {Boolean} debug Toggle for turning on debug mode. Debug mode doesn't make clipEl transparent so that one can * see how effectively it covers the Ext.Button. In addition, clipEl is given a green background and floatEl a * red background to see how well they are positioned. 
*/ debug : false, /* * Private constants: */ /** * @property FLOAT_EL_WIDTH * @type Number The width (in pixels) of floatEl. It should be less than the width of the IE "Browse" button's width * (65 pixels), since IE doesn't let you resize it. We define this width so we can quickly center floatEl at * the mouse cursor without having to make any function calls. * @private */ FLOAT_EL_WIDTH : 60, /** * @property FLOAT_EL_HEIGHT * @type Number The heigh (in pixels) of floatEl. It should be less than the height of the "Browse" button's height. * We define this height so we can quickly center floatEl at the mouse cursor without having to make any * function calls. * @private */ FLOAT_EL_HEIGHT : 18, /* * Private properties: */ /** * @property buttonCt * @type Ext.Element Element that contains the actual Button DOM element. We store a reference to it, so we can * easily grab its size for sizing the clipEl. * @private */ buttonCt : null, /** * @property clipEl * @type Ext.Element Element that contains the floatEl. This element is positioned to fill the area of Ext.Button * and has overflow turned off. This keeps floadEl tight to the Ext.Button, and prevents it from masking * surrounding elements. * @private */ clipEl : null, /** * @property floatEl * @type Ext.Element Element that contains the inputFileEl. This element is size to be less than or equal to the * size of the input file "Browse" button. It is then positioned wherever the user moves the cursor, so that * their click always clicks the input file "Browse" button. Overflow is turned off to preven inputFileEl from * masking surrounding elements. * @private */ floatEl : null, /** * @property inputFileEl * @type Ext.Element Element for the hiden file input. * @private */ inputFileEl : null, /** * @property originalHandler * @type Function The handler originally defined for the Ext.Button during construction using the "handler" config * option. 
We need to null out the "handler" property so that it is only called when a file is selected. * @private */ originalHandler : null, /** * @property originalScope * @type Object The scope originally defined for the Ext.Button during construction using the "scope" config option. * While the "scope" property doesn't need to be nulled, to be consistent with originalHandler, we do. * @private */ originalScope : null, /* * Protected Ext.Button overrides */ /** * @see Ext.Button.initComponent */ initComponent : function() { Ext.ux.form.BrowseButton.superclass.initComponent.call(this); // Store references to the original handler and scope before nulling them. // This is done so that this class can control when the handler is called. // There are some cases where the hidden file input browse button doesn't completely cover the Ext.Button. // The handler shouldn't be called in these cases. It should only be called if a new file is selected on the // file system. this.originalHandler = this.handler; this.originalScope = this.scope; this.handler = null; this.scope = null; }, /** * @see Ext.Button.onRender */ onRender : function(ct, position) { Ext.ux.form.BrowseButton.superclass.onRender.call(this, ct, position); // render the Ext.Button this.buttonCt = this.el.child('.x-btn-center em'); this.buttonCt.position('relative'); // this is important! 
var styleCfg = { position : 'absolute', overflow : 'hidden', top : '0px', // default left : '0px' // default }; // browser specifics for better overlay tightness if (Ext.isIE) { Ext.apply(styleCfg, { left : '-3px', top : '-3px' }); } else if (Ext.isGecko) { Ext.apply(styleCfg, { left : '-3px', top : '-3px' }); } else if (Ext.isSafari) { Ext.apply(styleCfg, { left : '-4px', top : '-2px' }); } this.clipEl = this.buttonCt.createChild({ tag : 'div', style : styleCfg }); this.setClipSize(); this.clipEl.on({ 'mousemove' : this.onButtonMouseMove, 'mouseover' : this.onButtonMouseMove, scope : this }); this.floatEl = this.clipEl.createChild({ tag : 'div', style : { position : 'absolute', width : this.FLOAT_EL_WIDTH + 'px', height : this.FLOAT_EL_HEIGHT + 'px', overflow : 'hidden' } }); if (this.debug) { this.clipEl.applyStyles({ 'background-color' : 'green' }); this.floatEl.applyStyles({ 'background-color' : 'red' }); } else { // We don't set the clipEl to be transparent, because IE 6/7 occassionaly looses mouse events for // transparent elements. // We have listeners on the clipEl that can't be lost as they're needed for realligning the input file // element. this.floatEl.setOpacity(0.0); } // Cover cases where someone tabs to the button: // Listen to focus of the button so we can translate the focus to the input file el. var buttonEl = this.el.child(this.buttonSelector); buttonEl.on('focus', this.onButtonFocus, this); // In IE, it's possible to tab to the text portion of the input file el. // We want to listen to keyevents so that if a space is pressed, we "click" the input file el. if (Ext.isIE) { this.el.on('keydown', this.onButtonKeyDown, this); } this.createInputFile(); }, /* * Private helper methods: */ /** * Sets the size of clipEl so that is covering as much of the button as possible. 
* * @private */ setClipSize : function() { if (this.clipEl) { var width = this.buttonCt.getWidth(); var height = this.buttonCt.getHeight(); // The button container can have a width and height of zero when it's rendered in a hidden panel. // This is most noticable when using a card layout, as the items are all rendered but hidden, // (unless deferredRender is set to true). // In this case, the clip size can't be determined, so we attempt to set it later. // This check repeats until the button container has a size. if (width === 0 || height === 0) { this.setClipSize.defer(100, this); } else { if (Ext.isIE) { width = width + 5; height = height + 5; } else if (Ext.isGecko) { width = width + 6; height = height + 6; } else if (Ext.isSafari)
this.clipEl.setSize(width, height); } } }, /** * Creates the input file element and adds it to inputFileCt. The created input file elementis sized, positioned, * and styled appropriately. Event handlers for the element are set up, and a tooltip is applied if defined in the * original config. * * @private */ createInputFile : function() { // When an input file gets detached and set as the child of a different DOM element, // straggling <em> elements get left behind. // I don't know why this happens but we delete any <em> elements we can find under the floatEl to prevent a // memory leak. this.floatEl.select('em').each(function(el) { el.remove(); }); this.inputFileEl = this.floatEl.createChild({ tag : 'input', type : 'file', size : 1, // must be > 0. It's value doesn't really matter due to our masking div (inputFileCt). name : this.inputFileName || Ext.id(this.el), tabindex : this.tabIndex, // Use the same pointer as an Ext.Button would use. This doesn't work in Firefox. // This positioning right-aligns the input file to ensure that the "Browse" button is visible. style : { position : 'absolute', cursor : 'pointer', right : '0px', top : '0px' } }); this.inputFileEl = this.inputFileEl.child('input') || this.inputFileEl; // setup events this.inputFileEl.on({ 'click' : this.onInputFileClick, 'change' : this.onInputFileChange, 'focus' : this.onInputFileFocus, 'select' : this.onInputFileFocus, 'blur' : this.onInputFileBlur, scope : this }); // add a tooltip if (this.tooltip) { if (typeof this.tooltip == 'object') { Ext.QuickTips.register(Ext.apply({ target : this.inputFileEl }, this.tooltip)); } else { this.inputFileEl.dom[this.tooltipType] = this.tooltip; } } }, /** * Redirecting focus to the input file element so the user can press space and select files. * * @param {Event} * e focus event. 
* @private */ onButtonFocus : function(e) { if (this.inputFileEl) { this.inputFileEl.focus(); e.stopEvent(); } }, /** * Handler for the IE case where once can tab to the text box of an input file el. If the key is a space, we simply * "click" the inputFileEl. * * @param {Event} * e key event. * @private */ onButtonKeyDown : function(e) { if (this.inputFileEl && e.getKey() == Ext.EventObject.SPACE) { this.inputFileEl.dom.click(); e.stopEvent(); } }, /** * Handler when the cursor moves over the clipEl. The floatEl gets centered to the cursor location. * * @param {Event} * e mouse event. * @private */ onButtonMouseMove : function(e) { var xy = e.getXY(); xy[0] -= this.FLOAT_EL_WIDTH / 2; xy[1] -= this.FLOAT_EL_HEIGHT / 2; this.floatEl.setXY(xy); }, /** * Add the visual enhancement to the button when the input file recieves focus. This is the tip for the user that * now he/she can press space to select the file. * * @private */ onInputFileFocus : function(e) { if (!this.isDisabled) { this.el.addClass("x-btn-over"); } }, /** * Removes the visual enhancement from the button. * * @private */ onInputFileBlur : function(e) { this.el.removeClass("x-btn-over"); }, /** * Handler when inputFileEl's "Browse..." button is clicked. * * @param {Event} * e click event. * @private */ onInputFileClick : function(e) { e.stopPropagation(); }, /** * Handler when inputFileEl changes value (i.e. a new file is selected). * * @private */ onInputFileChange : function() { if (this.originalHandler) { this.originalHandler.call(this.originalScope, this); } }, /* * Public methods: */ /** * Detaches the input file associated with this BrowseButton so that it can be used for other purposed (e.g. * uplaoding). The returned input file has all listeners and tooltips applied to it by this class removed. * * @param {Boolean} * whether to create a new input file element for this BrowseButton after detaching. True will prevent * creation. Defaults to false. 
* @return {Ext.Element} the detached input file element. */ detachInputFile : function(noCreate) { var result = this.inputFileEl; if (typeof this.tooltip == 'object') { Ext.QuickTips.unregister(this.inputFileEl); } else { this.inputFileEl.dom[this.tooltipType] = null; } this.inputFileEl.removeAllListeners(); this.inputFileEl = null; if (!noCreate) { this.createInputFile(); } return result; }, /** * @return {Ext.Element} the input file element attached to this BrowseButton. */ getInputFile : function() { return this.inputFileEl; }, /** * @see Ext.Button.disable */ disable : function() { Ext.ux.form.BrowseButton.superclass.disable.call(this); this.inputFileEl.dom.disabled = true; }, /** * @see Ext.Button.enable */ enable : function() { Ext.ux.form.BrowseButton.superclass.enable.call(this); this.inputFileEl.dom.disabled = false; } }); Ext.reg('browsebutton', Ext.ux.form.BrowseButton);
{ width = width + 6; height = height + 6; }
conditional_block
BrowseButton.js
Ext.namespace('Ext.ux.form'); /** * License: public domain (i.e. use it however you like without any restrictions). * * @class Ext.ux.form.BrowseButton * @extends Ext.Button Ext.Button that provides a customizable file browse button. Clicking this button, pops up a file * dialog box for a user to select the file to upload. This is accomplished by having a transparent <input * type="file"> box above the Ext.Button. When a user thinks he or she is clicking the Ext.Button, they're * actually clicking the hidden input "Browse..." box. Note: this class can be instantiated explicitly or with * xtypes anywhere a regular Ext.Button can be except in 2 scenarios: - Panel.addButton method both as an * instantiated object or as an xtype config object. - Panel.buttons config object as an xtype config object. * These scenarios fail because Ext explicitly creates an Ext.Button in these cases. Browser compatibility: * Internet Explorer 6: - no issues Internet Explorer 7: - no issues Firefox 2 - Windows: - pointer cursor * doesn't display when hovering over the button. Safari 3 - Windows: - no issues. * @author loeppky - based on the work done by MaximGB in Ext.ux.UploadDialog * (http://extjs.com/forum/showthread.php?t=21558) The follow the curosr float div idea also came from MaximGB. * @see http://extjs.com/forum/showthread.php?t=29032 * @constructor Create a new BrowseButton. * @param {Object} * config Configuration options */ Ext.ux.form.BrowseButton = Ext.extend(Ext.Button, { /* * Config options: */ /** * @cfg {String} inputFileName Name to use for the hidden input file DOM element. Deaults to "file". */ inputFileName : 'file', /** * @cfg {Boolean} debug Toggle for turning on debug mode. Debug mode doesn't make clipEl transparent so that one can * see how effectively it covers the Ext.Button. In addition, clipEl is given a green background and floatEl a * red background to see how well they are positioned. 
*/ debug : false, /* * Private constants: */ /** * @property FLOAT_EL_WIDTH * @type Number The width (in pixels) of floatEl. It should be less than the width of the IE "Browse" button's width * (65 pixels), since IE doesn't let you resize it. We define this width so we can quickly center floatEl at * the mouse cursor without having to make any function calls. * @private */ FLOAT_EL_WIDTH : 60, /** * @property FLOAT_EL_HEIGHT * @type Number The heigh (in pixels) of floatEl. It should be less than the height of the "Browse" button's height. * We define this height so we can quickly center floatEl at the mouse cursor without having to make any * function calls. * @private */ FLOAT_EL_HEIGHT : 18, /* * Private properties: */ /** * @property buttonCt * @type Ext.Element Element that contains the actual Button DOM element. We store a reference to it, so we can * easily grab its size for sizing the clipEl. * @private */ buttonCt : null, /** * @property clipEl * @type Ext.Element Element that contains the floatEl. This element is positioned to fill the area of Ext.Button * and has overflow turned off. This keeps floadEl tight to the Ext.Button, and prevents it from masking * surrounding elements. * @private */ clipEl : null, /** * @property floatEl * @type Ext.Element Element that contains the inputFileEl. This element is size to be less than or equal to the * size of the input file "Browse" button. It is then positioned wherever the user moves the cursor, so that * their click always clicks the input file "Browse" button. Overflow is turned off to preven inputFileEl from * masking surrounding elements. * @private */ floatEl : null, /** * @property inputFileEl * @type Ext.Element Element for the hiden file input. * @private */ inputFileEl : null, /** * @property originalHandler * @type Function The handler originally defined for the Ext.Button during construction using the "handler" config * option. 
We need to null out the "handler" property so that it is only called when a file is selected. * @private */ originalHandler : null, /** * @property originalScope * @type Object The scope originally defined for the Ext.Button during construction using the "scope" config option. * While the "scope" property doesn't need to be nulled, to be consistent with originalHandler, we do. * @private */ originalScope : null, /* * Protected Ext.Button overrides */ /** * @see Ext.Button.initComponent */ initComponent : function() { Ext.ux.form.BrowseButton.superclass.initComponent.call(this); // Store references to the original handler and scope before nulling them. // This is done so that this class can control when the handler is called. // There are some cases where the hidden file input browse button doesn't completely cover the Ext.Button. // The handler shouldn't be called in these cases. It should only be called if a new file is selected on the // file system. this.originalHandler = this.handler; this.originalScope = this.scope; this.handler = null; this.scope = null; }, /** * @see Ext.Button.onRender */ onRender : function(ct, position) { Ext.ux.form.BrowseButton.superclass.onRender.call(this, ct, position); // render the Ext.Button this.buttonCt = this.el.child('.x-btn-center em'); this.buttonCt.position('relative'); // this is important! 
var styleCfg = { position : 'absolute', overflow : 'hidden', top : '0px', // default left : '0px' // default }; // browser specifics for better overlay tightness if (Ext.isIE) { Ext.apply(styleCfg, { left : '-3px', top : '-3px' }); } else if (Ext.isGecko) { Ext.apply(styleCfg, { left : '-3px', top : '-3px' }); } else if (Ext.isSafari) { Ext.apply(styleCfg, { left : '-4px', top : '-2px' }); } this.clipEl = this.buttonCt.createChild({ tag : 'div', style : styleCfg }); this.setClipSize(); this.clipEl.on({ 'mousemove' : this.onButtonMouseMove, 'mouseover' : this.onButtonMouseMove, scope : this }); this.floatEl = this.clipEl.createChild({ tag : 'div', style : { position : 'absolute', width : this.FLOAT_EL_WIDTH + 'px', height : this.FLOAT_EL_HEIGHT + 'px', overflow : 'hidden' } }); if (this.debug) { this.clipEl.applyStyles({ 'background-color' : 'green' }); this.floatEl.applyStyles({ 'background-color' : 'red' }); } else { // We don't set the clipEl to be transparent, because IE 6/7 occassionaly looses mouse events for // transparent elements. // We have listeners on the clipEl that can't be lost as they're needed for realligning the input file // element. this.floatEl.setOpacity(0.0); } // Cover cases where someone tabs to the button: // Listen to focus of the button so we can translate the focus to the input file el. var buttonEl = this.el.child(this.buttonSelector); buttonEl.on('focus', this.onButtonFocus, this); // In IE, it's possible to tab to the text portion of the input file el. // We want to listen to keyevents so that if a space is pressed, we "click" the input file el. if (Ext.isIE) { this.el.on('keydown', this.onButtonKeyDown, this); } this.createInputFile(); }, /* * Private helper methods: */ /** * Sets the size of clipEl so that is covering as much of the button as possible. 
* * @private */ setClipSize : function() { if (this.clipEl) { var width = this.buttonCt.getWidth(); var height = this.buttonCt.getHeight(); // The button container can have a width and height of zero when it's rendered in a hidden panel. // This is most noticable when using a card layout, as the items are all rendered but hidden, // (unless deferredRender is set to true). // In this case, the clip size can't be determined, so we attempt to set it later. // This check repeats until the button container has a size. if (width === 0 || height === 0) { this.setClipSize.defer(100, this); } else { if (Ext.isIE) { width = width + 5; height = height + 5; } else if (Ext.isGecko) { width = width + 6; height = height + 6; } else if (Ext.isSafari) { width = width + 6; height = height + 6; } this.clipEl.setSize(width, height); } } }, /** * Creates the input file element and adds it to inputFileCt. The created input file elementis sized, positioned, * and styled appropriately. Event handlers for the element are set up, and a tooltip is applied if defined in the * original config.
*/ createInputFile : function() { // When an input file gets detached and set as the child of a different DOM element, // straggling <em> elements get left behind. // I don't know why this happens but we delete any <em> elements we can find under the floatEl to prevent a // memory leak. this.floatEl.select('em').each(function(el) { el.remove(); }); this.inputFileEl = this.floatEl.createChild({ tag : 'input', type : 'file', size : 1, // must be > 0. It's value doesn't really matter due to our masking div (inputFileCt). name : this.inputFileName || Ext.id(this.el), tabindex : this.tabIndex, // Use the same pointer as an Ext.Button would use. This doesn't work in Firefox. // This positioning right-aligns the input file to ensure that the "Browse" button is visible. style : { position : 'absolute', cursor : 'pointer', right : '0px', top : '0px' } }); this.inputFileEl = this.inputFileEl.child('input') || this.inputFileEl; // setup events this.inputFileEl.on({ 'click' : this.onInputFileClick, 'change' : this.onInputFileChange, 'focus' : this.onInputFileFocus, 'select' : this.onInputFileFocus, 'blur' : this.onInputFileBlur, scope : this }); // add a tooltip if (this.tooltip) { if (typeof this.tooltip == 'object') { Ext.QuickTips.register(Ext.apply({ target : this.inputFileEl }, this.tooltip)); } else { this.inputFileEl.dom[this.tooltipType] = this.tooltip; } } }, /** * Redirecting focus to the input file element so the user can press space and select files. * * @param {Event} * e focus event. * @private */ onButtonFocus : function(e) { if (this.inputFileEl) { this.inputFileEl.focus(); e.stopEvent(); } }, /** * Handler for the IE case where once can tab to the text box of an input file el. If the key is a space, we simply * "click" the inputFileEl. * * @param {Event} * e key event. 
* @private */ onButtonKeyDown : function(e) { if (this.inputFileEl && e.getKey() == Ext.EventObject.SPACE) { this.inputFileEl.dom.click(); e.stopEvent(); } }, /** * Handler when the cursor moves over the clipEl. The floatEl gets centered to the cursor location. * * @param {Event} * e mouse event. * @private */ onButtonMouseMove : function(e) { var xy = e.getXY(); xy[0] -= this.FLOAT_EL_WIDTH / 2; xy[1] -= this.FLOAT_EL_HEIGHT / 2; this.floatEl.setXY(xy); }, /** * Add the visual enhancement to the button when the input file recieves focus. This is the tip for the user that * now he/she can press space to select the file. * * @private */ onInputFileFocus : function(e) { if (!this.isDisabled) { this.el.addClass("x-btn-over"); } }, /** * Removes the visual enhancement from the button. * * @private */ onInputFileBlur : function(e) { this.el.removeClass("x-btn-over"); }, /** * Handler when inputFileEl's "Browse..." button is clicked. * * @param {Event} * e click event. * @private */ onInputFileClick : function(e) { e.stopPropagation(); }, /** * Handler when inputFileEl changes value (i.e. a new file is selected). * * @private */ onInputFileChange : function() { if (this.originalHandler) { this.originalHandler.call(this.originalScope, this); } }, /* * Public methods: */ /** * Detaches the input file associated with this BrowseButton so that it can be used for other purposed (e.g. * uplaoding). The returned input file has all listeners and tooltips applied to it by this class removed. * * @param {Boolean} * whether to create a new input file element for this BrowseButton after detaching. True will prevent * creation. Defaults to false. * @return {Ext.Element} the detached input file element. 
*/ detachInputFile : function(noCreate) { var result = this.inputFileEl; if (typeof this.tooltip == 'object') { Ext.QuickTips.unregister(this.inputFileEl); } else { this.inputFileEl.dom[this.tooltipType] = null; } this.inputFileEl.removeAllListeners(); this.inputFileEl = null; if (!noCreate) { this.createInputFile(); } return result; }, /** * @return {Ext.Element} the input file element attached to this BrowseButton. */ getInputFile : function() { return this.inputFileEl; }, /** * @see Ext.Button.disable */ disable : function() { Ext.ux.form.BrowseButton.superclass.disable.call(this); this.inputFileEl.dom.disabled = true; }, /** * @see Ext.Button.enable */ enable : function() { Ext.ux.form.BrowseButton.superclass.enable.call(this); this.inputFileEl.dom.disabled = false; } }); Ext.reg('browsebutton', Ext.ux.form.BrowseButton);
* * @private
random_line_split
win_export.py
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved # $Id$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import gtk from gtk import glade import gobject import gettext import common import rpc import service import types import os def export_csv(fname, fields, result, write_title=False): import csv try: fp = file(fname, 'wb+') writer = csv.writer(fp) if write_title: writer.writerow(fields) for data in result: row = [] for d in data: if type(d)==types.StringType: row.append(d.replace('\n',' ').replace('\t',' ')) else: row.append(d) writer.writerow(row) fp.close() common.message(str(len(result))+_(' record(s) saved !')) return True except IOError, (errno, strerror): common.message(_("Operation failed !\nI/O error")+"(%s)" % (errno,)) return False def open_excel(fields, result): if os.name == 'nt': try: from win32com.client import Dispatch xlApp = Dispatch("Excel.Application") xlApp.Workbooks.Add() for col in range(len(fields)): xlApp.ActiveSheet.Cells(1,col+1).Value = fields[col] sht = xlApp.ActiveSheet for a in result: for b in range(len(a)): if type(a[b]) == type(''): a[b]=a[b].decode('utf-8','replace') elif type(a[b]) == type([]): if 
len(a[b])==2: a[b] = a[b][1].decode('utf-8','replace') else: a[b] = '' sht.Range(sht.Cells(2, 1), sht.Cells(len(result)+1, len(fields))).Value = result xlApp.Visible = 1 except: common.error(_('Error Opening Excel !'),'') else: common.message(_("Function only available for MS Office !\nSorry, OOo users :(")) def datas_read(ids, model, fields, fields_view, prefix='', context=None): ctx = context.copy() ctx.update(rpc.session.context) datas = rpc.session.rpc_exec_auth('/object', 'execute', model, 'export_data', ids, fields, ctx) return datas class win_export(object): def __init__(self, model, ids, fields, preload = [], parent=None, context=None): self.glade = glade.XML(common.terp_path("openerp.glade"), 'win_save_as', gettext.textdomain()) self.win = self.glade.get_widget('win_save_as') self.ids = ids self.model = model self.fields_data = {} if context is None: context = {} self.context = context if parent is None: parent = service.LocalService('gui.main').window self.win.set_transient_for(parent) self.win.set_icon(common.OPENERP_ICON) self.parent = parent self.view1 = gtk.TreeView() self.view1.get_selection().set_mode(gtk.SELECTION_MULTIPLE) self.glade.get_widget('exp_vp1').add(self.view1) self.view2 = gtk.TreeView() self.view2.get_selection().set_mode(gtk.SELECTION_MULTIPLE) self.glade.get_widget('exp_vp2').add(self.view2) self.view1.set_headers_visible(False) self.view2.set_headers_visible(False) cell = gtk.CellRendererText() column = gtk.TreeViewColumn('Field name', cell, text=0, background=2) self.view1.append_column(column) cell = gtk.CellRendererText() column = gtk.TreeViewColumn('Field name', cell, text=0) self.view2.append_column(column) self.model1 = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING) self.model2 = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING) for f in preload: self.model2.set(self.model2.append(), 0, f[1], 1, f[0]) self.fields = {} def model_populate(fields, prefix_node='', prefix=None, prefix_value='', 
level=2): fields_order = fields.keys() fields_order.sort(lambda x,y: -cmp(fields[x].get('string', ''), fields[y].get('string', ''))) for field in fields_order: self.fields_data[prefix_node+field] = fields[field] if prefix_node: self.fields_data[prefix_node + field]['string'] = '%s%s' % (prefix_value, self.fields_data[prefix_node + field]['string']) st_name = fields[field]['string'] or field node = self.model1.insert(prefix, 0, [st_name, prefix_node+field, (fields[field].get('required', False) and '#ddddff') or 'white']) self.fields[prefix_node+field] = (st_name, fields[field].get('relation', False)) if fields[field].get('relation', False) and level>0: fields2 = rpc.session.rpc_exec_auth('/object', 'execute', fields[field]['relation'], 'fields_get', False, rpc.session.context) fields2.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}}) model_populate(fields2, prefix_node+field+'/', node, st_name+'/', level-1) fields.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}}) model_populate(fields) self.view1.set_model(self.model1) self.view2.set_model(self.model2) self.view1.show_all() self.view2.show_all() self.wid_action = self.glade.get_widget('win_saveas_combo') self.wid_write_field_names = self.glade.get_widget('add_field_names_cb') self.wid_import_compatible = self.glade.get_widget('import_compatible') action = self.wid_action.set_active(os.name!='nt') self.glade.signal_connect('on_but_unselect_all_clicked', self.sig_unsel_all) self.glade.signal_connect('on_but_select_all_clicked', self.sig_sel_all) self.glade.signal_connect('on_but_select_clicked', self.sig_sel) self.glade.signal_connect('on_but_unselect_clicked', self.sig_unsel) self.glade.signal_connect('on_but_predefined_clicked', self.add_predef) self.glade.signal_connect('on_but_delpredefined_clicked', self.del_export_list_btn) # Creating the predefined export view self.pref_export = gtk.TreeView() self.pref_export.append_column(gtk.TreeViewColumn('Export name', gtk.CellRendererText(), 
text=1)) self.pref_export.append_column(gtk.TreeViewColumn('Exported fields', gtk.CellRendererText(), text=2)) self.glade.get_widget('predefined_exports').add(self.pref_export) self.pref_export.connect("row-activated", self.sel_predef) self.pref_export.connect('key_press_event', self.del_export_list_key) # Fill the predefined export tree view and show everything self.fill_predefwin() self.pref_export.show_all() def del_export_list_key(self,widget, event, *args): if event.keyval==gtk.keysyms.Delete: self.del_selected_export_list() def del_export_list_btn(self, widget=None): self.del_selected_export_list() def del_selected_export_list(self): store, paths = self.pref_export.get_selection().get_selected_rows() for p in paths: export_fields= store.get_value(store.__getitem__(p[0]).iter,0) export_name= store.get_value(store.__getitem__(p[0]).iter,1) ir_export = rpc.RPCProxy('ir.exports') ir_export_line = rpc.RPCProxy('ir.exports.line') export_ids=ir_export.search([('name','=',export_name)]) for id in export_ids: fields=[] line_ids=ir_export_line.search([('export_id','=',id)]) obj_line=ir_export_line.read(line_ids) for i in range(0,len(obj_line)): fields.append(obj_line[i]['name']) if fields==export_fields: ir_export.unlink(id) ir_export_line.unlink(line_ids) store.remove(store.get_iter(p)) break def sig_sel_all(self, widget=None): self.model2.clear() for field, relation in self.fields.keys(): if not relation:
def sig_sel(self, widget=None): sel = self.view1.get_selection() sel.selected_foreach(self._sig_sel_add) def _sig_sel_add(self, store, path, iter): name, relation = self.fields[store.get_value(iter,1)] #if relation: # return num = self.model2.append() self.model2.set(num, 0, store.get_value(iter,0), 1, store.get_value(iter,1)) def sig_unsel(self, widget=None): store, paths = self.view2.get_selection().get_selected_rows() for p in paths: store.remove(store.get_iter(p)) def sig_unsel_all(self, widget=None): self.model2.clear() def fill_predefwin(self): self.predef_model = gtk.ListStore(gobject.TYPE_PYOBJECT, gobject.TYPE_STRING, gobject.TYPE_STRING) ir_export = rpc.RPCProxy('ir.exports') ir_export_line = rpc.RPCProxy('ir.exports.line') export_ids = ir_export.search([('resource', '=', self.model)]) for export in ir_export.read(export_ids): fields = ir_export_line.read(export['export_fields']) self.predef_model.append(([f['name'] for f in fields], export['name'], ', '.join([self.fields_data[f['name']]['string'] for f in fields]))) self.pref_export.set_model(self.predef_model) def add_predef(self, button): name = common.ask('What is the name of this export ?') if not name: return ir_export = rpc.RPCProxy('ir.exports') iter = self.model2.get_iter_root() fields = [] while iter: field_name = self.model2.get_value(iter, 1) fields.append(field_name) iter = self.model2.iter_next(iter) ir_export.create({'name' : name, 'resource' : self.model, 'export_fields' : [(0, 0, {'name' : f}) for f in fields]}) self.predef_model.append((fields, name, ','.join([self.fields_data[f]['string'] for f in fields]))) def sel_predef(self, treeview, path, column): self.model2.clear() for field in self.predef_model[path[0]][0]: self.model2.append((self.fields_data[field]['string'], field)) def go(self): button = self.win.run() if button==gtk.RESPONSE_OK: fields = [] fields2 = [] iter = self.model2.get_iter_root() while iter: fields.append(self.model2.get_value(iter, 1)) 
fields2.append(self.model2.get_value(iter, 0)) iter = self.model2.iter_next(iter) action = self.wid_action.get_active() self.parent.present() self.win.destroy() import_comp = self.wid_import_compatible.get_active() ctx = self.context.copy() ctx['import_comp'] = import_comp result = datas_read(self.ids, self.model, fields, self.fields_data, context=ctx) if result.get('warning',False): common.message_box(_('Exportation Error !'), unicode(result.get('warning',False))) return False result = result.get('datas',[]) if import_comp: fields2 = fields if not action: open_excel(fields2, result) else: fname = common.file_selection(_('Save As...'), parent=self.parent, action=gtk.FILE_CHOOSER_ACTION_SAVE) if fname: export_csv(fname, fields2, result, self.wid_write_field_names.get_active()) return True else: self.parent.present() self.win.destroy() return False # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
self.model2.set(self.model2.append(), 0, self.fields[field], 1, field)
conditional_block
win_export.py
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved # $Id$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import gtk from gtk import glade import gobject import gettext import common import rpc import service import types import os def export_csv(fname, fields, result, write_title=False): import csv try: fp = file(fname, 'wb+') writer = csv.writer(fp) if write_title: writer.writerow(fields) for data in result: row = [] for d in data: if type(d)==types.StringType: row.append(d.replace('\n',' ').replace('\t',' ')) else: row.append(d) writer.writerow(row) fp.close() common.message(str(len(result))+_(' record(s) saved !')) return True except IOError, (errno, strerror): common.message(_("Operation failed !\nI/O error")+"(%s)" % (errno,)) return False def open_excel(fields, result): if os.name == 'nt': try: from win32com.client import Dispatch xlApp = Dispatch("Excel.Application") xlApp.Workbooks.Add() for col in range(len(fields)): xlApp.ActiveSheet.Cells(1,col+1).Value = fields[col] sht = xlApp.ActiveSheet for a in result: for b in range(len(a)): if type(a[b]) == type(''): a[b]=a[b].decode('utf-8','replace') elif type(a[b]) == type([]): if 
len(a[b])==2: a[b] = a[b][1].decode('utf-8','replace') else: a[b] = '' sht.Range(sht.Cells(2, 1), sht.Cells(len(result)+1, len(fields))).Value = result xlApp.Visible = 1 except: common.error(_('Error Opening Excel !'),'') else: common.message(_("Function only available for MS Office !\nSorry, OOo users :(")) def datas_read(ids, model, fields, fields_view, prefix='', context=None): ctx = context.copy() ctx.update(rpc.session.context) datas = rpc.session.rpc_exec_auth('/object', 'execute', model, 'export_data', ids, fields, ctx) return datas class win_export(object): def __init__(self, model, ids, fields, preload = [], parent=None, context=None): self.glade = glade.XML(common.terp_path("openerp.glade"), 'win_save_as', gettext.textdomain()) self.win = self.glade.get_widget('win_save_as') self.ids = ids self.model = model self.fields_data = {} if context is None: context = {} self.context = context if parent is None: parent = service.LocalService('gui.main').window self.win.set_transient_for(parent) self.win.set_icon(common.OPENERP_ICON) self.parent = parent self.view1 = gtk.TreeView() self.view1.get_selection().set_mode(gtk.SELECTION_MULTIPLE) self.glade.get_widget('exp_vp1').add(self.view1) self.view2 = gtk.TreeView() self.view2.get_selection().set_mode(gtk.SELECTION_MULTIPLE) self.glade.get_widget('exp_vp2').add(self.view2) self.view1.set_headers_visible(False) self.view2.set_headers_visible(False) cell = gtk.CellRendererText() column = gtk.TreeViewColumn('Field name', cell, text=0, background=2) self.view1.append_column(column) cell = gtk.CellRendererText() column = gtk.TreeViewColumn('Field name', cell, text=0) self.view2.append_column(column) self.model1 = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING) self.model2 = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING) for f in preload: self.model2.set(self.model2.append(), 0, f[1], 1, f[0]) self.fields = {} def model_populate(fields, prefix_node='', prefix=None, prefix_value='', 
level=2): fields_order = fields.keys() fields_order.sort(lambda x,y: -cmp(fields[x].get('string', ''), fields[y].get('string', ''))) for field in fields_order: self.fields_data[prefix_node+field] = fields[field] if prefix_node: self.fields_data[prefix_node + field]['string'] = '%s%s' % (prefix_value, self.fields_data[prefix_node + field]['string']) st_name = fields[field]['string'] or field node = self.model1.insert(prefix, 0, [st_name, prefix_node+field, (fields[field].get('required', False) and '#ddddff') or 'white']) self.fields[prefix_node+field] = (st_name, fields[field].get('relation', False)) if fields[field].get('relation', False) and level>0: fields2 = rpc.session.rpc_exec_auth('/object', 'execute', fields[field]['relation'], 'fields_get', False, rpc.session.context) fields2.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}}) model_populate(fields2, prefix_node+field+'/', node, st_name+'/', level-1) fields.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}}) model_populate(fields) self.view1.set_model(self.model1) self.view2.set_model(self.model2) self.view1.show_all() self.view2.show_all() self.wid_action = self.glade.get_widget('win_saveas_combo') self.wid_write_field_names = self.glade.get_widget('add_field_names_cb') self.wid_import_compatible = self.glade.get_widget('import_compatible') action = self.wid_action.set_active(os.name!='nt') self.glade.signal_connect('on_but_unselect_all_clicked', self.sig_unsel_all) self.glade.signal_connect('on_but_select_all_clicked', self.sig_sel_all) self.glade.signal_connect('on_but_select_clicked', self.sig_sel) self.glade.signal_connect('on_but_unselect_clicked', self.sig_unsel) self.glade.signal_connect('on_but_predefined_clicked', self.add_predef) self.glade.signal_connect('on_but_delpredefined_clicked', self.del_export_list_btn) # Creating the predefined export view self.pref_export = gtk.TreeView() self.pref_export.append_column(gtk.TreeViewColumn('Export name', gtk.CellRendererText(), 
text=1)) self.pref_export.append_column(gtk.TreeViewColumn('Exported fields', gtk.CellRendererText(), text=2)) self.glade.get_widget('predefined_exports').add(self.pref_export) self.pref_export.connect("row-activated", self.sel_predef) self.pref_export.connect('key_press_event', self.del_export_list_key) # Fill the predefined export tree view and show everything self.fill_predefwin()
self.pref_export.show_all() def del_export_list_key(self,widget, event, *args): if event.keyval==gtk.keysyms.Delete: self.del_selected_export_list() def del_export_list_btn(self, widget=None): self.del_selected_export_list() def del_selected_export_list(self): store, paths = self.pref_export.get_selection().get_selected_rows() for p in paths: export_fields= store.get_value(store.__getitem__(p[0]).iter,0) export_name= store.get_value(store.__getitem__(p[0]).iter,1) ir_export = rpc.RPCProxy('ir.exports') ir_export_line = rpc.RPCProxy('ir.exports.line') export_ids=ir_export.search([('name','=',export_name)]) for id in export_ids: fields=[] line_ids=ir_export_line.search([('export_id','=',id)]) obj_line=ir_export_line.read(line_ids) for i in range(0,len(obj_line)): fields.append(obj_line[i]['name']) if fields==export_fields: ir_export.unlink(id) ir_export_line.unlink(line_ids) store.remove(store.get_iter(p)) break def sig_sel_all(self, widget=None): self.model2.clear() for field, relation in self.fields.keys(): if not relation: self.model2.set(self.model2.append(), 0, self.fields[field], 1, field) def sig_sel(self, widget=None): sel = self.view1.get_selection() sel.selected_foreach(self._sig_sel_add) def _sig_sel_add(self, store, path, iter): name, relation = self.fields[store.get_value(iter,1)] #if relation: # return num = self.model2.append() self.model2.set(num, 0, store.get_value(iter,0), 1, store.get_value(iter,1)) def sig_unsel(self, widget=None): store, paths = self.view2.get_selection().get_selected_rows() for p in paths: store.remove(store.get_iter(p)) def sig_unsel_all(self, widget=None): self.model2.clear() def fill_predefwin(self): self.predef_model = gtk.ListStore(gobject.TYPE_PYOBJECT, gobject.TYPE_STRING, gobject.TYPE_STRING) ir_export = rpc.RPCProxy('ir.exports') ir_export_line = rpc.RPCProxy('ir.exports.line') export_ids = ir_export.search([('resource', '=', self.model)]) for export in ir_export.read(export_ids): fields = 
ir_export_line.read(export['export_fields']) self.predef_model.append(([f['name'] for f in fields], export['name'], ', '.join([self.fields_data[f['name']]['string'] for f in fields]))) self.pref_export.set_model(self.predef_model) def add_predef(self, button): name = common.ask('What is the name of this export ?') if not name: return ir_export = rpc.RPCProxy('ir.exports') iter = self.model2.get_iter_root() fields = [] while iter: field_name = self.model2.get_value(iter, 1) fields.append(field_name) iter = self.model2.iter_next(iter) ir_export.create({'name' : name, 'resource' : self.model, 'export_fields' : [(0, 0, {'name' : f}) for f in fields]}) self.predef_model.append((fields, name, ','.join([self.fields_data[f]['string'] for f in fields]))) def sel_predef(self, treeview, path, column): self.model2.clear() for field in self.predef_model[path[0]][0]: self.model2.append((self.fields_data[field]['string'], field)) def go(self): button = self.win.run() if button==gtk.RESPONSE_OK: fields = [] fields2 = [] iter = self.model2.get_iter_root() while iter: fields.append(self.model2.get_value(iter, 1)) fields2.append(self.model2.get_value(iter, 0)) iter = self.model2.iter_next(iter) action = self.wid_action.get_active() self.parent.present() self.win.destroy() import_comp = self.wid_import_compatible.get_active() ctx = self.context.copy() ctx['import_comp'] = import_comp result = datas_read(self.ids, self.model, fields, self.fields_data, context=ctx) if result.get('warning',False): common.message_box(_('Exportation Error !'), unicode(result.get('warning',False))) return False result = result.get('datas',[]) if import_comp: fields2 = fields if not action: open_excel(fields2, result) else: fname = common.file_selection(_('Save As...'), parent=self.parent, action=gtk.FILE_CHOOSER_ACTION_SAVE) if fname: export_csv(fname, fields2, result, self.wid_write_field_names.get_active()) return True else: self.parent.present() self.win.destroy() return False # 
vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
random_line_split
win_export.py
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved # $Id$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import gtk from gtk import glade import gobject import gettext import common import rpc import service import types import os def export_csv(fname, fields, result, write_title=False): import csv try: fp = file(fname, 'wb+') writer = csv.writer(fp) if write_title: writer.writerow(fields) for data in result: row = [] for d in data: if type(d)==types.StringType: row.append(d.replace('\n',' ').replace('\t',' ')) else: row.append(d) writer.writerow(row) fp.close() common.message(str(len(result))+_(' record(s) saved !')) return True except IOError, (errno, strerror): common.message(_("Operation failed !\nI/O error")+"(%s)" % (errno,)) return False def open_excel(fields, result): if os.name == 'nt': try: from win32com.client import Dispatch xlApp = Dispatch("Excel.Application") xlApp.Workbooks.Add() for col in range(len(fields)): xlApp.ActiveSheet.Cells(1,col+1).Value = fields[col] sht = xlApp.ActiveSheet for a in result: for b in range(len(a)): if type(a[b]) == type(''): a[b]=a[b].decode('utf-8','replace') elif type(a[b]) == type([]): if 
len(a[b])==2: a[b] = a[b][1].decode('utf-8','replace') else: a[b] = '' sht.Range(sht.Cells(2, 1), sht.Cells(len(result)+1, len(fields))).Value = result xlApp.Visible = 1 except: common.error(_('Error Opening Excel !'),'') else: common.message(_("Function only available for MS Office !\nSorry, OOo users :(")) def datas_read(ids, model, fields, fields_view, prefix='', context=None): ctx = context.copy() ctx.update(rpc.session.context) datas = rpc.session.rpc_exec_auth('/object', 'execute', model, 'export_data', ids, fields, ctx) return datas class win_export(object): def __init__(self, model, ids, fields, preload = [], parent=None, context=None):
def del_export_list_key(self,widget, event, *args): if event.keyval==gtk.keysyms.Delete: self.del_selected_export_list() def del_export_list_btn(self, widget=None): self.del_selected_export_list() def del_selected_export_list(self): store, paths = self.pref_export.get_selection().get_selected_rows() for p in paths: export_fields= store.get_value(store.__getitem__(p[0]).iter,0) export_name= store.get_value(store.__getitem__(p[0]).iter,1) ir_export = rpc.RPCProxy('ir.exports') ir_export_line = rpc.RPCProxy('ir.exports.line') export_ids=ir_export.search([('name','=',export_name)]) for id in export_ids: fields=[] line_ids=ir_export_line.search([('export_id','=',id)]) obj_line=ir_export_line.read(line_ids) for i in range(0,len(obj_line)): fields.append(obj_line[i]['name']) if fields==export_fields: ir_export.unlink(id) ir_export_line.unlink(line_ids) store.remove(store.get_iter(p)) break def sig_sel_all(self, widget=None): self.model2.clear() for field, relation in self.fields.keys(): if not relation: self.model2.set(self.model2.append(), 0, self.fields[field], 1, field) def sig_sel(self, widget=None): sel = self.view1.get_selection() sel.selected_foreach(self._sig_sel_add) def _sig_sel_add(self, store, path, iter): name, relation = self.fields[store.get_value(iter,1)] #if relation: # return num = self.model2.append() self.model2.set(num, 0, store.get_value(iter,0), 1, store.get_value(iter,1)) def sig_unsel(self, widget=None): store, paths = self.view2.get_selection().get_selected_rows() for p in paths: store.remove(store.get_iter(p)) def sig_unsel_all(self, widget=None): self.model2.clear() def fill_predefwin(self): self.predef_model = gtk.ListStore(gobject.TYPE_PYOBJECT, gobject.TYPE_STRING, gobject.TYPE_STRING) ir_export = rpc.RPCProxy('ir.exports') ir_export_line = rpc.RPCProxy('ir.exports.line') export_ids = ir_export.search([('resource', '=', self.model)]) for export in ir_export.read(export_ids): fields = ir_export_line.read(export['export_fields']) 
self.predef_model.append(([f['name'] for f in fields], export['name'], ', '.join([self.fields_data[f['name']]['string'] for f in fields]))) self.pref_export.set_model(self.predef_model) def add_predef(self, button): name = common.ask('What is the name of this export ?') if not name: return ir_export = rpc.RPCProxy('ir.exports') iter = self.model2.get_iter_root() fields = [] while iter: field_name = self.model2.get_value(iter, 1) fields.append(field_name) iter = self.model2.iter_next(iter) ir_export.create({'name' : name, 'resource' : self.model, 'export_fields' : [(0, 0, {'name' : f}) for f in fields]}) self.predef_model.append((fields, name, ','.join([self.fields_data[f]['string'] for f in fields]))) def sel_predef(self, treeview, path, column): self.model2.clear() for field in self.predef_model[path[0]][0]: self.model2.append((self.fields_data[field]['string'], field)) def go(self): button = self.win.run() if button==gtk.RESPONSE_OK: fields = [] fields2 = [] iter = self.model2.get_iter_root() while iter: fields.append(self.model2.get_value(iter, 1)) fields2.append(self.model2.get_value(iter, 0)) iter = self.model2.iter_next(iter) action = self.wid_action.get_active() self.parent.present() self.win.destroy() import_comp = self.wid_import_compatible.get_active() ctx = self.context.copy() ctx['import_comp'] = import_comp result = datas_read(self.ids, self.model, fields, self.fields_data, context=ctx) if result.get('warning',False): common.message_box(_('Exportation Error !'), unicode(result.get('warning',False))) return False result = result.get('datas',[]) if import_comp: fields2 = fields if not action: open_excel(fields2, result) else: fname = common.file_selection(_('Save As...'), parent=self.parent, action=gtk.FILE_CHOOSER_ACTION_SAVE) if fname: export_csv(fname, fields2, result, self.wid_write_field_names.get_active()) return True else: self.parent.present() self.win.destroy() return False # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
self.glade = glade.XML(common.terp_path("openerp.glade"), 'win_save_as', gettext.textdomain()) self.win = self.glade.get_widget('win_save_as') self.ids = ids self.model = model self.fields_data = {} if context is None: context = {} self.context = context if parent is None: parent = service.LocalService('gui.main').window self.win.set_transient_for(parent) self.win.set_icon(common.OPENERP_ICON) self.parent = parent self.view1 = gtk.TreeView() self.view1.get_selection().set_mode(gtk.SELECTION_MULTIPLE) self.glade.get_widget('exp_vp1').add(self.view1) self.view2 = gtk.TreeView() self.view2.get_selection().set_mode(gtk.SELECTION_MULTIPLE) self.glade.get_widget('exp_vp2').add(self.view2) self.view1.set_headers_visible(False) self.view2.set_headers_visible(False) cell = gtk.CellRendererText() column = gtk.TreeViewColumn('Field name', cell, text=0, background=2) self.view1.append_column(column) cell = gtk.CellRendererText() column = gtk.TreeViewColumn('Field name', cell, text=0) self.view2.append_column(column) self.model1 = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING) self.model2 = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING) for f in preload: self.model2.set(self.model2.append(), 0, f[1], 1, f[0]) self.fields = {} def model_populate(fields, prefix_node='', prefix=None, prefix_value='', level=2): fields_order = fields.keys() fields_order.sort(lambda x,y: -cmp(fields[x].get('string', ''), fields[y].get('string', ''))) for field in fields_order: self.fields_data[prefix_node+field] = fields[field] if prefix_node: self.fields_data[prefix_node + field]['string'] = '%s%s' % (prefix_value, self.fields_data[prefix_node + field]['string']) st_name = fields[field]['string'] or field node = self.model1.insert(prefix, 0, [st_name, prefix_node+field, (fields[field].get('required', False) and '#ddddff') or 'white']) self.fields[prefix_node+field] = (st_name, fields[field].get('relation', False)) if fields[field].get('relation', False) and 
level>0: fields2 = rpc.session.rpc_exec_auth('/object', 'execute', fields[field]['relation'], 'fields_get', False, rpc.session.context) fields2.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}}) model_populate(fields2, prefix_node+field+'/', node, st_name+'/', level-1) fields.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}}) model_populate(fields) self.view1.set_model(self.model1) self.view2.set_model(self.model2) self.view1.show_all() self.view2.show_all() self.wid_action = self.glade.get_widget('win_saveas_combo') self.wid_write_field_names = self.glade.get_widget('add_field_names_cb') self.wid_import_compatible = self.glade.get_widget('import_compatible') action = self.wid_action.set_active(os.name!='nt') self.glade.signal_connect('on_but_unselect_all_clicked', self.sig_unsel_all) self.glade.signal_connect('on_but_select_all_clicked', self.sig_sel_all) self.glade.signal_connect('on_but_select_clicked', self.sig_sel) self.glade.signal_connect('on_but_unselect_clicked', self.sig_unsel) self.glade.signal_connect('on_but_predefined_clicked', self.add_predef) self.glade.signal_connect('on_but_delpredefined_clicked', self.del_export_list_btn) # Creating the predefined export view self.pref_export = gtk.TreeView() self.pref_export.append_column(gtk.TreeViewColumn('Export name', gtk.CellRendererText(), text=1)) self.pref_export.append_column(gtk.TreeViewColumn('Exported fields', gtk.CellRendererText(), text=2)) self.glade.get_widget('predefined_exports').add(self.pref_export) self.pref_export.connect("row-activated", self.sel_predef) self.pref_export.connect('key_press_event', self.del_export_list_key) # Fill the predefined export tree view and show everything self.fill_predefwin() self.pref_export.show_all()
identifier_body
win_export.py
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved # $Id$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import gtk from gtk import glade import gobject import gettext import common import rpc import service import types import os def export_csv(fname, fields, result, write_title=False): import csv try: fp = file(fname, 'wb+') writer = csv.writer(fp) if write_title: writer.writerow(fields) for data in result: row = [] for d in data: if type(d)==types.StringType: row.append(d.replace('\n',' ').replace('\t',' ')) else: row.append(d) writer.writerow(row) fp.close() common.message(str(len(result))+_(' record(s) saved !')) return True except IOError, (errno, strerror): common.message(_("Operation failed !\nI/O error")+"(%s)" % (errno,)) return False def open_excel(fields, result): if os.name == 'nt': try: from win32com.client import Dispatch xlApp = Dispatch("Excel.Application") xlApp.Workbooks.Add() for col in range(len(fields)): xlApp.ActiveSheet.Cells(1,col+1).Value = fields[col] sht = xlApp.ActiveSheet for a in result: for b in range(len(a)): if type(a[b]) == type(''): a[b]=a[b].decode('utf-8','replace') elif type(a[b]) == type([]): if 
len(a[b])==2: a[b] = a[b][1].decode('utf-8','replace') else: a[b] = '' sht.Range(sht.Cells(2, 1), sht.Cells(len(result)+1, len(fields))).Value = result xlApp.Visible = 1 except: common.error(_('Error Opening Excel !'),'') else: common.message(_("Function only available for MS Office !\nSorry, OOo users :(")) def datas_read(ids, model, fields, fields_view, prefix='', context=None): ctx = context.copy() ctx.update(rpc.session.context) datas = rpc.session.rpc_exec_auth('/object', 'execute', model, 'export_data', ids, fields, ctx) return datas class win_export(object): def __init__(self, model, ids, fields, preload = [], parent=None, context=None): self.glade = glade.XML(common.terp_path("openerp.glade"), 'win_save_as', gettext.textdomain()) self.win = self.glade.get_widget('win_save_as') self.ids = ids self.model = model self.fields_data = {} if context is None: context = {} self.context = context if parent is None: parent = service.LocalService('gui.main').window self.win.set_transient_for(parent) self.win.set_icon(common.OPENERP_ICON) self.parent = parent self.view1 = gtk.TreeView() self.view1.get_selection().set_mode(gtk.SELECTION_MULTIPLE) self.glade.get_widget('exp_vp1').add(self.view1) self.view2 = gtk.TreeView() self.view2.get_selection().set_mode(gtk.SELECTION_MULTIPLE) self.glade.get_widget('exp_vp2').add(self.view2) self.view1.set_headers_visible(False) self.view2.set_headers_visible(False) cell = gtk.CellRendererText() column = gtk.TreeViewColumn('Field name', cell, text=0, background=2) self.view1.append_column(column) cell = gtk.CellRendererText() column = gtk.TreeViewColumn('Field name', cell, text=0) self.view2.append_column(column) self.model1 = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING) self.model2 = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING) for f in preload: self.model2.set(self.model2.append(), 0, f[1], 1, f[0]) self.fields = {} def model_populate(fields, prefix_node='', prefix=None, prefix_value='', 
level=2): fields_order = fields.keys() fields_order.sort(lambda x,y: -cmp(fields[x].get('string', ''), fields[y].get('string', ''))) for field in fields_order: self.fields_data[prefix_node+field] = fields[field] if prefix_node: self.fields_data[prefix_node + field]['string'] = '%s%s' % (prefix_value, self.fields_data[prefix_node + field]['string']) st_name = fields[field]['string'] or field node = self.model1.insert(prefix, 0, [st_name, prefix_node+field, (fields[field].get('required', False) and '#ddddff') or 'white']) self.fields[prefix_node+field] = (st_name, fields[field].get('relation', False)) if fields[field].get('relation', False) and level>0: fields2 = rpc.session.rpc_exec_auth('/object', 'execute', fields[field]['relation'], 'fields_get', False, rpc.session.context) fields2.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}}) model_populate(fields2, prefix_node+field+'/', node, st_name+'/', level-1) fields.update({'id':{'string':'ID'},'db_id':{'string':'Database ID'}}) model_populate(fields) self.view1.set_model(self.model1) self.view2.set_model(self.model2) self.view1.show_all() self.view2.show_all() self.wid_action = self.glade.get_widget('win_saveas_combo') self.wid_write_field_names = self.glade.get_widget('add_field_names_cb') self.wid_import_compatible = self.glade.get_widget('import_compatible') action = self.wid_action.set_active(os.name!='nt') self.glade.signal_connect('on_but_unselect_all_clicked', self.sig_unsel_all) self.glade.signal_connect('on_but_select_all_clicked', self.sig_sel_all) self.glade.signal_connect('on_but_select_clicked', self.sig_sel) self.glade.signal_connect('on_but_unselect_clicked', self.sig_unsel) self.glade.signal_connect('on_but_predefined_clicked', self.add_predef) self.glade.signal_connect('on_but_delpredefined_clicked', self.del_export_list_btn) # Creating the predefined export view self.pref_export = gtk.TreeView() self.pref_export.append_column(gtk.TreeViewColumn('Export name', gtk.CellRendererText(), 
text=1)) self.pref_export.append_column(gtk.TreeViewColumn('Exported fields', gtk.CellRendererText(), text=2)) self.glade.get_widget('predefined_exports').add(self.pref_export) self.pref_export.connect("row-activated", self.sel_predef) self.pref_export.connect('key_press_event', self.del_export_list_key) # Fill the predefined export tree view and show everything self.fill_predefwin() self.pref_export.show_all() def del_export_list_key(self,widget, event, *args): if event.keyval==gtk.keysyms.Delete: self.del_selected_export_list() def
(self, widget=None): self.del_selected_export_list() def del_selected_export_list(self): store, paths = self.pref_export.get_selection().get_selected_rows() for p in paths: export_fields= store.get_value(store.__getitem__(p[0]).iter,0) export_name= store.get_value(store.__getitem__(p[0]).iter,1) ir_export = rpc.RPCProxy('ir.exports') ir_export_line = rpc.RPCProxy('ir.exports.line') export_ids=ir_export.search([('name','=',export_name)]) for id in export_ids: fields=[] line_ids=ir_export_line.search([('export_id','=',id)]) obj_line=ir_export_line.read(line_ids) for i in range(0,len(obj_line)): fields.append(obj_line[i]['name']) if fields==export_fields: ir_export.unlink(id) ir_export_line.unlink(line_ids) store.remove(store.get_iter(p)) break def sig_sel_all(self, widget=None): self.model2.clear() for field, relation in self.fields.keys(): if not relation: self.model2.set(self.model2.append(), 0, self.fields[field], 1, field) def sig_sel(self, widget=None): sel = self.view1.get_selection() sel.selected_foreach(self._sig_sel_add) def _sig_sel_add(self, store, path, iter): name, relation = self.fields[store.get_value(iter,1)] #if relation: # return num = self.model2.append() self.model2.set(num, 0, store.get_value(iter,0), 1, store.get_value(iter,1)) def sig_unsel(self, widget=None): store, paths = self.view2.get_selection().get_selected_rows() for p in paths: store.remove(store.get_iter(p)) def sig_unsel_all(self, widget=None): self.model2.clear() def fill_predefwin(self): self.predef_model = gtk.ListStore(gobject.TYPE_PYOBJECT, gobject.TYPE_STRING, gobject.TYPE_STRING) ir_export = rpc.RPCProxy('ir.exports') ir_export_line = rpc.RPCProxy('ir.exports.line') export_ids = ir_export.search([('resource', '=', self.model)]) for export in ir_export.read(export_ids): fields = ir_export_line.read(export['export_fields']) self.predef_model.append(([f['name'] for f in fields], export['name'], ', '.join([self.fields_data[f['name']]['string'] for f in fields]))) 
self.pref_export.set_model(self.predef_model) def add_predef(self, button): name = common.ask('What is the name of this export ?') if not name: return ir_export = rpc.RPCProxy('ir.exports') iter = self.model2.get_iter_root() fields = [] while iter: field_name = self.model2.get_value(iter, 1) fields.append(field_name) iter = self.model2.iter_next(iter) ir_export.create({'name' : name, 'resource' : self.model, 'export_fields' : [(0, 0, {'name' : f}) for f in fields]}) self.predef_model.append((fields, name, ','.join([self.fields_data[f]['string'] for f in fields]))) def sel_predef(self, treeview, path, column): self.model2.clear() for field in self.predef_model[path[0]][0]: self.model2.append((self.fields_data[field]['string'], field)) def go(self): button = self.win.run() if button==gtk.RESPONSE_OK: fields = [] fields2 = [] iter = self.model2.get_iter_root() while iter: fields.append(self.model2.get_value(iter, 1)) fields2.append(self.model2.get_value(iter, 0)) iter = self.model2.iter_next(iter) action = self.wid_action.get_active() self.parent.present() self.win.destroy() import_comp = self.wid_import_compatible.get_active() ctx = self.context.copy() ctx['import_comp'] = import_comp result = datas_read(self.ids, self.model, fields, self.fields_data, context=ctx) if result.get('warning',False): common.message_box(_('Exportation Error !'), unicode(result.get('warning',False))) return False result = result.get('datas',[]) if import_comp: fields2 = fields if not action: open_excel(fields2, result) else: fname = common.file_selection(_('Save As...'), parent=self.parent, action=gtk.FILE_CHOOSER_ACTION_SAVE) if fname: export_csv(fname, fields2, result, self.wid_write_field_names.get_active()) return True else: self.parent.present() self.win.destroy() return False # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
del_export_list_btn
identifier_name
main.py
import requests import pandas as pd import datetime as dt import time import smtplib import json CLIENT_EMAIL = "" CLIENT_PASSWORD = "" program = True email = "david@creativewavelength.co.uk" now = dt.datetime.now() countries_csv = pd.read_csv("countries.csv", sep='\s*,\s*', engine='python') df = pd.DataFrame(countries_csv) df['latitude'] = df['latitude'].astype(float) # Calling Sun Times API def get_sun(lat,long): response = requests.get(url=f'https://api.sunrise-sunset.org/json?lat={lat}&lng={long}&formatted=0') sun_times = response.json() return sun_times #COLLECTS USER LAT / LONG & ASKS IF THEY WANT TO SUBMIT EMAIL FOR ALERTS. SAVES DATA TO JSON IF YES. def user_input(): user = {} search = False while search == False: search_area = input("Type in your country name / ISO code. \nOr type 'manual', if you want to use your own coordinates\n") if len(df.loc[(df['name'] == search_area.title())]) > 0: matched_result = df.loc[(df['name'] == search_area.title())] user_latitude = matched_result['latitude'].item() user_longitude = matched_result['longitude'].item() print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})") search = True elif len(df.loc[(df['country'] == search_area.upper())]) > 0: matched_result = df.loc[(df['country'] == search_area.upper())] user_latitude = matched_result['latitude'].item() user_longitude = matched_result['longitude'].item() print("=====================================================================================") print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})") print("=====================================================================================") search = True elif search_area.lower() == 'manual': user_latitude = input("Enter Latitude\n") user_longitude = input("Enter longitude\n") search = True else: print("Country not found in Database, check spelling or type 'manual' to enter 
location manually\n") question = input("Do you want email alerts when the ISS is visible from your location?\n") if question.lower() == "y" or question.lower() == "yes": print("Email Alert / Future use case / Testing") user_email = input("\nWhat is your email address?\nThis Application is not currently secured, please do not use a work / primary email address\n") new_entry = {'email': user_email, 'latitude': user_latitude, 'longitude':user_longitude} try: with open("users.json", "r") as user_file: # Reading old data data = pd.read_json("users.json") except FileNotFoundError: with open("users.json", "w") as user_file: default = {'email': ['david@creative-wavelength.com', 'porfirio.cd52000a@mailerq.net', 'rashad.0c3e9859@inboxeen.com', 'darrick.0694ea0c@creative-wavelength.com'], 'latitude': [40.463667, 53.41291, 37.09024, 35.86166], 'longitude': [-3.74922, -8.24389, -95.712891, 104.195397]} default_df = pd.DataFrame(default, columns=['email', 'latitude', 'longitude']) updated_df = default_df.append(new_entry, ignore_index=True) updated_df.to_json(r'users.json', indent=4) else: df_stored = pd.DataFrame(data) updated_df = df_stored.append(new_entry, ignore_index=True) updated_df.to_json(r'users.json',indent=4) finally: print("\nUser Lat, Long & Email Returned") return (user_latitude,user_longitude,user_email) else: print("user lat & user long returned only") return (user_latitude,user_longitude) user = user_input() user_la = user[0] user_lo = user[1] def get_iss_location(): response = requests.get(url="http://api.open-notify.org/iss-now.json")
response.raise_for_status() data = response.json() latitude = float(data["iss_position"]["latitude"]) longitude = float(data["iss_position"]["longitude"]) return (latitude,longitude) iss_location = get_iss_location() def find_user(): ISS = get_iss_location() try: json_stored = pd.read_json('users.json') df_stored = pd.DataFrame(json_stored) print(f" ISS location = {ISS}") except FileNotFoundError: print("File not Found") return False else: print("df_stored") print(df_stored) condition = df_stored['latitude'].between(-45,45) print(condition) find_user() latitudes = df['latitude'].to_list() longitudes = df['longitude'].to_list() nearby_countries = df[df['longitude'].between(iss_location[1] -45, iss_location[1] + 45) & df['latitude'].between(iss_location[0] -45, iss_location[0] + 45)] # Current limited use is, must be near the center of the country. countries_nearby_list = nearby_countries['name'].to_list() country_list = [] def nearby_countries(): for country in countries_nearby_list: country_add = df.loc[(df['name'] == country)] country_name = country_add['name'].item() country_latitude = country_add['latitude'].item() country_longitude = country_add['longitude'].item() direction = [] if iss_lat < country_latitude: direction.append("North") else: direction.append("South") if iss_long < country_longitude: direction.append("East") else: direction.append("West") country = {country_name: country_add['name'].item(), 'Latitude': country_add['latitude'].item(), 'Longitude':country_add['longitude'].item(), "Direction": f"{' '.join(direction)} of ISS"} country_list.append(country) iss_lat = get_iss_location()[0] iss_long = get_iss_location()[1] def direction_NS(iss_lat, country_location): if iss_lat > country_location['latitude']: print(f"ISS is North of {country_location['name']}") return "North" else: print("South") return "South" def direction_WE(iss_long, country_location): if iss_lat > country_location['latitude']: print(f"ISS is North of {country_location['name']}") 
return "North" else: print("South") return "South" #Check if there are countries nearby if len(countries_nearby_list) != 0: # print(countries_nearby_list) print("\n=====================================================================================") print(f"COUNTRIES THE ISS IS CURRENTLY PASSING OVER - {len(countries_nearby_list)}") nearby_countries() print("=====================================================================================") print(country_list) else: print("=====================================================================================") print(f"ISS NOT CURRENTLY LOCATED CLOSE ENOUGH TO CENTER OF ANY COUNTRY FOR REFERENCE") nearby_countries() print("=====================================================================================") print("\n=====================================================================================") print("Current Location Data") print("Times are in UTC") print("=====================================================================================") #Suntimes API # All this code does, is return True if it's night time where the user currently is. # Most of this code is for printing local information accessed from API / learning / testing features / keeping it there for potential future expansion. # Essentially though, all it does is return True at night. 
def local_is_night(user_la, user_lo): #GET CURRENT TIME current_time = int(str(now).split(' ')[1].split('.')[0][:-3].replace(":","")) sun_times = get_sun(user_la,user_lo) # print(current_time) #SUNRISE FORMATTING INTO DIGITS sunrise = int(sun_times["results"]["sunrise"].split("T")[1].split("+")[0][:-3].replace(":","")) sunset = int(sun_times["results"]["sunset"].split("T")[1].split("+")[0][:-3].replace(":","")) #SUNSET IN LOCATION if sunset > current_time: print(f"Current time is {sunrise}") time_until_sunset = sunset - current_time print(time_until_sunset) if len(str(time_until_sunset)) <= 2: print(f"{time_until_sunset} Minutes left until sunset in Submitted Location") else: print(f"Calculate time until sunset in Submitted location - {str(time_until_sunset)[:-2] + ':' + list(str(time_until_sunset))[1] + list(str(time_until_sunset))[2]}") print(f"Sunset time is {sunset}") #SUNRISE IN LOCATION if sunrise > current_time: print("Time until Sunrise ( Morning ) ") #TIME UNTIL SUNRISE TOMORROW if sunrise < current_time and sunset < current_time: sunrise_time_tomorrow = 2400 - current_time + sunrise print(f"Time until sunrise tomorrow {sunrise_time_tomorrow}") #FORMATTING FOR TIME OUTPUT if len(str(sunrise)) == 3: print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[1] + list(str(sunrise))[2]}") else: print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[2] + list(str(sunrise))[3]}") #SUNSET TIME - ALWAYS 4 DIGITS LONG print(f"Sunset time is {str(sunset)[:-2] + ':' + list(str(sunset))[2] + list(str(sunset))[3]}") #IS DARK? 
if sunset < current_time: return True else: return False if len(user) == 3: # Can pass email here if needed user_email = user[2] local_is_night(user_la,user_lo) else: local_is_night(user_la,user_lo) def is_iss_overhead(): print("Checking") while program == True: if is_iss_overhead() and local_is_night(): while True: if is_iss_overhead() and local_is_night(): with smtplib.SMTP("smtp.gmail.com") as connection: connection.starttls() connection.login(user=CLIENT_EMAIL, password=CLIENT_PASSWORD) connection.sendmail( from_addr=CLIENT_EMAIL, to_addrs=f"{email}", msg=f"Subject:Look Up 👆, The ISS is above your head! \n\nNo message tag yet") else: time.sleep(60) # Bug Catcher used earlier for finding formatting issue in data # for l in latitudes: # if isinstance(l,str): # print("String Located")
random_line_split
main.py
import requests import pandas as pd import datetime as dt import time import smtplib import json CLIENT_EMAIL = "" CLIENT_PASSWORD = "" program = True email = "david@creativewavelength.co.uk" now = dt.datetime.now() countries_csv = pd.read_csv("countries.csv", sep='\s*,\s*', engine='python') df = pd.DataFrame(countries_csv) df['latitude'] = df['latitude'].astype(float) # Calling Sun Times API def get_sun(lat,long): response = requests.get(url=f'https://api.sunrise-sunset.org/json?lat={lat}&lng={long}&formatted=0') sun_times = response.json() return sun_times #COLLECTS USER LAT / LONG & ASKS IF THEY WANT TO SUBMIT EMAIL FOR ALERTS. SAVES DATA TO JSON IF YES. def user_input(): user = {} search = False while search == False: search_area = input("Type in your country name / ISO code. \nOr type 'manual', if you want to use your own coordinates\n") if len(df.loc[(df['name'] == search_area.title())]) > 0: matched_result = df.loc[(df['name'] == search_area.title())] user_latitude = matched_result['latitude'].item() user_longitude = matched_result['longitude'].item() print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})") search = True elif len(df.loc[(df['country'] == search_area.upper())]) > 0: matched_result = df.loc[(df['country'] == search_area.upper())] user_latitude = matched_result['latitude'].item() user_longitude = matched_result['longitude'].item() print("=====================================================================================") print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})") print("=====================================================================================") search = True elif search_area.lower() == 'manual': user_latitude = input("Enter Latitude\n") user_longitude = input("Enter longitude\n") search = True else: print("Country not found in Database, check spelling or type 'manual' to enter 
location manually\n") question = input("Do you want email alerts when the ISS is visible from your location?\n") if question.lower() == "y" or question.lower() == "yes": print("Email Alert / Future use case / Testing") user_email = input("\nWhat is your email address?\nThis Application is not currently secured, please do not use a work / primary email address\n") new_entry = {'email': user_email, 'latitude': user_latitude, 'longitude':user_longitude} try: with open("users.json", "r") as user_file: # Reading old data data = pd.read_json("users.json") except FileNotFoundError: with open("users.json", "w") as user_file: default = {'email': ['david@creative-wavelength.com', 'porfirio.cd52000a@mailerq.net', 'rashad.0c3e9859@inboxeen.com', 'darrick.0694ea0c@creative-wavelength.com'], 'latitude': [40.463667, 53.41291, 37.09024, 35.86166], 'longitude': [-3.74922, -8.24389, -95.712891, 104.195397]} default_df = pd.DataFrame(default, columns=['email', 'latitude', 'longitude']) updated_df = default_df.append(new_entry, ignore_index=True) updated_df.to_json(r'users.json', indent=4) else: df_stored = pd.DataFrame(data) updated_df = df_stored.append(new_entry, ignore_index=True) updated_df.to_json(r'users.json',indent=4) finally: print("\nUser Lat, Long & Email Returned") return (user_latitude,user_longitude,user_email) else: print("user lat & user long returned only") return (user_latitude,user_longitude) user = user_input() user_la = user[0] user_lo = user[1] def get_iss_location(): response = requests.get(url="http://api.open-notify.org/iss-now.json") response.raise_for_status() data = response.json() latitude = float(data["iss_position"]["latitude"]) longitude = float(data["iss_position"]["longitude"]) return (latitude,longitude) iss_location = get_iss_location() def find_user(): ISS = get_iss_location() try: json_stored = pd.read_json('users.json') df_stored = pd.DataFrame(json_stored) print(f" ISS location = {ISS}") except FileNotFoundError: print("File not Found") return 
False else: print("df_stored") print(df_stored) condition = df_stored['latitude'].between(-45,45) print(condition) find_user() latitudes = df['latitude'].to_list() longitudes = df['longitude'].to_list() nearby_countries = df[df['longitude'].between(iss_location[1] -45, iss_location[1] + 45) & df['latitude'].between(iss_location[0] -45, iss_location[0] + 45)] # Current limited use is, must be near the center of the country. countries_nearby_list = nearby_countries['name'].to_list() country_list = [] def nearby_countries(): for country in countries_nearby_list: country_add = df.loc[(df['name'] == country)] country_name = country_add['name'].item() country_latitude = country_add['latitude'].item() country_longitude = country_add['longitude'].item() direction = [] if iss_lat < country_latitude: direction.append("North") else: direction.append("South") if iss_long < country_longitude: direction.append("East") else: direction.append("West") country = {country_name: country_add['name'].item(), 'Latitude': country_add['latitude'].item(), 'Longitude':country_add['longitude'].item(), "Direction": f"{' '.join(direction)} of ISS"} country_list.append(country) iss_lat = get_iss_location()[0] iss_long = get_iss_location()[1] def direction_NS(iss_lat, country_location): if iss_lat > country_location['latitude']: print(f"ISS is North of {country_location['name']}") return "North" else: print("South") return "South" def direction_WE(iss_long, country_location): if iss_lat > country_location['latitude']: print(f"ISS is North of {country_location['name']}") return "North" else: print("South") return "South" #Check if there are countries nearby if len(countries_nearby_list) != 0: # print(countries_nearby_list) print("\n=====================================================================================") print(f"COUNTRIES THE ISS IS CURRENTLY PASSING OVER - {len(countries_nearby_list)}") nearby_countries() 
print("=====================================================================================") print(country_list) else: print("=====================================================================================") print(f"ISS NOT CURRENTLY LOCATED CLOSE ENOUGH TO CENTER OF ANY COUNTRY FOR REFERENCE") nearby_countries() print("=====================================================================================") print("\n=====================================================================================") print("Current Location Data") print("Times are in UTC") print("=====================================================================================") #Suntimes API # All this code does, is return True if it's night time where the user currently is. # Most of this code is for printing local information accessed from API / learning / testing features / keeping it there for potential future expansion. # Essentially though, all it does is return True at night. def local_is_night(user_la, user_lo): #GET CURRENT TIME current_time = int(str(now).split(' ')[1].split('.')[0][:-3].replace(":","")) sun_times = get_sun(user_la,user_lo) # print(current_time) #SUNRISE FORMATTING INTO DIGITS sunrise = int(sun_times["results"]["sunrise"].split("T")[1].split("+")[0][:-3].replace(":","")) sunset = int(sun_times["results"]["sunset"].split("T")[1].split("+")[0][:-3].replace(":","")) #SUNSET IN LOCATION if sunset > current_time: print(f"Current time is {sunrise}") time_until_sunset = sunset - current_time print(time_until_sunset) if len(str(time_until_sunset)) <= 2: print(f"{time_until_sunset} Minutes left until sunset in Submitted Location") else: print(f"Calculate time until sunset in Submitted location - {str(time_until_sunset)[:-2] + ':' + list(str(time_until_sunset))[1] + list(str(time_until_sunset))[2]}") print(f"Sunset time is {sunset}") #SUNRISE IN LOCATION if sunrise > current_time: print("Time until Sunrise ( Morning ) ") #TIME UNTIL SUNRISE TOMORROW if sunrise < 
current_time and sunset < current_time: sunrise_time_tomorrow = 2400 - current_time + sunrise print(f"Time until sunrise tomorrow {sunrise_time_tomorrow}") #FORMATTING FOR TIME OUTPUT if len(str(sunrise)) == 3: print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[1] + list(str(sunrise))[2]}") else: print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[2] + list(str(sunrise))[3]}") #SUNSET TIME - ALWAYS 4 DIGITS LONG print(f"Sunset time is {str(sunset)[:-2] + ':' + list(str(sunset))[2] + list(str(sunset))[3]}") #IS DARK? if sunset < current_time: return True else: return False if len(user) == 3: # Can pass email here if needed user_email = user[2] local_is_night(user_la,user_lo) else: local_is_night(user_la,user_lo) def is_iss_overhead():
while program == True: if is_iss_overhead() and local_is_night(): while True: if is_iss_overhead() and local_is_night(): with smtplib.SMTP("smtp.gmail.com") as connection: connection.starttls() connection.login(user=CLIENT_EMAIL, password=CLIENT_PASSWORD) connection.sendmail( from_addr=CLIENT_EMAIL, to_addrs=f"{email}", msg=f"Subject:Look Up 👆, The ISS is above your head! \n\nNo message tag yet") else: time.sleep(60) # Bug Catcher used earlier for finding formatting issue in data # for l in latitudes: # if isinstance(l,str): # print("String Located")
print("Checking")
identifier_body
main.py
import requests import pandas as pd import datetime as dt import time import smtplib import json CLIENT_EMAIL = "" CLIENT_PASSWORD = "" program = True email = "david@creativewavelength.co.uk" now = dt.datetime.now() countries_csv = pd.read_csv("countries.csv", sep='\s*,\s*', engine='python') df = pd.DataFrame(countries_csv) df['latitude'] = df['latitude'].astype(float) # Calling Sun Times API def get_sun(lat,long): response = requests.get(url=f'https://api.sunrise-sunset.org/json?lat={lat}&lng={long}&formatted=0') sun_times = response.json() return sun_times #COLLECTS USER LAT / LONG & ASKS IF THEY WANT TO SUBMIT EMAIL FOR ALERTS. SAVES DATA TO JSON IF YES. def user_input(): user = {} search = False while search == False: search_area = input("Type in your country name / ISO code. \nOr type 'manual', if you want to use your own coordinates\n") if len(df.loc[(df['name'] == search_area.title())]) > 0: matched_result = df.loc[(df['name'] == search_area.title())] user_latitude = matched_result['latitude'].item() user_longitude = matched_result['longitude'].item() print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})") search = True elif len(df.loc[(df['country'] == search_area.upper())]) > 0: matched_result = df.loc[(df['country'] == search_area.upper())] user_latitude = matched_result['latitude'].item() user_longitude = matched_result['longitude'].item() print("=====================================================================================") print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})") print("=====================================================================================") search = True elif search_area.lower() == 'manual': user_latitude = input("Enter Latitude\n") user_longitude = input("Enter longitude\n") search = True else: print("Country not found in Database, check spelling or type 'manual' to enter 
location manually\n") question = input("Do you want email alerts when the ISS is visible from your location?\n") if question.lower() == "y" or question.lower() == "yes": print("Email Alert / Future use case / Testing") user_email = input("\nWhat is your email address?\nThis Application is not currently secured, please do not use a work / primary email address\n") new_entry = {'email': user_email, 'latitude': user_latitude, 'longitude':user_longitude} try: with open("users.json", "r") as user_file: # Reading old data data = pd.read_json("users.json") except FileNotFoundError: with open("users.json", "w") as user_file: default = {'email': ['david@creative-wavelength.com', 'porfirio.cd52000a@mailerq.net', 'rashad.0c3e9859@inboxeen.com', 'darrick.0694ea0c@creative-wavelength.com'], 'latitude': [40.463667, 53.41291, 37.09024, 35.86166], 'longitude': [-3.74922, -8.24389, -95.712891, 104.195397]} default_df = pd.DataFrame(default, columns=['email', 'latitude', 'longitude']) updated_df = default_df.append(new_entry, ignore_index=True) updated_df.to_json(r'users.json', indent=4) else: df_stored = pd.DataFrame(data) updated_df = df_stored.append(new_entry, ignore_index=True) updated_df.to_json(r'users.json',indent=4) finally: print("\nUser Lat, Long & Email Returned") return (user_latitude,user_longitude,user_email) else: print("user lat & user long returned only") return (user_latitude,user_longitude) user = user_input() user_la = user[0] user_lo = user[1] def get_iss_location(): response = requests.get(url="http://api.open-notify.org/iss-now.json") response.raise_for_status() data = response.json() latitude = float(data["iss_position"]["latitude"]) longitude = float(data["iss_position"]["longitude"]) return (latitude,longitude) iss_location = get_iss_location() def
(): ISS = get_iss_location() try: json_stored = pd.read_json('users.json') df_stored = pd.DataFrame(json_stored) print(f" ISS location = {ISS}") except FileNotFoundError: print("File not Found") return False else: print("df_stored") print(df_stored) condition = df_stored['latitude'].between(-45,45) print(condition) find_user() latitudes = df['latitude'].to_list() longitudes = df['longitude'].to_list() nearby_countries = df[df['longitude'].between(iss_location[1] -45, iss_location[1] + 45) & df['latitude'].between(iss_location[0] -45, iss_location[0] + 45)] # Current limited use is, must be near the center of the country. countries_nearby_list = nearby_countries['name'].to_list() country_list = [] def nearby_countries(): for country in countries_nearby_list: country_add = df.loc[(df['name'] == country)] country_name = country_add['name'].item() country_latitude = country_add['latitude'].item() country_longitude = country_add['longitude'].item() direction = [] if iss_lat < country_latitude: direction.append("North") else: direction.append("South") if iss_long < country_longitude: direction.append("East") else: direction.append("West") country = {country_name: country_add['name'].item(), 'Latitude': country_add['latitude'].item(), 'Longitude':country_add['longitude'].item(), "Direction": f"{' '.join(direction)} of ISS"} country_list.append(country) iss_lat = get_iss_location()[0] iss_long = get_iss_location()[1] def direction_NS(iss_lat, country_location): if iss_lat > country_location['latitude']: print(f"ISS is North of {country_location['name']}") return "North" else: print("South") return "South" def direction_WE(iss_long, country_location): if iss_lat > country_location['latitude']: print(f"ISS is North of {country_location['name']}") return "North" else: print("South") return "South" #Check if there are countries nearby if len(countries_nearby_list) != 0: # print(countries_nearby_list) 
print("\n=====================================================================================") print(f"COUNTRIES THE ISS IS CURRENTLY PASSING OVER - {len(countries_nearby_list)}") nearby_countries() print("=====================================================================================") print(country_list) else: print("=====================================================================================") print(f"ISS NOT CURRENTLY LOCATED CLOSE ENOUGH TO CENTER OF ANY COUNTRY FOR REFERENCE") nearby_countries() print("=====================================================================================") print("\n=====================================================================================") print("Current Location Data") print("Times are in UTC") print("=====================================================================================") #Suntimes API # All this code does, is return True if it's night time where the user currently is. # Most of this code is for printing local information accessed from API / learning / testing features / keeping it there for potential future expansion. # Essentially though, all it does is return True at night. 
def local_is_night(user_la, user_lo): #GET CURRENT TIME current_time = int(str(now).split(' ')[1].split('.')[0][:-3].replace(":","")) sun_times = get_sun(user_la,user_lo) # print(current_time) #SUNRISE FORMATTING INTO DIGITS sunrise = int(sun_times["results"]["sunrise"].split("T")[1].split("+")[0][:-3].replace(":","")) sunset = int(sun_times["results"]["sunset"].split("T")[1].split("+")[0][:-3].replace(":","")) #SUNSET IN LOCATION if sunset > current_time: print(f"Current time is {sunrise}") time_until_sunset = sunset - current_time print(time_until_sunset) if len(str(time_until_sunset)) <= 2: print(f"{time_until_sunset} Minutes left until sunset in Submitted Location") else: print(f"Calculate time until sunset in Submitted location - {str(time_until_sunset)[:-2] + ':' + list(str(time_until_sunset))[1] + list(str(time_until_sunset))[2]}") print(f"Sunset time is {sunset}") #SUNRISE IN LOCATION if sunrise > current_time: print("Time until Sunrise ( Morning ) ") #TIME UNTIL SUNRISE TOMORROW if sunrise < current_time and sunset < current_time: sunrise_time_tomorrow = 2400 - current_time + sunrise print(f"Time until sunrise tomorrow {sunrise_time_tomorrow}") #FORMATTING FOR TIME OUTPUT if len(str(sunrise)) == 3: print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[1] + list(str(sunrise))[2]}") else: print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[2] + list(str(sunrise))[3]}") #SUNSET TIME - ALWAYS 4 DIGITS LONG print(f"Sunset time is {str(sunset)[:-2] + ':' + list(str(sunset))[2] + list(str(sunset))[3]}") #IS DARK? 
if sunset < current_time: return True else: return False if len(user) == 3: # Can pass email here if needed user_email = user[2] local_is_night(user_la,user_lo) else: local_is_night(user_la,user_lo) def is_iss_overhead(): print("Checking") while program == True: if is_iss_overhead() and local_is_night(): while True: if is_iss_overhead() and local_is_night(): with smtplib.SMTP("smtp.gmail.com") as connection: connection.starttls() connection.login(user=CLIENT_EMAIL, password=CLIENT_PASSWORD) connection.sendmail( from_addr=CLIENT_EMAIL, to_addrs=f"{email}", msg=f"Subject:Look Up 👆, The ISS is above your head! \n\nNo message tag yet") else: time.sleep(60) # Bug Catcher used earlier for finding formatting issue in data # for l in latitudes: # if isinstance(l,str): # print("String Located")
find_user
identifier_name
main.py
import requests import pandas as pd import datetime as dt import time import smtplib import json CLIENT_EMAIL = "" CLIENT_PASSWORD = "" program = True email = "david@creativewavelength.co.uk" now = dt.datetime.now() countries_csv = pd.read_csv("countries.csv", sep='\s*,\s*', engine='python') df = pd.DataFrame(countries_csv) df['latitude'] = df['latitude'].astype(float) # Calling Sun Times API def get_sun(lat,long): response = requests.get(url=f'https://api.sunrise-sunset.org/json?lat={lat}&lng={long}&formatted=0') sun_times = response.json() return sun_times #COLLECTS USER LAT / LONG & ASKS IF THEY WANT TO SUBMIT EMAIL FOR ALERTS. SAVES DATA TO JSON IF YES. def user_input(): user = {} search = False while search == False: search_area = input("Type in your country name / ISO code. \nOr type 'manual', if you want to use your own coordinates\n") if len(df.loc[(df['name'] == search_area.title())]) > 0: matched_result = df.loc[(df['name'] == search_area.title())] user_latitude = matched_result['latitude'].item() user_longitude = matched_result['longitude'].item() print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})") search = True elif len(df.loc[(df['country'] == search_area.upper())]) > 0: matched_result = df.loc[(df['country'] == search_area.upper())] user_latitude = matched_result['latitude'].item() user_longitude = matched_result['longitude'].item() print("=====================================================================================") print(f"Database entry for {matched_result['name'].item()} used for latitude ({user_latitude}) & longitude({user_longitude})") print("=====================================================================================") search = True elif search_area.lower() == 'manual': user_latitude = input("Enter Latitude\n") user_longitude = input("Enter longitude\n") search = True else: print("Country not found in Database, check spelling or type 'manual' to enter 
location manually\n") question = input("Do you want email alerts when the ISS is visible from your location?\n") if question.lower() == "y" or question.lower() == "yes": print("Email Alert / Future use case / Testing") user_email = input("\nWhat is your email address?\nThis Application is not currently secured, please do not use a work / primary email address\n") new_entry = {'email': user_email, 'latitude': user_latitude, 'longitude':user_longitude} try: with open("users.json", "r") as user_file: # Reading old data data = pd.read_json("users.json") except FileNotFoundError: with open("users.json", "w") as user_file: default = {'email': ['david@creative-wavelength.com', 'porfirio.cd52000a@mailerq.net', 'rashad.0c3e9859@inboxeen.com', 'darrick.0694ea0c@creative-wavelength.com'], 'latitude': [40.463667, 53.41291, 37.09024, 35.86166], 'longitude': [-3.74922, -8.24389, -95.712891, 104.195397]} default_df = pd.DataFrame(default, columns=['email', 'latitude', 'longitude']) updated_df = default_df.append(new_entry, ignore_index=True) updated_df.to_json(r'users.json', indent=4) else: df_stored = pd.DataFrame(data) updated_df = df_stored.append(new_entry, ignore_index=True) updated_df.to_json(r'users.json',indent=4) finally: print("\nUser Lat, Long & Email Returned") return (user_latitude,user_longitude,user_email) else: print("user lat & user long returned only") return (user_latitude,user_longitude) user = user_input() user_la = user[0] user_lo = user[1] def get_iss_location(): response = requests.get(url="http://api.open-notify.org/iss-now.json") response.raise_for_status() data = response.json() latitude = float(data["iss_position"]["latitude"]) longitude = float(data["iss_position"]["longitude"]) return (latitude,longitude) iss_location = get_iss_location() def find_user(): ISS = get_iss_location() try: json_stored = pd.read_json('users.json') df_stored = pd.DataFrame(json_stored) print(f" ISS location = {ISS}") except FileNotFoundError: print("File not Found") return 
False else: print("df_stored") print(df_stored) condition = df_stored['latitude'].between(-45,45) print(condition) find_user() latitudes = df['latitude'].to_list() longitudes = df['longitude'].to_list() nearby_countries = df[df['longitude'].between(iss_location[1] -45, iss_location[1] + 45) & df['latitude'].between(iss_location[0] -45, iss_location[0] + 45)] # Current limited use is, must be near the center of the country. countries_nearby_list = nearby_countries['name'].to_list() country_list = [] def nearby_countries(): for country in countries_nearby_list: country_add = df.loc[(df['name'] == country)] country_name = country_add['name'].item() country_latitude = country_add['latitude'].item() country_longitude = country_add['longitude'].item() direction = [] if iss_lat < country_latitude: direction.append("North") else: direction.append("South") if iss_long < country_longitude: direction.append("East") else: direction.append("West") country = {country_name: country_add['name'].item(), 'Latitude': country_add['latitude'].item(), 'Longitude':country_add['longitude'].item(), "Direction": f"{' '.join(direction)} of ISS"} country_list.append(country) iss_lat = get_iss_location()[0] iss_long = get_iss_location()[1] def direction_NS(iss_lat, country_location): if iss_lat > country_location['latitude']: print(f"ISS is North of {country_location['name']}") return "North" else: print("South") return "South" def direction_WE(iss_long, country_location): if iss_lat > country_location['latitude']: print(f"ISS is North of {country_location['name']}") return "North" else: print("South") return "South" #Check if there are countries nearby if len(countries_nearby_list) != 0: # print(countries_nearby_list) print("\n=====================================================================================") print(f"COUNTRIES THE ISS IS CURRENTLY PASSING OVER - {len(countries_nearby_list)}") nearby_countries() 
print("=====================================================================================") print(country_list) else: print("=====================================================================================") print(f"ISS NOT CURRENTLY LOCATED CLOSE ENOUGH TO CENTER OF ANY COUNTRY FOR REFERENCE") nearby_countries() print("=====================================================================================") print("\n=====================================================================================") print("Current Location Data") print("Times are in UTC") print("=====================================================================================") #Suntimes API # All this code does, is return True if it's night time where the user currently is. # Most of this code is for printing local information accessed from API / learning / testing features / keeping it there for potential future expansion. # Essentially though, all it does is return True at night. def local_is_night(user_la, user_lo): #GET CURRENT TIME current_time = int(str(now).split(' ')[1].split('.')[0][:-3].replace(":","")) sun_times = get_sun(user_la,user_lo) # print(current_time) #SUNRISE FORMATTING INTO DIGITS sunrise = int(sun_times["results"]["sunrise"].split("T")[1].split("+")[0][:-3].replace(":","")) sunset = int(sun_times["results"]["sunset"].split("T")[1].split("+")[0][:-3].replace(":","")) #SUNSET IN LOCATION if sunset > current_time: print(f"Current time is {sunrise}") time_until_sunset = sunset - current_time print(time_until_sunset) if len(str(time_until_sunset)) <= 2: print(f"{time_until_sunset} Minutes left until sunset in Submitted Location") else: print(f"Calculate time until sunset in Submitted location - {str(time_until_sunset)[:-2] + ':' + list(str(time_until_sunset))[1] + list(str(time_until_sunset))[2]}") print(f"Sunset time is {sunset}") #SUNRISE IN LOCATION if sunrise > current_time: print("Time until Sunrise ( Morning ) ") #TIME UNTIL SUNRISE TOMORROW if sunrise < 
current_time and sunset < current_time: sunrise_time_tomorrow = 2400 - current_time + sunrise print(f"Time until sunrise tomorrow {sunrise_time_tomorrow}") #FORMATTING FOR TIME OUTPUT if len(str(sunrise)) == 3: print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[1] + list(str(sunrise))[2]}") else: print(f"Sunrise time is {str(sunrise)[:-2] + ':' + list(str(sunrise))[2] + list(str(sunrise))[3]}") #SUNSET TIME - ALWAYS 4 DIGITS LONG print(f"Sunset time is {str(sunset)[:-2] + ':' + list(str(sunset))[2] + list(str(sunset))[3]}") #IS DARK? if sunset < current_time: return True else: return False if len(user) == 3: # Can pass email here if needed
else: local_is_night(user_la,user_lo) def is_iss_overhead(): print("Checking") while program == True: if is_iss_overhead() and local_is_night(): while True: if is_iss_overhead() and local_is_night(): with smtplib.SMTP("smtp.gmail.com") as connection: connection.starttls() connection.login(user=CLIENT_EMAIL, password=CLIENT_PASSWORD) connection.sendmail( from_addr=CLIENT_EMAIL, to_addrs=f"{email}", msg=f"Subject:Look Up 👆, The ISS is above your head! \n\nNo message tag yet") else: time.sleep(60) # Bug Catcher used earlier for finding formatting issue in data # for l in latitudes: # if isinstance(l,str): # print("String Located")
user_email = user[2] local_is_night(user_la,user_lo)
conditional_block
helpers.py
import os import h5py from matplotlib.colors import Normalize import gzip import pandas as pd import numpy as np from matplotlib import cbook from numpy import ma from scipy.stats import pearsonr from sklearn.linear_model import LinearRegression from sklearn.decomposition import PCA import cv2 # import mahotas import scipy.stats as st import scipy as sp from tqdm import tqdm # from pebble import ProcessPool, ProcessExpired from concurrent.futures import TimeoutError from pyensembl import EnsemblRelease data = EnsemblRelease(77) from scipy.stats import norm GTEx_directory = '/hps/nobackup/research/stegle/users/willj/GTEx' os.environ['PYENSEMBL_CACHE_DIR'] = GTEx_directory class MidPointNorm(Normalize): """ Ensures that heatmap colour bars are zero centered. """ def __init__(self, midpoint=0, vmin=None, vmax=None, clip=False): Normalize.__init__(self,vmin, vmax, clip) self.midpoint = midpoint def __call__(self, value, clip=None): if clip is None: clip = self.clip result, is_scalar = self.process_value(value) self.autoscale_None(result) vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint if not (vmin < midpoint < vmax): raise ValueError("midpoint must be between maxvalue and minvalue.") elif vmin == vmax: result.fill(0) # Or should it be all masked? Or 0.5? elif vmin > vmax: raise ValueError("maxvalue must be bigger than minvalue") else: vmin = float(vmin) vmax = float(vmax) if clip: mask = ma.getmask(result) result = ma.array(np.clip(result.filled(vmax), vmin, vmax), mask=mask) # ma division is very slow; we can take a shortcut resdat = result.data #First scale to -1 to 1 range, than to from 0 to 1. resdat -= midpoint resdat[resdat>0] /= abs(vmax - midpoint) resdat[resdat<0] /= abs(vmin - midpoint) resdat /= 2. 
resdat += 0.5 result = ma.array(resdat, mask=result.mask, copy=False) if is_scalar: result = result[0] return result def inverse(self, value): if not self.scaled(): raise ValueError("Not invertible until scaled") vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint if cbook.iterable(value): val = ma.asarray(value) val = 2 * (val-0.5) val[val>0] *= abs(vmax - midpoint) val[val<0] *= abs(vmin - midpoint) val += midpoint return val else: val = 2 * (val - 0.5) if val < 0: return val*abs(vmin-midpoint) + midpoint else: return val*abs(vmax-midpoint) + midpoint def extract_final_layer_data(t, m, a, ps, genotypes=False, shuffle=False): with h5py.File(GTEx_directory + '/data/h5py/aggregated_features.h5py', 'r') as f: X = f[t]['ordered_expression'].value tIDs = f[t]['transcriptIDs'].value dIDs = f[t]['donorIDs'].value tfs, ths, t_idx = \ get_technical_factors(t, dIDs) size_group = f[t]['-1'][ps] Y = size_group[m][a]['ordered_aggregated_features'].value if shuffle: idx = np.array(range(Y.shape[0])) np.random.shuffle(idx) Y = Y[idx,:] Y[Y < 0] = 0 if genotypes: G = f[t]['ordered_genotypes'].value gIDs = f[t]['genotype_locations'].value return Y, X, G, dIDs, tIDs, gIDs, \ tfs, ths, t_idx else: return Y, X, dIDs, tIDs, \ tfs, ths, t_idx def extract_mid_layer_data(t, l, ca, m, a, ps): with h5py.File(GTEx_directory + '/data/h5py/aggregated_features.h5py', 'r') as f: expression = f[t]['ordered_expression'].value transcriptIDs = f[t]['transcriptIDs'].value donorIDs = f[t]['donorIDs'].value technical_factors, technical_headers, technical_idx = \ get_technical_factors(t, donorIDs) size_group = f[t][l][ca][ps] features = size_group[m][a]['ordered_aggregated_features'].value features[features < 0] = 0 return features, expression, donorIDs, transcriptIDs, \ technical_factors, technical_headers, technical_idx def get_technical_factors(tissue, donorIDs): phenotype_filepath = 
'/nfs/research2/stegle/stegle_secure/GTEx/download/49139/PhenoGenotypeFiles/RootStudyConsentSet_phs000424.GTEx.v6.p1.c1.GRU/PhenotypeFiles/phs000424.v6.pht002743.v6.p1.c1.GTEx_Sample_Attributes.GRU.txt.gz' with gzip.open(phenotype_filepath, 'rb') as f: g = f.read().splitlines() phenotype_array = [str(x, 'utf-8').split('\t') for x in g if not str(x, 'utf-8').startswith('#')] phenotype_array = phenotype_array[1:] phenotype_df = pd.DataFrame(phenotype_array) phenotype_df.columns = phenotype_df.iloc[0] phenotype_df = phenotype_df[1:] tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue] donorIDs = [x.decode('utf-8') for x in donorIDs] phenotype_donorIDs = [x.split('-')[1] for x in tissue_df['SAMPID']] phenotype_idx = [phenotype_donorIDs.index(ID) for ID in donorIDs] tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue] tissue_df = tissue_df.iloc[phenotype_idx, :] SMCENTER_dummy = pd.get_dummies(tissue_df['SMCENTER']) for d in SMCENTER_dummy.columns: tissue_df['SMCENTER_' + d] = SMCENTER_dummy[d] clean_tissue_df = pd.DataFrame() for col in tissue_df.columns: clean_factor = pd.to_numeric(tissue_df[col], errors='coerce') clean_tissue_df[col] = clean_factor clean_tissue_df = clean_tissue_df.dropna(how='all', axis=1) technical_idx = np.array(clean_tissue_df.isnull().sum(axis=1) == 0) clean_tissue_df = clean_tissue_df.dropna(how='any', axis=0) technical_factors, technical_headers = \ np.array(clean_tissue_df), clean_tissue_df.columns technical_headers = technical_headers[technical_factors.std(0) > 0] technical_factors = technical_factors[:,technical_factors.std(0) > 0] return technical_factors, technical_headers, technical_idx def filter_and_correct_expression_and_image_features(tissue, model, aggregation, patch_size, M, k, pc_correction=False, tf_correction=False): """ Computes M most varying pvalues across all patch sizes. - Filters to the top M most varying genes that have mean expression > k. 
Optional: - Performs PC correction - regresses out effect of first x PCs from image features, and substracts the first x PCs from the expression matrix. - Performs TF correction - regresses out effect of five PCs from both the image features, and expression. """ # Filter expression Y, X, dIDs, tIDs, tfs, ths, t_idx = extract_final_layer_data(tissue, model, aggregation, patch_size) filt_X, filt_tIDs, final_exp_idx = filter_expression(X, tIDs, M, k) if pc_correction: print ('Correcting with {} expression PCs'.format(pc_correction)) pca = PCA(n_components=pc_correction) pca_predictors = pca.fit_transform(filt_X) # Correct Y lr = LinearRegression() lr.fit(pca_predictors, Y) predicted_Y = lr.predict(pca_predictors) corrected_Y = Y - predicted_Y # Correct X projected_filt_X = np.dot(pca_predictors,pca.components_) corrected_filt_X = filt_X - projected_filt_X # Set as return variables final_X = corrected_filt_X final_Y = corrected_Y elif tf_correction: print('Correcting with all technical factors') tf_Y = Y[t_idx,:] tf_filt_X = filt_X[t_idx,:] tfs[list(ths).index('SMTSISCH')] = np.log2(tfs[list(ths).index('SMTSISCH')] + 1) tf_predictors = tfs #Correct Y lr_Y = LinearRegression() lr_Y.fit(tf_predictors, tf_Y) tf_Y_predicted = lr_Y.predict(tf_predictors) corrected_tf_Y = tf_Y - tf_Y_predicted #Correct X lr_X = LinearRegression() lr_X.fit(tf_predictors, tf_filt_X) tf_filt_X_predicted = lr_X.predict(tf_predictors) corrected_tf_filt_X = tf_filt_X - tf_filt_X_predicted # Set as return variables final_X = corrected_tf_filt_X final_Y = corrected_tf_Y else: # Set unmodified values as return variables final_X = filt_X final_Y = Y return final_Y, final_X, dIDs, filt_tIDs, tfs, ths, t_idx def filter_features(Y, N): """ Return top N varying image features. 
""" most_varying_feature_idx = np.argsort(np.std(Y, axis=0))[-N:] filt_Y = Y[:, most_varying_feature_idx] return filt_Y, most_varying_feature_idx def filter_expression(X, tIDs, M, k): """ Return top M varying transcripts, with mean expression > k, along with their transcript names. """ k_threshold_idx = np.mean(X, axis=0) > k M_varying_idx = np.argsort(np.std(X[:,k_threshold_idx], axis=0))[-M:] idx = np.array(list(range(X.shape[1]))) final_exp_idx = idx[k_threshold_idx][M_varying_idx] filt_X = X[:, final_exp_idx] filt_tIDs = tIDs[final_exp_idx] return filt_X, filt_tIDs, final_exp_idx def compute_pearsonR(Y, X, parallel=False, verbose=False): """ Perform pairwise associations between filt_features and filt_expression. Also computes pvalues for 1 random shuffles. """ # Make sure all features are > 0 # X[X < 0] = 0 N = Y.shape[1] M = X.shape[1] if parallel: print('Computing in parallel') results = {} shuffle = ['real', 'shuffle'] for sh in shuffle: print ("Shuffle: {}".format(sh)) Y_copy = Y.copy() shuf_idx = list(range(Y.shape[0])) if sh != 'real': np.random.shuffle(shuf_idx) Y_copy = Y_copy[shuf_idx, :] if parallel: def perform_pearsonr(idx): i, j = idx R, pv = pearsonr(Y_copy[:, i], X[:, j]) # pbar.update(1) return R, pv indicies = [] for i in range(N): for j in range(M): idx = (i,j) indicies.append(idx) import pathos import time pool = pathos.pools.ProcessPool(node=32) results = pool.map(perform_pearsonr, indicies) R_mat = np.array([x[0] for x in results]).reshape(N,M) pvs = np.array([x[1] for x in parallel_results]).reshape(N,M) else: pbar = tqdm(total=N*M) R_mat = np.zeros((N, M)) pvs = np.zeros((N, M)) for i in range(N): for j in range(M): R, pv = pearsonr(Y_copy[:, i], X[:, j]) R_mat[i, j] = R pvs[i, j] = pv pbar.update(1) pbar.close() results['Rs_{}'.format(sh)] = R_mat results['pvs_{}'.format(sh)] = pvs return results['Rs_real'], results['pvs_real'], results['pvs_shuffle'] def create_tissue_boundary(ID, tissue, patchsize): from openslide import open_slide 
image_filepath = os.path.join(GTEx_directory, 'data', 'raw', tissue, ID + '.svs') image_slide = open_slide(image_filepath) toplevel = image_slide.level_count - 1 topdim = image_slide.level_dimensions[-1] topdownsample = image_slide.level_downsamples[-1] topdownsampleint = int(topdownsample) toplevelslide = image_slide.read_region((0, 0), toplevel, topdim) toplevelslide = np.array(toplevelslide) toplevelslide = toplevelslide[:, :, 0:3] slide = toplevelslide blurredslide = cv2.GaussianBlur(slide, (51, 51), 0) blurredslide = cv2.cvtColor(blurredslide, cv2.COLOR_BGR2GRAY) T_otsu = mahotas.otsu(blurredslide) mask = np.zeros_like(slide) mask = mask[:, :, 0] mask[blurredslide < T_otsu] = 255 downsampledpatchsize = patchsize / topdownsampleint xlimit = int(topdim[1] / downsampledpatchsize) ylimit = int(topdim[0] / downsampledpatchsize) # Find downsampled coords coords = [] for i in range(xlimit): for j in range(ylimit): x = int(downsampledpatchsize/2 + i*downsampledpatchsize) y = int(downsampledpatchsize/2 + j*downsampledpatchsize) coords.append((x, y)) # Find coords in downsampled mask mask_coords = [] for c in coords: x = c[0] y = c[1] if mask[x, y] > 0: mask_coords.append(c) slidemarkings = slide.copy() for c in mask_coords: x = c[0] y = c[1] slidemarkings[x-3:x+3, y-3:y+3] = [0, 0, 255] return slide, mask, slidemarkings def top5_bottom5_image(tissue, model, patchsize, feature): """ Displays thumbnails of the top 5 and bottom 5 images that activate a given image features at a specific patchsize """ from openslide import open_slide features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, model, 'mean', patchsize) sorted_idx = np.argsort(features[:,feature - 1]) donorIDs_ordered = donorIDs[sorted_idx] tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue) LungGTExIDs = os.listdir(tissue_filepath) LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs] ordered_GTExIDs = 
np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]] topIDs = ordered_GTExIDs[-5:] bottomIDs = ordered_GTExIDs[:5] top_five_images = [] bottom_five_images = [] for (k,ID) in enumerate(topIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) x = slide.get_thumbnail(size=(400,400)) top_five_images.append(x) for (k,ID) in enumerate(bottomIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) x = slide.get_thumbnail(size=(400,400)) bottom_five_images.append(x) return top_five_images, bottom_five_images def estimate_lambda(pv): """estimate lambda form a set of PV""" LOD2 = sp.median(st.chi2.isf(pv, 1)) null_median = st.chi2.median(1) L = (LOD2 / null_median) return L def display_tissue_feature_gradient(feature, tissue): from openslide import open_slide features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, 'retrained', 'mean', '256') sorted_idx = np.argsort(features[:,feature - 1]) donorIDs_ordered = donorIDs[sorted_idx] gradient_IDs = [donorIDs_ordered[20*i] for i in range(13)] tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue) LungGTExIDs = os.listdir(tissue_filepath) LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs] ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]] thumbnails = [] pbar = tqdm(total=len(ordered_GTExIDs)) for (k,ID) in enumerate(ordered_GTExIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) thumbnail = slide.get_thumbnail(size=(400,400)) feature_value = features[:,feature - 1][sorted_idx[k]] thumbnails.append((thumbnail, feature_value)) pbar.update(1) return thumbnails def get_gene_name(transcript): transcript_id = transcript.decode('utf-8').split('.')[0] try: gene_name = 
data.gene_name_of_gene_id(transcript_id) except: gene_name = transcript_id return gene_name def
(original_feature): mu, std = norm.fit(original_feature) target = [np.random.normal()*std + mu for i in range(271)] result = quantile_normalize_using_target(original_feature, target) return result def quantile_normalize_using_target(x, target): """ Both `x` and `target` are numpy arrays of equal lengths. """ target_sorted = np.sort(target) return target_sorted[x.argsort().argsort()]
normalize_feature
identifier_name
helpers.py
import os import h5py from matplotlib.colors import Normalize import gzip import pandas as pd import numpy as np from matplotlib import cbook from numpy import ma from scipy.stats import pearsonr from sklearn.linear_model import LinearRegression from sklearn.decomposition import PCA import cv2 # import mahotas import scipy.stats as st import scipy as sp from tqdm import tqdm # from pebble import ProcessPool, ProcessExpired from concurrent.futures import TimeoutError from pyensembl import EnsemblRelease data = EnsemblRelease(77) from scipy.stats import norm GTEx_directory = '/hps/nobackup/research/stegle/users/willj/GTEx' os.environ['PYENSEMBL_CACHE_DIR'] = GTEx_directory class MidPointNorm(Normalize): """ Ensures that heatmap colour bars are zero centered. """ def __init__(self, midpoint=0, vmin=None, vmax=None, clip=False): Normalize.__init__(self,vmin, vmax, clip) self.midpoint = midpoint def __call__(self, value, clip=None): if clip is None: clip = self.clip result, is_scalar = self.process_value(value) self.autoscale_None(result) vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint if not (vmin < midpoint < vmax): raise ValueError("midpoint must be between maxvalue and minvalue.") elif vmin == vmax: result.fill(0) # Or should it be all masked? Or 0.5? elif vmin > vmax: raise ValueError("maxvalue must be bigger than minvalue") else: vmin = float(vmin) vmax = float(vmax) if clip: mask = ma.getmask(result) result = ma.array(np.clip(result.filled(vmax), vmin, vmax), mask=mask) # ma division is very slow; we can take a shortcut resdat = result.data #First scale to -1 to 1 range, than to from 0 to 1. resdat -= midpoint resdat[resdat>0] /= abs(vmax - midpoint) resdat[resdat<0] /= abs(vmin - midpoint) resdat /= 2. resdat += 0.5 result = ma.array(resdat, mask=result.mask, copy=False) if is_scalar: result = result[0] return result def inverse(self, value): if not self.scaled():
vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint if cbook.iterable(value): val = ma.asarray(value) val = 2 * (val-0.5) val[val>0] *= abs(vmax - midpoint) val[val<0] *= abs(vmin - midpoint) val += midpoint return val else: val = 2 * (val - 0.5) if val < 0: return val*abs(vmin-midpoint) + midpoint else: return val*abs(vmax-midpoint) + midpoint def extract_final_layer_data(t, m, a, ps, genotypes=False, shuffle=False): with h5py.File(GTEx_directory + '/data/h5py/aggregated_features.h5py', 'r') as f: X = f[t]['ordered_expression'].value tIDs = f[t]['transcriptIDs'].value dIDs = f[t]['donorIDs'].value tfs, ths, t_idx = \ get_technical_factors(t, dIDs) size_group = f[t]['-1'][ps] Y = size_group[m][a]['ordered_aggregated_features'].value if shuffle: idx = np.array(range(Y.shape[0])) np.random.shuffle(idx) Y = Y[idx,:] Y[Y < 0] = 0 if genotypes: G = f[t]['ordered_genotypes'].value gIDs = f[t]['genotype_locations'].value return Y, X, G, dIDs, tIDs, gIDs, \ tfs, ths, t_idx else: return Y, X, dIDs, tIDs, \ tfs, ths, t_idx def extract_mid_layer_data(t, l, ca, m, a, ps): with h5py.File(GTEx_directory + '/data/h5py/aggregated_features.h5py', 'r') as f: expression = f[t]['ordered_expression'].value transcriptIDs = f[t]['transcriptIDs'].value donorIDs = f[t]['donorIDs'].value technical_factors, technical_headers, technical_idx = \ get_technical_factors(t, donorIDs) size_group = f[t][l][ca][ps] features = size_group[m][a]['ordered_aggregated_features'].value features[features < 0] = 0 return features, expression, donorIDs, transcriptIDs, \ technical_factors, technical_headers, technical_idx def get_technical_factors(tissue, donorIDs): phenotype_filepath = '/nfs/research2/stegle/stegle_secure/GTEx/download/49139/PhenoGenotypeFiles/RootStudyConsentSet_phs000424.GTEx.v6.p1.c1.GRU/PhenotypeFiles/phs000424.v6.pht002743.v6.p1.c1.GTEx_Sample_Attributes.GRU.txt.gz' with gzip.open(phenotype_filepath, 'rb') as f: g = f.read().splitlines() phenotype_array = [str(x, 
'utf-8').split('\t') for x in g if not str(x, 'utf-8').startswith('#')] phenotype_array = phenotype_array[1:] phenotype_df = pd.DataFrame(phenotype_array) phenotype_df.columns = phenotype_df.iloc[0] phenotype_df = phenotype_df[1:] tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue] donorIDs = [x.decode('utf-8') for x in donorIDs] phenotype_donorIDs = [x.split('-')[1] for x in tissue_df['SAMPID']] phenotype_idx = [phenotype_donorIDs.index(ID) for ID in donorIDs] tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue] tissue_df = tissue_df.iloc[phenotype_idx, :] SMCENTER_dummy = pd.get_dummies(tissue_df['SMCENTER']) for d in SMCENTER_dummy.columns: tissue_df['SMCENTER_' + d] = SMCENTER_dummy[d] clean_tissue_df = pd.DataFrame() for col in tissue_df.columns: clean_factor = pd.to_numeric(tissue_df[col], errors='coerce') clean_tissue_df[col] = clean_factor clean_tissue_df = clean_tissue_df.dropna(how='all', axis=1) technical_idx = np.array(clean_tissue_df.isnull().sum(axis=1) == 0) clean_tissue_df = clean_tissue_df.dropna(how='any', axis=0) technical_factors, technical_headers = \ np.array(clean_tissue_df), clean_tissue_df.columns technical_headers = technical_headers[technical_factors.std(0) > 0] technical_factors = technical_factors[:,technical_factors.std(0) > 0] return technical_factors, technical_headers, technical_idx def filter_and_correct_expression_and_image_features(tissue, model, aggregation, patch_size, M, k, pc_correction=False, tf_correction=False): """ Computes M most varying pvalues across all patch sizes. - Filters to the top M most varying genes that have mean expression > k. Optional: - Performs PC correction - regresses out effect of first x PCs from image features, and substracts the first x PCs from the expression matrix. - Performs TF correction - regresses out effect of five PCs from both the image features, and expression. 
""" # Filter expression Y, X, dIDs, tIDs, tfs, ths, t_idx = extract_final_layer_data(tissue, model, aggregation, patch_size) filt_X, filt_tIDs, final_exp_idx = filter_expression(X, tIDs, M, k) if pc_correction: print ('Correcting with {} expression PCs'.format(pc_correction)) pca = PCA(n_components=pc_correction) pca_predictors = pca.fit_transform(filt_X) # Correct Y lr = LinearRegression() lr.fit(pca_predictors, Y) predicted_Y = lr.predict(pca_predictors) corrected_Y = Y - predicted_Y # Correct X projected_filt_X = np.dot(pca_predictors,pca.components_) corrected_filt_X = filt_X - projected_filt_X # Set as return variables final_X = corrected_filt_X final_Y = corrected_Y elif tf_correction: print('Correcting with all technical factors') tf_Y = Y[t_idx,:] tf_filt_X = filt_X[t_idx,:] tfs[list(ths).index('SMTSISCH')] = np.log2(tfs[list(ths).index('SMTSISCH')] + 1) tf_predictors = tfs #Correct Y lr_Y = LinearRegression() lr_Y.fit(tf_predictors, tf_Y) tf_Y_predicted = lr_Y.predict(tf_predictors) corrected_tf_Y = tf_Y - tf_Y_predicted #Correct X lr_X = LinearRegression() lr_X.fit(tf_predictors, tf_filt_X) tf_filt_X_predicted = lr_X.predict(tf_predictors) corrected_tf_filt_X = tf_filt_X - tf_filt_X_predicted # Set as return variables final_X = corrected_tf_filt_X final_Y = corrected_tf_Y else: # Set unmodified values as return variables final_X = filt_X final_Y = Y return final_Y, final_X, dIDs, filt_tIDs, tfs, ths, t_idx def filter_features(Y, N): """ Return top N varying image features. """ most_varying_feature_idx = np.argsort(np.std(Y, axis=0))[-N:] filt_Y = Y[:, most_varying_feature_idx] return filt_Y, most_varying_feature_idx def filter_expression(X, tIDs, M, k): """ Return top M varying transcripts, with mean expression > k, along with their transcript names. 
""" k_threshold_idx = np.mean(X, axis=0) > k M_varying_idx = np.argsort(np.std(X[:,k_threshold_idx], axis=0))[-M:] idx = np.array(list(range(X.shape[1]))) final_exp_idx = idx[k_threshold_idx][M_varying_idx] filt_X = X[:, final_exp_idx] filt_tIDs = tIDs[final_exp_idx] return filt_X, filt_tIDs, final_exp_idx def compute_pearsonR(Y, X, parallel=False, verbose=False): """ Perform pairwise associations between filt_features and filt_expression. Also computes pvalues for 1 random shuffles. """ # Make sure all features are > 0 # X[X < 0] = 0 N = Y.shape[1] M = X.shape[1] if parallel: print('Computing in parallel') results = {} shuffle = ['real', 'shuffle'] for sh in shuffle: print ("Shuffle: {}".format(sh)) Y_copy = Y.copy() shuf_idx = list(range(Y.shape[0])) if sh != 'real': np.random.shuffle(shuf_idx) Y_copy = Y_copy[shuf_idx, :] if parallel: def perform_pearsonr(idx): i, j = idx R, pv = pearsonr(Y_copy[:, i], X[:, j]) # pbar.update(1) return R, pv indicies = [] for i in range(N): for j in range(M): idx = (i,j) indicies.append(idx) import pathos import time pool = pathos.pools.ProcessPool(node=32) results = pool.map(perform_pearsonr, indicies) R_mat = np.array([x[0] for x in results]).reshape(N,M) pvs = np.array([x[1] for x in parallel_results]).reshape(N,M) else: pbar = tqdm(total=N*M) R_mat = np.zeros((N, M)) pvs = np.zeros((N, M)) for i in range(N): for j in range(M): R, pv = pearsonr(Y_copy[:, i], X[:, j]) R_mat[i, j] = R pvs[i, j] = pv pbar.update(1) pbar.close() results['Rs_{}'.format(sh)] = R_mat results['pvs_{}'.format(sh)] = pvs return results['Rs_real'], results['pvs_real'], results['pvs_shuffle'] def create_tissue_boundary(ID, tissue, patchsize): from openslide import open_slide image_filepath = os.path.join(GTEx_directory, 'data', 'raw', tissue, ID + '.svs') image_slide = open_slide(image_filepath) toplevel = image_slide.level_count - 1 topdim = image_slide.level_dimensions[-1] topdownsample = image_slide.level_downsamples[-1] topdownsampleint = 
int(topdownsample) toplevelslide = image_slide.read_region((0, 0), toplevel, topdim) toplevelslide = np.array(toplevelslide) toplevelslide = toplevelslide[:, :, 0:3] slide = toplevelslide blurredslide = cv2.GaussianBlur(slide, (51, 51), 0) blurredslide = cv2.cvtColor(blurredslide, cv2.COLOR_BGR2GRAY) T_otsu = mahotas.otsu(blurredslide) mask = np.zeros_like(slide) mask = mask[:, :, 0] mask[blurredslide < T_otsu] = 255 downsampledpatchsize = patchsize / topdownsampleint xlimit = int(topdim[1] / downsampledpatchsize) ylimit = int(topdim[0] / downsampledpatchsize) # Find downsampled coords coords = [] for i in range(xlimit): for j in range(ylimit): x = int(downsampledpatchsize/2 + i*downsampledpatchsize) y = int(downsampledpatchsize/2 + j*downsampledpatchsize) coords.append((x, y)) # Find coords in downsampled mask mask_coords = [] for c in coords: x = c[0] y = c[1] if mask[x, y] > 0: mask_coords.append(c) slidemarkings = slide.copy() for c in mask_coords: x = c[0] y = c[1] slidemarkings[x-3:x+3, y-3:y+3] = [0, 0, 255] return slide, mask, slidemarkings def top5_bottom5_image(tissue, model, patchsize, feature): """ Displays thumbnails of the top 5 and bottom 5 images that activate a given image features at a specific patchsize """ from openslide import open_slide features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, model, 'mean', patchsize) sorted_idx = np.argsort(features[:,feature - 1]) donorIDs_ordered = donorIDs[sorted_idx] tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue) LungGTExIDs = os.listdir(tissue_filepath) LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs] ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]] topIDs = ordered_GTExIDs[-5:] bottomIDs = ordered_GTExIDs[:5] top_five_images = [] bottom_five_images = [] for (k,ID) in enumerate(topIDs): image_filepath = 
os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) x = slide.get_thumbnail(size=(400,400)) top_five_images.append(x) for (k,ID) in enumerate(bottomIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) x = slide.get_thumbnail(size=(400,400)) bottom_five_images.append(x) return top_five_images, bottom_five_images def estimate_lambda(pv): """estimate lambda form a set of PV""" LOD2 = sp.median(st.chi2.isf(pv, 1)) null_median = st.chi2.median(1) L = (LOD2 / null_median) return L def display_tissue_feature_gradient(feature, tissue): from openslide import open_slide features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, 'retrained', 'mean', '256') sorted_idx = np.argsort(features[:,feature - 1]) donorIDs_ordered = donorIDs[sorted_idx] gradient_IDs = [donorIDs_ordered[20*i] for i in range(13)] tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue) LungGTExIDs = os.listdir(tissue_filepath) LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs] ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]] thumbnails = [] pbar = tqdm(total=len(ordered_GTExIDs)) for (k,ID) in enumerate(ordered_GTExIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) thumbnail = slide.get_thumbnail(size=(400,400)) feature_value = features[:,feature - 1][sorted_idx[k]] thumbnails.append((thumbnail, feature_value)) pbar.update(1) return thumbnails def get_gene_name(transcript): transcript_id = transcript.decode('utf-8').split('.')[0] try: gene_name = data.gene_name_of_gene_id(transcript_id) except: gene_name = transcript_id return gene_name def normalize_feature(original_feature): mu, std = norm.fit(original_feature) target = [np.random.normal()*std + mu for i in range(271)] result = 
quantile_normalize_using_target(original_feature, target) return result def quantile_normalize_using_target(x, target): """ Both `x` and `target` are numpy arrays of equal lengths. """ target_sorted = np.sort(target) return target_sorted[x.argsort().argsort()]
raise ValueError("Not invertible until scaled")
conditional_block
helpers.py
import os import h5py from matplotlib.colors import Normalize import gzip import pandas as pd import numpy as np from matplotlib import cbook from numpy import ma from scipy.stats import pearsonr from sklearn.linear_model import LinearRegression from sklearn.decomposition import PCA import cv2 # import mahotas import scipy.stats as st import scipy as sp from tqdm import tqdm # from pebble import ProcessPool, ProcessExpired from concurrent.futures import TimeoutError from pyensembl import EnsemblRelease data = EnsemblRelease(77) from scipy.stats import norm GTEx_directory = '/hps/nobackup/research/stegle/users/willj/GTEx' os.environ['PYENSEMBL_CACHE_DIR'] = GTEx_directory class MidPointNorm(Normalize): """ Ensures that heatmap colour bars are zero centered. """ def __init__(self, midpoint=0, vmin=None, vmax=None, clip=False): Normalize.__init__(self,vmin, vmax, clip) self.midpoint = midpoint def __call__(self, value, clip=None): if clip is None: clip = self.clip result, is_scalar = self.process_value(value) self.autoscale_None(result) vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint if not (vmin < midpoint < vmax): raise ValueError("midpoint must be between maxvalue and minvalue.") elif vmin == vmax: result.fill(0) # Or should it be all masked? Or 0.5? elif vmin > vmax: raise ValueError("maxvalue must be bigger than minvalue") else: vmin = float(vmin) vmax = float(vmax) if clip: mask = ma.getmask(result) result = ma.array(np.clip(result.filled(vmax), vmin, vmax), mask=mask) # ma division is very slow; we can take a shortcut resdat = result.data #First scale to -1 to 1 range, than to from 0 to 1. resdat -= midpoint resdat[resdat>0] /= abs(vmax - midpoint) resdat[resdat<0] /= abs(vmin - midpoint) resdat /= 2. 
resdat += 0.5 result = ma.array(resdat, mask=result.mask, copy=False) if is_scalar: result = result[0] return result def inverse(self, value): if not self.scaled(): raise ValueError("Not invertible until scaled") vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint if cbook.iterable(value): val = ma.asarray(value) val = 2 * (val-0.5) val[val>0] *= abs(vmax - midpoint) val[val<0] *= abs(vmin - midpoint) val += midpoint return val else: val = 2 * (val - 0.5) if val < 0: return val*abs(vmin-midpoint) + midpoint else: return val*abs(vmax-midpoint) + midpoint def extract_final_layer_data(t, m, a, ps, genotypes=False, shuffle=False): with h5py.File(GTEx_directory + '/data/h5py/aggregated_features.h5py', 'r') as f: X = f[t]['ordered_expression'].value tIDs = f[t]['transcriptIDs'].value dIDs = f[t]['donorIDs'].value tfs, ths, t_idx = \ get_technical_factors(t, dIDs) size_group = f[t]['-1'][ps] Y = size_group[m][a]['ordered_aggregated_features'].value if shuffle: idx = np.array(range(Y.shape[0])) np.random.shuffle(idx) Y = Y[idx,:] Y[Y < 0] = 0 if genotypes: G = f[t]['ordered_genotypes'].value gIDs = f[t]['genotype_locations'].value return Y, X, G, dIDs, tIDs, gIDs, \ tfs, ths, t_idx else: return Y, X, dIDs, tIDs, \ tfs, ths, t_idx def extract_mid_layer_data(t, l, ca, m, a, ps): with h5py.File(GTEx_directory + '/data/h5py/aggregated_features.h5py', 'r') as f: expression = f[t]['ordered_expression'].value transcriptIDs = f[t]['transcriptIDs'].value donorIDs = f[t]['donorIDs'].value technical_factors, technical_headers, technical_idx = \ get_technical_factors(t, donorIDs) size_group = f[t][l][ca][ps] features = size_group[m][a]['ordered_aggregated_features'].value features[features < 0] = 0 return features, expression, donorIDs, transcriptIDs, \ technical_factors, technical_headers, technical_idx def get_technical_factors(tissue, donorIDs): phenotype_filepath = 
'/nfs/research2/stegle/stegle_secure/GTEx/download/49139/PhenoGenotypeFiles/RootStudyConsentSet_phs000424.GTEx.v6.p1.c1.GRU/PhenotypeFiles/phs000424.v6.pht002743.v6.p1.c1.GTEx_Sample_Attributes.GRU.txt.gz' with gzip.open(phenotype_filepath, 'rb') as f: g = f.read().splitlines() phenotype_array = [str(x, 'utf-8').split('\t') for x in g if not str(x, 'utf-8').startswith('#')] phenotype_array = phenotype_array[1:] phenotype_df = pd.DataFrame(phenotype_array) phenotype_df.columns = phenotype_df.iloc[0] phenotype_df = phenotype_df[1:] tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue] donorIDs = [x.decode('utf-8') for x in donorIDs] phenotype_donorIDs = [x.split('-')[1] for x in tissue_df['SAMPID']] phenotype_idx = [phenotype_donorIDs.index(ID) for ID in donorIDs] tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue] tissue_df = tissue_df.iloc[phenotype_idx, :] SMCENTER_dummy = pd.get_dummies(tissue_df['SMCENTER']) for d in SMCENTER_dummy.columns: tissue_df['SMCENTER_' + d] = SMCENTER_dummy[d] clean_tissue_df = pd.DataFrame() for col in tissue_df.columns: clean_factor = pd.to_numeric(tissue_df[col], errors='coerce') clean_tissue_df[col] = clean_factor clean_tissue_df = clean_tissue_df.dropna(how='all', axis=1) technical_idx = np.array(clean_tissue_df.isnull().sum(axis=1) == 0) clean_tissue_df = clean_tissue_df.dropna(how='any', axis=0) technical_factors, technical_headers = \ np.array(clean_tissue_df), clean_tissue_df.columns technical_headers = technical_headers[technical_factors.std(0) > 0] technical_factors = technical_factors[:,technical_factors.std(0) > 0] return technical_factors, technical_headers, technical_idx def filter_and_correct_expression_and_image_features(tissue, model, aggregation, patch_size, M, k, pc_correction=False, tf_correction=False): """ Computes M most varying pvalues across all patch sizes. - Filters to the top M most varying genes that have mean expression > k. 
Optional: - Performs PC correction - regresses out effect of first x PCs from image features, and substracts the first x PCs from the expression matrix. - Performs TF correction - regresses out effect of five PCs from both the image features, and expression. """ # Filter expression Y, X, dIDs, tIDs, tfs, ths, t_idx = extract_final_layer_data(tissue, model, aggregation, patch_size) filt_X, filt_tIDs, final_exp_idx = filter_expression(X, tIDs, M, k) if pc_correction: print ('Correcting with {} expression PCs'.format(pc_correction)) pca = PCA(n_components=pc_correction) pca_predictors = pca.fit_transform(filt_X) # Correct Y lr = LinearRegression() lr.fit(pca_predictors, Y) predicted_Y = lr.predict(pca_predictors) corrected_Y = Y - predicted_Y # Correct X projected_filt_X = np.dot(pca_predictors,pca.components_) corrected_filt_X = filt_X - projected_filt_X # Set as return variables final_X = corrected_filt_X final_Y = corrected_Y elif tf_correction: print('Correcting with all technical factors') tf_Y = Y[t_idx,:] tf_filt_X = filt_X[t_idx,:] tfs[list(ths).index('SMTSISCH')] = np.log2(tfs[list(ths).index('SMTSISCH')] + 1) tf_predictors = tfs #Correct Y lr_Y = LinearRegression() lr_Y.fit(tf_predictors, tf_Y) tf_Y_predicted = lr_Y.predict(tf_predictors) corrected_tf_Y = tf_Y - tf_Y_predicted #Correct X lr_X = LinearRegression() lr_X.fit(tf_predictors, tf_filt_X) tf_filt_X_predicted = lr_X.predict(tf_predictors) corrected_tf_filt_X = tf_filt_X - tf_filt_X_predicted # Set as return variables final_X = corrected_tf_filt_X final_Y = corrected_tf_Y else: # Set unmodified values as return variables final_X = filt_X final_Y = Y return final_Y, final_X, dIDs, filt_tIDs, tfs, ths, t_idx def filter_features(Y, N): """ Return top N varying image features. 
""" most_varying_feature_idx = np.argsort(np.std(Y, axis=0))[-N:] filt_Y = Y[:, most_varying_feature_idx] return filt_Y, most_varying_feature_idx def filter_expression(X, tIDs, M, k): """ Return top M varying transcripts, with mean expression > k, along with their transcript names. """ k_threshold_idx = np.mean(X, axis=0) > k M_varying_idx = np.argsort(np.std(X[:,k_threshold_idx], axis=0))[-M:] idx = np.array(list(range(X.shape[1]))) final_exp_idx = idx[k_threshold_idx][M_varying_idx] filt_X = X[:, final_exp_idx] filt_tIDs = tIDs[final_exp_idx] return filt_X, filt_tIDs, final_exp_idx def compute_pearsonR(Y, X, parallel=False, verbose=False): """ Perform pairwise associations between filt_features and filt_expression. Also computes pvalues for 1 random shuffles. """ # Make sure all features are > 0 # X[X < 0] = 0 N = Y.shape[1] M = X.shape[1] if parallel: print('Computing in parallel') results = {} shuffle = ['real', 'shuffle'] for sh in shuffle: print ("Shuffle: {}".format(sh)) Y_copy = Y.copy() shuf_idx = list(range(Y.shape[0])) if sh != 'real': np.random.shuffle(shuf_idx) Y_copy = Y_copy[shuf_idx, :] if parallel: def perform_pearsonr(idx): i, j = idx R, pv = pearsonr(Y_copy[:, i], X[:, j]) # pbar.update(1) return R, pv indicies = [] for i in range(N): for j in range(M): idx = (i,j) indicies.append(idx) import pathos import time pool = pathos.pools.ProcessPool(node=32) results = pool.map(perform_pearsonr, indicies) R_mat = np.array([x[0] for x in results]).reshape(N,M) pvs = np.array([x[1] for x in parallel_results]).reshape(N,M) else: pbar = tqdm(total=N*M) R_mat = np.zeros((N, M)) pvs = np.zeros((N, M)) for i in range(N): for j in range(M): R, pv = pearsonr(Y_copy[:, i], X[:, j]) R_mat[i, j] = R pvs[i, j] = pv pbar.update(1) pbar.close() results['Rs_{}'.format(sh)] = R_mat results['pvs_{}'.format(sh)] = pvs return results['Rs_real'], results['pvs_real'], results['pvs_shuffle'] def create_tissue_boundary(ID, tissue, patchsize): from openslide import open_slide 
image_filepath = os.path.join(GTEx_directory, 'data', 'raw', tissue, ID + '.svs') image_slide = open_slide(image_filepath) toplevel = image_slide.level_count - 1 topdim = image_slide.level_dimensions[-1] topdownsample = image_slide.level_downsamples[-1] topdownsampleint = int(topdownsample) toplevelslide = image_slide.read_region((0, 0), toplevel, topdim) toplevelslide = np.array(toplevelslide) toplevelslide = toplevelslide[:, :, 0:3] slide = toplevelslide
blurredslide = cv2.GaussianBlur(slide, (51, 51), 0) blurredslide = cv2.cvtColor(blurredslide, cv2.COLOR_BGR2GRAY) T_otsu = mahotas.otsu(blurredslide) mask = np.zeros_like(slide) mask = mask[:, :, 0] mask[blurredslide < T_otsu] = 255 downsampledpatchsize = patchsize / topdownsampleint xlimit = int(topdim[1] / downsampledpatchsize) ylimit = int(topdim[0] / downsampledpatchsize) # Find downsampled coords coords = [] for i in range(xlimit): for j in range(ylimit): x = int(downsampledpatchsize/2 + i*downsampledpatchsize) y = int(downsampledpatchsize/2 + j*downsampledpatchsize) coords.append((x, y)) # Find coords in downsampled mask mask_coords = [] for c in coords: x = c[0] y = c[1] if mask[x, y] > 0: mask_coords.append(c) slidemarkings = slide.copy() for c in mask_coords: x = c[0] y = c[1] slidemarkings[x-3:x+3, y-3:y+3] = [0, 0, 255] return slide, mask, slidemarkings def top5_bottom5_image(tissue, model, patchsize, feature): """ Displays thumbnails of the top 5 and bottom 5 images that activate a given image features at a specific patchsize """ from openslide import open_slide features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, model, 'mean', patchsize) sorted_idx = np.argsort(features[:,feature - 1]) donorIDs_ordered = donorIDs[sorted_idx] tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue) LungGTExIDs = os.listdir(tissue_filepath) LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs] ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]] topIDs = ordered_GTExIDs[-5:] bottomIDs = ordered_GTExIDs[:5] top_five_images = [] bottom_five_images = [] for (k,ID) in enumerate(topIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) x = slide.get_thumbnail(size=(400,400)) top_five_images.append(x) for (k,ID) in enumerate(bottomIDs): image_filepath = 
os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) x = slide.get_thumbnail(size=(400,400)) bottom_five_images.append(x) return top_five_images, bottom_five_images def estimate_lambda(pv): """estimate lambda form a set of PV""" LOD2 = sp.median(st.chi2.isf(pv, 1)) null_median = st.chi2.median(1) L = (LOD2 / null_median) return L def display_tissue_feature_gradient(feature, tissue): from openslide import open_slide features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, 'retrained', 'mean', '256') sorted_idx = np.argsort(features[:,feature - 1]) donorIDs_ordered = donorIDs[sorted_idx] gradient_IDs = [donorIDs_ordered[20*i] for i in range(13)] tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue) LungGTExIDs = os.listdir(tissue_filepath) LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs] ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]] thumbnails = [] pbar = tqdm(total=len(ordered_GTExIDs)) for (k,ID) in enumerate(ordered_GTExIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) thumbnail = slide.get_thumbnail(size=(400,400)) feature_value = features[:,feature - 1][sorted_idx[k]] thumbnails.append((thumbnail, feature_value)) pbar.update(1) return thumbnails def get_gene_name(transcript): transcript_id = transcript.decode('utf-8').split('.')[0] try: gene_name = data.gene_name_of_gene_id(transcript_id) except: gene_name = transcript_id return gene_name def normalize_feature(original_feature): mu, std = norm.fit(original_feature) target = [np.random.normal()*std + mu for i in range(271)] result = quantile_normalize_using_target(original_feature, target) return result def quantile_normalize_using_target(x, target): """ Both `x` and `target` are numpy arrays of equal lengths. 
""" target_sorted = np.sort(target) return target_sorted[x.argsort().argsort()]
random_line_split
helpers.py
import os import h5py from matplotlib.colors import Normalize import gzip import pandas as pd import numpy as np from matplotlib import cbook from numpy import ma from scipy.stats import pearsonr from sklearn.linear_model import LinearRegression from sklearn.decomposition import PCA import cv2 # import mahotas import scipy.stats as st import scipy as sp from tqdm import tqdm # from pebble import ProcessPool, ProcessExpired from concurrent.futures import TimeoutError from pyensembl import EnsemblRelease data = EnsemblRelease(77) from scipy.stats import norm GTEx_directory = '/hps/nobackup/research/stegle/users/willj/GTEx' os.environ['PYENSEMBL_CACHE_DIR'] = GTEx_directory class MidPointNorm(Normalize): """ Ensures that heatmap colour bars are zero centered. """ def __init__(self, midpoint=0, vmin=None, vmax=None, clip=False): Normalize.__init__(self,vmin, vmax, clip) self.midpoint = midpoint def __call__(self, value, clip=None): if clip is None: clip = self.clip result, is_scalar = self.process_value(value) self.autoscale_None(result) vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint if not (vmin < midpoint < vmax): raise ValueError("midpoint must be between maxvalue and minvalue.") elif vmin == vmax: result.fill(0) # Or should it be all masked? Or 0.5? elif vmin > vmax: raise ValueError("maxvalue must be bigger than minvalue") else: vmin = float(vmin) vmax = float(vmax) if clip: mask = ma.getmask(result) result = ma.array(np.clip(result.filled(vmax), vmin, vmax), mask=mask) # ma division is very slow; we can take a shortcut resdat = result.data #First scale to -1 to 1 range, than to from 0 to 1. resdat -= midpoint resdat[resdat>0] /= abs(vmax - midpoint) resdat[resdat<0] /= abs(vmin - midpoint) resdat /= 2. 
resdat += 0.5 result = ma.array(resdat, mask=result.mask, copy=False) if is_scalar: result = result[0] return result def inverse(self, value): if not self.scaled(): raise ValueError("Not invertible until scaled") vmin, vmax, midpoint = self.vmin, self.vmax, self.midpoint if cbook.iterable(value): val = ma.asarray(value) val = 2 * (val-0.5) val[val>0] *= abs(vmax - midpoint) val[val<0] *= abs(vmin - midpoint) val += midpoint return val else: val = 2 * (val - 0.5) if val < 0: return val*abs(vmin-midpoint) + midpoint else: return val*abs(vmax-midpoint) + midpoint def extract_final_layer_data(t, m, a, ps, genotypes=False, shuffle=False): with h5py.File(GTEx_directory + '/data/h5py/aggregated_features.h5py', 'r') as f: X = f[t]['ordered_expression'].value tIDs = f[t]['transcriptIDs'].value dIDs = f[t]['donorIDs'].value tfs, ths, t_idx = \ get_technical_factors(t, dIDs) size_group = f[t]['-1'][ps] Y = size_group[m][a]['ordered_aggregated_features'].value if shuffle: idx = np.array(range(Y.shape[0])) np.random.shuffle(idx) Y = Y[idx,:] Y[Y < 0] = 0 if genotypes: G = f[t]['ordered_genotypes'].value gIDs = f[t]['genotype_locations'].value return Y, X, G, dIDs, tIDs, gIDs, \ tfs, ths, t_idx else: return Y, X, dIDs, tIDs, \ tfs, ths, t_idx def extract_mid_layer_data(t, l, ca, m, a, ps): with h5py.File(GTEx_directory + '/data/h5py/aggregated_features.h5py', 'r') as f: expression = f[t]['ordered_expression'].value transcriptIDs = f[t]['transcriptIDs'].value donorIDs = f[t]['donorIDs'].value technical_factors, technical_headers, technical_idx = \ get_technical_factors(t, donorIDs) size_group = f[t][l][ca][ps] features = size_group[m][a]['ordered_aggregated_features'].value features[features < 0] = 0 return features, expression, donorIDs, transcriptIDs, \ technical_factors, technical_headers, technical_idx def get_technical_factors(tissue, donorIDs): phenotype_filepath = 
'/nfs/research2/stegle/stegle_secure/GTEx/download/49139/PhenoGenotypeFiles/RootStudyConsentSet_phs000424.GTEx.v6.p1.c1.GRU/PhenotypeFiles/phs000424.v6.pht002743.v6.p1.c1.GTEx_Sample_Attributes.GRU.txt.gz' with gzip.open(phenotype_filepath, 'rb') as f: g = f.read().splitlines() phenotype_array = [str(x, 'utf-8').split('\t') for x in g if not str(x, 'utf-8').startswith('#')] phenotype_array = phenotype_array[1:] phenotype_df = pd.DataFrame(phenotype_array) phenotype_df.columns = phenotype_df.iloc[0] phenotype_df = phenotype_df[1:] tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue] donorIDs = [x.decode('utf-8') for x in donorIDs] phenotype_donorIDs = [x.split('-')[1] for x in tissue_df['SAMPID']] phenotype_idx = [phenotype_donorIDs.index(ID) for ID in donorIDs] tissue_df = phenotype_df[phenotype_df['SMTSD'] == tissue] tissue_df = tissue_df.iloc[phenotype_idx, :] SMCENTER_dummy = pd.get_dummies(tissue_df['SMCENTER']) for d in SMCENTER_dummy.columns: tissue_df['SMCENTER_' + d] = SMCENTER_dummy[d] clean_tissue_df = pd.DataFrame() for col in tissue_df.columns: clean_factor = pd.to_numeric(tissue_df[col], errors='coerce') clean_tissue_df[col] = clean_factor clean_tissue_df = clean_tissue_df.dropna(how='all', axis=1) technical_idx = np.array(clean_tissue_df.isnull().sum(axis=1) == 0) clean_tissue_df = clean_tissue_df.dropna(how='any', axis=0) technical_factors, technical_headers = \ np.array(clean_tissue_df), clean_tissue_df.columns technical_headers = technical_headers[technical_factors.std(0) > 0] technical_factors = technical_factors[:,technical_factors.std(0) > 0] return technical_factors, technical_headers, technical_idx def filter_and_correct_expression_and_image_features(tissue, model, aggregation, patch_size, M, k, pc_correction=False, tf_correction=False): """ Computes M most varying pvalues across all patch sizes. - Filters to the top M most varying genes that have mean expression > k. 
Optional: - Performs PC correction - regresses out effect of first x PCs from image features, and substracts the first x PCs from the expression matrix. - Performs TF correction - regresses out effect of five PCs from both the image features, and expression. """ # Filter expression Y, X, dIDs, tIDs, tfs, ths, t_idx = extract_final_layer_data(tissue, model, aggregation, patch_size) filt_X, filt_tIDs, final_exp_idx = filter_expression(X, tIDs, M, k) if pc_correction: print ('Correcting with {} expression PCs'.format(pc_correction)) pca = PCA(n_components=pc_correction) pca_predictors = pca.fit_transform(filt_X) # Correct Y lr = LinearRegression() lr.fit(pca_predictors, Y) predicted_Y = lr.predict(pca_predictors) corrected_Y = Y - predicted_Y # Correct X projected_filt_X = np.dot(pca_predictors,pca.components_) corrected_filt_X = filt_X - projected_filt_X # Set as return variables final_X = corrected_filt_X final_Y = corrected_Y elif tf_correction: print('Correcting with all technical factors') tf_Y = Y[t_idx,:] tf_filt_X = filt_X[t_idx,:] tfs[list(ths).index('SMTSISCH')] = np.log2(tfs[list(ths).index('SMTSISCH')] + 1) tf_predictors = tfs #Correct Y lr_Y = LinearRegression() lr_Y.fit(tf_predictors, tf_Y) tf_Y_predicted = lr_Y.predict(tf_predictors) corrected_tf_Y = tf_Y - tf_Y_predicted #Correct X lr_X = LinearRegression() lr_X.fit(tf_predictors, tf_filt_X) tf_filt_X_predicted = lr_X.predict(tf_predictors) corrected_tf_filt_X = tf_filt_X - tf_filt_X_predicted # Set as return variables final_X = corrected_tf_filt_X final_Y = corrected_tf_Y else: # Set unmodified values as return variables final_X = filt_X final_Y = Y return final_Y, final_X, dIDs, filt_tIDs, tfs, ths, t_idx def filter_features(Y, N):
def filter_expression(X, tIDs, M, k): """ Return top M varying transcripts, with mean expression > k, along with their transcript names. """ k_threshold_idx = np.mean(X, axis=0) > k M_varying_idx = np.argsort(np.std(X[:,k_threshold_idx], axis=0))[-M:] idx = np.array(list(range(X.shape[1]))) final_exp_idx = idx[k_threshold_idx][M_varying_idx] filt_X = X[:, final_exp_idx] filt_tIDs = tIDs[final_exp_idx] return filt_X, filt_tIDs, final_exp_idx def compute_pearsonR(Y, X, parallel=False, verbose=False): """ Perform pairwise associations between filt_features and filt_expression. Also computes pvalues for 1 random shuffles. """ # Make sure all features are > 0 # X[X < 0] = 0 N = Y.shape[1] M = X.shape[1] if parallel: print('Computing in parallel') results = {} shuffle = ['real', 'shuffle'] for sh in shuffle: print ("Shuffle: {}".format(sh)) Y_copy = Y.copy() shuf_idx = list(range(Y.shape[0])) if sh != 'real': np.random.shuffle(shuf_idx) Y_copy = Y_copy[shuf_idx, :] if parallel: def perform_pearsonr(idx): i, j = idx R, pv = pearsonr(Y_copy[:, i], X[:, j]) # pbar.update(1) return R, pv indicies = [] for i in range(N): for j in range(M): idx = (i,j) indicies.append(idx) import pathos import time pool = pathos.pools.ProcessPool(node=32) results = pool.map(perform_pearsonr, indicies) R_mat = np.array([x[0] for x in results]).reshape(N,M) pvs = np.array([x[1] for x in parallel_results]).reshape(N,M) else: pbar = tqdm(total=N*M) R_mat = np.zeros((N, M)) pvs = np.zeros((N, M)) for i in range(N): for j in range(M): R, pv = pearsonr(Y_copy[:, i], X[:, j]) R_mat[i, j] = R pvs[i, j] = pv pbar.update(1) pbar.close() results['Rs_{}'.format(sh)] = R_mat results['pvs_{}'.format(sh)] = pvs return results['Rs_real'], results['pvs_real'], results['pvs_shuffle'] def create_tissue_boundary(ID, tissue, patchsize): from openslide import open_slide image_filepath = os.path.join(GTEx_directory, 'data', 'raw', tissue, ID + '.svs') image_slide = open_slide(image_filepath) toplevel = 
image_slide.level_count - 1 topdim = image_slide.level_dimensions[-1] topdownsample = image_slide.level_downsamples[-1] topdownsampleint = int(topdownsample) toplevelslide = image_slide.read_region((0, 0), toplevel, topdim) toplevelslide = np.array(toplevelslide) toplevelslide = toplevelslide[:, :, 0:3] slide = toplevelslide blurredslide = cv2.GaussianBlur(slide, (51, 51), 0) blurredslide = cv2.cvtColor(blurredslide, cv2.COLOR_BGR2GRAY) T_otsu = mahotas.otsu(blurredslide) mask = np.zeros_like(slide) mask = mask[:, :, 0] mask[blurredslide < T_otsu] = 255 downsampledpatchsize = patchsize / topdownsampleint xlimit = int(topdim[1] / downsampledpatchsize) ylimit = int(topdim[0] / downsampledpatchsize) # Find downsampled coords coords = [] for i in range(xlimit): for j in range(ylimit): x = int(downsampledpatchsize/2 + i*downsampledpatchsize) y = int(downsampledpatchsize/2 + j*downsampledpatchsize) coords.append((x, y)) # Find coords in downsampled mask mask_coords = [] for c in coords: x = c[0] y = c[1] if mask[x, y] > 0: mask_coords.append(c) slidemarkings = slide.copy() for c in mask_coords: x = c[0] y = c[1] slidemarkings[x-3:x+3, y-3:y+3] = [0, 0, 255] return slide, mask, slidemarkings def top5_bottom5_image(tissue, model, patchsize, feature): """ Displays thumbnails of the top 5 and bottom 5 images that activate a given image features at a specific patchsize """ from openslide import open_slide features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, model, 'mean', patchsize) sorted_idx = np.argsort(features[:,feature - 1]) donorIDs_ordered = donorIDs[sorted_idx] tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue) LungGTExIDs = os.listdir(tissue_filepath) LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs] ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]] topIDs = ordered_GTExIDs[-5:] bottomIDs = 
ordered_GTExIDs[:5] top_five_images = [] bottom_five_images = [] for (k,ID) in enumerate(topIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) x = slide.get_thumbnail(size=(400,400)) top_five_images.append(x) for (k,ID) in enumerate(bottomIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) x = slide.get_thumbnail(size=(400,400)) bottom_five_images.append(x) return top_five_images, bottom_five_images def estimate_lambda(pv): """estimate lambda form a set of PV""" LOD2 = sp.median(st.chi2.isf(pv, 1)) null_median = st.chi2.median(1) L = (LOD2 / null_median) return L def display_tissue_feature_gradient(feature, tissue): from openslide import open_slide features, expression, donorIDs, transcriptIDs, technical_factors, technical_headers, technical_idx = extract_final_layer_data(tissue, 'retrained', 'mean', '256') sorted_idx = np.argsort(features[:,feature - 1]) donorIDs_ordered = donorIDs[sorted_idx] gradient_IDs = [donorIDs_ordered[20*i] for i in range(13)] tissue_filepath = os.path.join(GTEx_directory,'data','raw',tissue) LungGTExIDs = os.listdir(tissue_filepath) LungdonorIDs = [x.split('.')[0].split('-')[1] for x in LungGTExIDs] ordered_GTExIDs = np.array(LungGTExIDs)[[LungdonorIDs.index(x.decode('utf-8')) for x in donorIDs_ordered]] thumbnails = [] pbar = tqdm(total=len(ordered_GTExIDs)) for (k,ID) in enumerate(ordered_GTExIDs): image_filepath = os.path.join(GTEx_directory,'data','raw','Lung', ID) slide = open_slide(image_filepath) thumbnail = slide.get_thumbnail(size=(400,400)) feature_value = features[:,feature - 1][sorted_idx[k]] thumbnails.append((thumbnail, feature_value)) pbar.update(1) return thumbnails def get_gene_name(transcript): transcript_id = transcript.decode('utf-8').split('.')[0] try: gene_name = data.gene_name_of_gene_id(transcript_id) except: gene_name = transcript_id return gene_name def normalize_feature(original_feature): mu, 
std = norm.fit(original_feature) target = [np.random.normal()*std + mu for i in range(271)] result = quantile_normalize_using_target(original_feature, target) return result def quantile_normalize_using_target(x, target): """ Both `x` and `target` are numpy arrays of equal lengths. """ target_sorted = np.sort(target) return target_sorted[x.argsort().argsort()]
""" Return top N varying image features. """ most_varying_feature_idx = np.argsort(np.std(Y, axis=0))[-N:] filt_Y = Y[:, most_varying_feature_idx] return filt_Y, most_varying_feature_idx
identifier_body
object_ptr.rs
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ use std::convert::TryFrom; use std::ffi::CString; use std::ptr::NonNull; use std::sync::atomic::AtomicI32; use tvm_sys::ffi::{self, TVMObjectFree, TVMObjectRetain, TVMObjectTypeKey2Index}; use tvm_sys::{ArgValue, RetValue}; use crate::errors::Error; type Deleter = unsafe extern "C" fn(object: *mut Object) -> (); #[derive(Debug)] #[repr(C)] pub struct Object { pub type_index: u32, // TODO(@jroesch): pretty sure Rust and C++ atomics are the same, but not sure. // NB: in general we should not touch this in Rust. pub(self) ref_count: AtomicI32, pub fdeleter: Deleter, } unsafe extern "C" fn delete<T: IsObject>(object: *mut Object) { let typed_object: *mut T = std::mem::transmute(object); T::typed_delete(typed_object); } fn derived_from(child_type_index: u32, parent_type_index: u32) -> bool { let mut is_derived = 0; crate::check_call!(ffi::TVMObjectDerivedFrom( child_type_index, parent_type_index, &mut is_derived )); if is_derived == 0 { false } else { true } } impl Object { fn new(type_index: u32, deleter: Deleter) -> Object { Object { type_index, // Note: do not touch this field directly again, this is // a critical section, we write a 1 to the atomic which will now // be managed by the C++ atomics. 
// In the future we should probably use C-atomcis. ref_count: AtomicI32::new(0), fdeleter: deleter, } } fn get_type_index<T: IsObject>() -> u32 { let type_key = T::TYPE_KEY; let cstring = CString::new(type_key).expect("type key must not contain null characters"); if type_key == "Object" { return 0; } else { let mut index = 0; unsafe { let index_ptr = std::mem::transmute(&mut index); if TVMObjectTypeKey2Index(cstring.as_ptr(), index_ptr) != 0 { panic!(crate::get_last_error()) } } return index; } } pub fn base_object<T: IsObject>() -> Object { let index = Object::get_type_index::<T>(); Object::new(index, delete::<T>) } pub(self) fn inc_ref(&self) { unsafe { let raw_ptr = std::mem::transmute(self); assert_eq!(TVMObjectRetain(raw_ptr), 0); } } pub(self) fn dec_ref(&self) { unsafe { let raw_ptr = std::mem::transmute(self); assert_eq!(TVMObjectFree(raw_ptr), 0); } } } pub unsafe trait IsObject { const TYPE_KEY: &'static str; fn as_object<'s>(&'s self) -> &'s Object; unsafe extern "C" fn typed_delete(object: *mut Self) { let object = Box::from_raw(object); drop(object) } } unsafe impl IsObject for Object { const TYPE_KEY: &'static str = "Object"; fn as_object<'s>(&'s self) -> &'s Object { self } } #[repr(C)] pub struct ObjectPtr<T: IsObject> { pub ptr: NonNull<T>, } fn inc_ref<T: IsObject>(ptr: NonNull<T>) { unsafe { ptr.as_ref().as_object().inc_ref() } } fn dec_ref<T: IsObject>(ptr: NonNull<T>) { unsafe { ptr.as_ref().as_object().dec_ref() } } impl ObjectPtr<Object> { fn from_raw(object_ptr: *mut Object) -> Option<ObjectPtr<Object>> { let non_null = NonNull::new(object_ptr); non_null.map(|ptr| ObjectPtr { ptr }) } } impl<T: IsObject> Clone for ObjectPtr<T> { fn clone(&self) -> Self { inc_ref(self.ptr); ObjectPtr { ptr: self.ptr } } } impl<T: IsObject> Drop for ObjectPtr<T> { fn drop(&mut self) { dec_ref(self.ptr); } } impl<T: IsObject> ObjectPtr<T> { pub fn leak<'a>(object_ptr: ObjectPtr<T>) -> &'a mut T where T: 'a, { unsafe { &mut 
*std::mem::ManuallyDrop::new(object_ptr).ptr.as_ptr() } } pub fn new(object: T) -> ObjectPtr<T> { let object_ptr = Box::new(object); let object_ptr = Box::leak(object_ptr); let ptr = NonNull::from(object_ptr); inc_ref(ptr); ObjectPtr { ptr } } pub fn count(&self) -> i32 { // need to do atomic read in C++ // ABI compatible atomics is funky/hard. self.as_object() .ref_count .load(std::sync::atomic::Ordering::SeqCst) } fn as_object<'s>(&'s self) -> &'s Object { unsafe { self.ptr.as_ref().as_object() } } pub fn upcast(&self) -> ObjectPtr<Object> { ObjectPtr { ptr: self.ptr.cast(), } } pub fn
<U: IsObject>(&self) -> Result<ObjectPtr<U>, Error> { let child_index = Object::get_type_index::<U>(); let object_index = self.as_object().type_index; let is_derived = if child_index == object_index { true } else { // TODO(@jroesch): write tests derived_from(object_index, child_index) }; if is_derived { Ok(ObjectPtr { ptr: self.ptr.cast(), }) } else { Err(Error::downcast("TODOget_type_key".into(), U::TYPE_KEY)) } } } impl<T: IsObject> std::ops::Deref for ObjectPtr<T> { type Target = T; fn deref(&self) -> &Self::Target { unsafe { self.ptr.as_ref() } } } impl<'a, T: IsObject> From<ObjectPtr<T>> for RetValue { fn from(object_ptr: ObjectPtr<T>) -> RetValue { let raw_object_ptr = ObjectPtr::leak(object_ptr); let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) }; RetValue::ObjectHandle(void_ptr) } } impl<'a, T: IsObject> TryFrom<RetValue> for ObjectPtr<T> { type Error = Error; fn try_from(ret_value: RetValue) -> Result<ObjectPtr<T>, Self::Error> { match ret_value { RetValue::ObjectHandle(handle) => { let handle: *mut Object = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", ret_value), "ObjectHandle")), } } } impl<'a, T: IsObject> From<ObjectPtr<T>> for ArgValue<'a> { fn from(object_ptr: ObjectPtr<T>) -> ArgValue<'a> { let raw_object_ptr = ObjectPtr::leak(object_ptr); let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) }; ArgValue::ObjectHandle(void_ptr) } } impl<'a, T: IsObject> TryFrom<ArgValue<'a>> for ObjectPtr<T> { type Error = Error; fn try_from(arg_value: ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> { match arg_value { ArgValue::ObjectHandle(handle) => { let handle = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")), } } } impl<'a, T: IsObject> TryFrom<&ArgValue<'a>> for ObjectPtr<T> { type Error = 
Error; fn try_from(arg_value: &ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> { match arg_value { ArgValue::ObjectHandle(handle) => { let handle = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")), } } } #[cfg(test)] mod tests { use super::{Object, ObjectPtr}; use anyhow::{ensure, Result}; use std::convert::TryInto; use tvm_sys::{ArgValue, RetValue}; #[test] fn test_new_object() -> anyhow::Result<()> { let object = Object::base_object::<Object>(); let ptr = ObjectPtr::new(object); assert_eq!(ptr.count(), 1); Ok(()) } #[test] fn roundtrip_retvalue() -> Result<()> { let ptr = ObjectPtr::new(Object::base_object::<Object>()); let ret_value: RetValue = ptr.clone().into(); let ptr2: ObjectPtr<Object> = ret_value.try_into()?; ensure!( ptr.type_index == ptr2.type_index, "type indices do not match" ); ensure!( ptr.fdeleter == ptr2.fdeleter, "objects have different deleters" ); Ok(()) } #[test] fn roundtrip_argvalue() -> Result<()> { let ptr = ObjectPtr::new(Object::base_object::<Object>()); let arg_value: ArgValue = ptr.clone().into(); let ptr2: ObjectPtr<Object> = arg_value.try_into()?; ensure!( ptr.type_index == ptr2.type_index, "type indices do not match" ); ensure!( ptr.fdeleter == ptr2.fdeleter, "objects have different deleters" ); Ok(()) } fn test_fn(o: ObjectPtr<Object>) -> ObjectPtr<Object> { assert_eq!(o.count(), 2); return o; } #[test] fn test_ref_count_boundary() { use super::*; use crate::function::{register, Function, Result}; let ptr = ObjectPtr::new(Object::base_object::<Object>()); let stay = ptr.clone(); assert_eq!(ptr.count(), 2); register(test_fn, "my_func").unwrap(); let func = Function::get("my_func").unwrap(); let func = func.to_boxed_fn::<dyn Fn(ObjectPtr<Object>) -> Result<ObjectPtr<Object>>>(); func(ptr).unwrap(); assert_eq!(stay.count(), 1); } }
downcast
identifier_name
object_ptr.rs
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ use std::convert::TryFrom; use std::ffi::CString; use std::ptr::NonNull; use std::sync::atomic::AtomicI32; use tvm_sys::ffi::{self, TVMObjectFree, TVMObjectRetain, TVMObjectTypeKey2Index}; use tvm_sys::{ArgValue, RetValue}; use crate::errors::Error; type Deleter = unsafe extern "C" fn(object: *mut Object) -> (); #[derive(Debug)] #[repr(C)] pub struct Object { pub type_index: u32, // TODO(@jroesch): pretty sure Rust and C++ atomics are the same, but not sure. // NB: in general we should not touch this in Rust. pub(self) ref_count: AtomicI32, pub fdeleter: Deleter, } unsafe extern "C" fn delete<T: IsObject>(object: *mut Object) { let typed_object: *mut T = std::mem::transmute(object); T::typed_delete(typed_object); } fn derived_from(child_type_index: u32, parent_type_index: u32) -> bool { let mut is_derived = 0; crate::check_call!(ffi::TVMObjectDerivedFrom( child_type_index, parent_type_index, &mut is_derived )); if is_derived == 0 { false } else { true } } impl Object { fn new(type_index: u32, deleter: Deleter) -> Object { Object { type_index, // Note: do not touch this field directly again, this is // a critical section, we write a 1 to the atomic which will now // be managed by the C++ atomics. 
// In the future we should probably use C-atomcis. ref_count: AtomicI32::new(0), fdeleter: deleter, } } fn get_type_index<T: IsObject>() -> u32 { let type_key = T::TYPE_KEY; let cstring = CString::new(type_key).expect("type key must not contain null characters"); if type_key == "Object" { return 0; } else { let mut index = 0; unsafe { let index_ptr = std::mem::transmute(&mut index); if TVMObjectTypeKey2Index(cstring.as_ptr(), index_ptr) != 0 { panic!(crate::get_last_error()) } } return index; } } pub fn base_object<T: IsObject>() -> Object { let index = Object::get_type_index::<T>(); Object::new(index, delete::<T>) } pub(self) fn inc_ref(&self) { unsafe { let raw_ptr = std::mem::transmute(self); assert_eq!(TVMObjectRetain(raw_ptr), 0); } } pub(self) fn dec_ref(&self) { unsafe { let raw_ptr = std::mem::transmute(self); assert_eq!(TVMObjectFree(raw_ptr), 0); } } } pub unsafe trait IsObject { const TYPE_KEY: &'static str; fn as_object<'s>(&'s self) -> &'s Object; unsafe extern "C" fn typed_delete(object: *mut Self) { let object = Box::from_raw(object); drop(object) } } unsafe impl IsObject for Object { const TYPE_KEY: &'static str = "Object"; fn as_object<'s>(&'s self) -> &'s Object { self } } #[repr(C)] pub struct ObjectPtr<T: IsObject> { pub ptr: NonNull<T>, } fn inc_ref<T: IsObject>(ptr: NonNull<T>) { unsafe { ptr.as_ref().as_object().inc_ref() } } fn dec_ref<T: IsObject>(ptr: NonNull<T>) { unsafe { ptr.as_ref().as_object().dec_ref() } } impl ObjectPtr<Object> { fn from_raw(object_ptr: *mut Object) -> Option<ObjectPtr<Object>> { let non_null = NonNull::new(object_ptr); non_null.map(|ptr| ObjectPtr { ptr }) } } impl<T: IsObject> Clone for ObjectPtr<T> { fn clone(&self) -> Self { inc_ref(self.ptr); ObjectPtr { ptr: self.ptr } } } impl<T: IsObject> Drop for ObjectPtr<T> { fn drop(&mut self) { dec_ref(self.ptr); } } impl<T: IsObject> ObjectPtr<T> { pub fn leak<'a>(object_ptr: ObjectPtr<T>) -> &'a mut T where T: 'a, { unsafe { &mut 
*std::mem::ManuallyDrop::new(object_ptr).ptr.as_ptr() } } pub fn new(object: T) -> ObjectPtr<T> { let object_ptr = Box::new(object); let object_ptr = Box::leak(object_ptr); let ptr = NonNull::from(object_ptr); inc_ref(ptr); ObjectPtr { ptr } } pub fn count(&self) -> i32 { // need to do atomic read in C++ // ABI compatible atomics is funky/hard. self.as_object() .ref_count .load(std::sync::atomic::Ordering::SeqCst) } fn as_object<'s>(&'s self) -> &'s Object
pub fn upcast(&self) -> ObjectPtr<Object> { ObjectPtr { ptr: self.ptr.cast(), } } pub fn downcast<U: IsObject>(&self) -> Result<ObjectPtr<U>, Error> { let child_index = Object::get_type_index::<U>(); let object_index = self.as_object().type_index; let is_derived = if child_index == object_index { true } else { // TODO(@jroesch): write tests derived_from(object_index, child_index) }; if is_derived { Ok(ObjectPtr { ptr: self.ptr.cast(), }) } else { Err(Error::downcast("TODOget_type_key".into(), U::TYPE_KEY)) } } } impl<T: IsObject> std::ops::Deref for ObjectPtr<T> { type Target = T; fn deref(&self) -> &Self::Target { unsafe { self.ptr.as_ref() } } } impl<'a, T: IsObject> From<ObjectPtr<T>> for RetValue { fn from(object_ptr: ObjectPtr<T>) -> RetValue { let raw_object_ptr = ObjectPtr::leak(object_ptr); let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) }; RetValue::ObjectHandle(void_ptr) } } impl<'a, T: IsObject> TryFrom<RetValue> for ObjectPtr<T> { type Error = Error; fn try_from(ret_value: RetValue) -> Result<ObjectPtr<T>, Self::Error> { match ret_value { RetValue::ObjectHandle(handle) => { let handle: *mut Object = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", ret_value), "ObjectHandle")), } } } impl<'a, T: IsObject> From<ObjectPtr<T>> for ArgValue<'a> { fn from(object_ptr: ObjectPtr<T>) -> ArgValue<'a> { let raw_object_ptr = ObjectPtr::leak(object_ptr); let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) }; ArgValue::ObjectHandle(void_ptr) } } impl<'a, T: IsObject> TryFrom<ArgValue<'a>> for ObjectPtr<T> { type Error = Error; fn try_from(arg_value: ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> { match arg_value { ArgValue::ObjectHandle(handle) => { let handle = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", arg_value), 
"ObjectHandle")), } } } impl<'a, T: IsObject> TryFrom<&ArgValue<'a>> for ObjectPtr<T> { type Error = Error; fn try_from(arg_value: &ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> { match arg_value { ArgValue::ObjectHandle(handle) => { let handle = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")), } } } #[cfg(test)] mod tests { use super::{Object, ObjectPtr}; use anyhow::{ensure, Result}; use std::convert::TryInto; use tvm_sys::{ArgValue, RetValue}; #[test] fn test_new_object() -> anyhow::Result<()> { let object = Object::base_object::<Object>(); let ptr = ObjectPtr::new(object); assert_eq!(ptr.count(), 1); Ok(()) } #[test] fn roundtrip_retvalue() -> Result<()> { let ptr = ObjectPtr::new(Object::base_object::<Object>()); let ret_value: RetValue = ptr.clone().into(); let ptr2: ObjectPtr<Object> = ret_value.try_into()?; ensure!( ptr.type_index == ptr2.type_index, "type indices do not match" ); ensure!( ptr.fdeleter == ptr2.fdeleter, "objects have different deleters" ); Ok(()) } #[test] fn roundtrip_argvalue() -> Result<()> { let ptr = ObjectPtr::new(Object::base_object::<Object>()); let arg_value: ArgValue = ptr.clone().into(); let ptr2: ObjectPtr<Object> = arg_value.try_into()?; ensure!( ptr.type_index == ptr2.type_index, "type indices do not match" ); ensure!( ptr.fdeleter == ptr2.fdeleter, "objects have different deleters" ); Ok(()) } fn test_fn(o: ObjectPtr<Object>) -> ObjectPtr<Object> { assert_eq!(o.count(), 2); return o; } #[test] fn test_ref_count_boundary() { use super::*; use crate::function::{register, Function, Result}; let ptr = ObjectPtr::new(Object::base_object::<Object>()); let stay = ptr.clone(); assert_eq!(ptr.count(), 2); register(test_fn, "my_func").unwrap(); let func = Function::get("my_func").unwrap(); let func = func.to_boxed_fn::<dyn Fn(ObjectPtr<Object>) -> Result<ObjectPtr<Object>>>(); 
func(ptr).unwrap(); assert_eq!(stay.count(), 1); } }
{ unsafe { self.ptr.as_ref().as_object() } }
identifier_body
object_ptr.rs
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ use std::convert::TryFrom; use std::ffi::CString; use std::ptr::NonNull; use std::sync::atomic::AtomicI32; use tvm_sys::ffi::{self, TVMObjectFree, TVMObjectRetain, TVMObjectTypeKey2Index}; use tvm_sys::{ArgValue, RetValue}; use crate::errors::Error; type Deleter = unsafe extern "C" fn(object: *mut Object) -> (); #[derive(Debug)] #[repr(C)] pub struct Object { pub type_index: u32, // TODO(@jroesch): pretty sure Rust and C++ atomics are the same, but not sure. // NB: in general we should not touch this in Rust. pub(self) ref_count: AtomicI32, pub fdeleter: Deleter, } unsafe extern "C" fn delete<T: IsObject>(object: *mut Object) { let typed_object: *mut T = std::mem::transmute(object); T::typed_delete(typed_object); } fn derived_from(child_type_index: u32, parent_type_index: u32) -> bool { let mut is_derived = 0; crate::check_call!(ffi::TVMObjectDerivedFrom( child_type_index, parent_type_index, &mut is_derived )); if is_derived == 0 { false } else { true } } impl Object { fn new(type_index: u32, deleter: Deleter) -> Object { Object { type_index, // Note: do not touch this field directly again, this is // a critical section, we write a 1 to the atomic which will now // be managed by the C++ atomics. 
// In the future we should probably use C-atomcis. ref_count: AtomicI32::new(0), fdeleter: deleter, } } fn get_type_index<T: IsObject>() -> u32 { let type_key = T::TYPE_KEY; let cstring = CString::new(type_key).expect("type key must not contain null characters"); if type_key == "Object" { return 0; } else { let mut index = 0; unsafe { let index_ptr = std::mem::transmute(&mut index); if TVMObjectTypeKey2Index(cstring.as_ptr(), index_ptr) != 0 { panic!(crate::get_last_error()) } } return index; } } pub fn base_object<T: IsObject>() -> Object { let index = Object::get_type_index::<T>(); Object::new(index, delete::<T>) } pub(self) fn inc_ref(&self) { unsafe { let raw_ptr = std::mem::transmute(self); assert_eq!(TVMObjectRetain(raw_ptr), 0); } } pub(self) fn dec_ref(&self) { unsafe { let raw_ptr = std::mem::transmute(self); assert_eq!(TVMObjectFree(raw_ptr), 0); } } } pub unsafe trait IsObject { const TYPE_KEY: &'static str; fn as_object<'s>(&'s self) -> &'s Object; unsafe extern "C" fn typed_delete(object: *mut Self) { let object = Box::from_raw(object); drop(object) } } unsafe impl IsObject for Object { const TYPE_KEY: &'static str = "Object"; fn as_object<'s>(&'s self) -> &'s Object { self } } #[repr(C)] pub struct ObjectPtr<T: IsObject> { pub ptr: NonNull<T>, } fn inc_ref<T: IsObject>(ptr: NonNull<T>) { unsafe { ptr.as_ref().as_object().inc_ref() } } fn dec_ref<T: IsObject>(ptr: NonNull<T>) { unsafe { ptr.as_ref().as_object().dec_ref() } } impl ObjectPtr<Object> { fn from_raw(object_ptr: *mut Object) -> Option<ObjectPtr<Object>> { let non_null = NonNull::new(object_ptr); non_null.map(|ptr| ObjectPtr { ptr }) } } impl<T: IsObject> Clone for ObjectPtr<T> { fn clone(&self) -> Self { inc_ref(self.ptr); ObjectPtr { ptr: self.ptr } } } impl<T: IsObject> Drop for ObjectPtr<T> { fn drop(&mut self) { dec_ref(self.ptr); } } impl<T: IsObject> ObjectPtr<T> { pub fn leak<'a>(object_ptr: ObjectPtr<T>) -> &'a mut T where T: 'a, { unsafe { &mut 
*std::mem::ManuallyDrop::new(object_ptr).ptr.as_ptr() } } pub fn new(object: T) -> ObjectPtr<T> { let object_ptr = Box::new(object); let object_ptr = Box::leak(object_ptr); let ptr = NonNull::from(object_ptr); inc_ref(ptr); ObjectPtr { ptr } } pub fn count(&self) -> i32 { // need to do atomic read in C++ // ABI compatible atomics is funky/hard. self.as_object() .ref_count .load(std::sync::atomic::Ordering::SeqCst) } fn as_object<'s>(&'s self) -> &'s Object { unsafe { self.ptr.as_ref().as_object() } } pub fn upcast(&self) -> ObjectPtr<Object> { ObjectPtr { ptr: self.ptr.cast(), } } pub fn downcast<U: IsObject>(&self) -> Result<ObjectPtr<U>, Error> { let child_index = Object::get_type_index::<U>(); let object_index = self.as_object().type_index; let is_derived = if child_index == object_index { true } else { // TODO(@jroesch): write tests derived_from(object_index, child_index) }; if is_derived { Ok(ObjectPtr { ptr: self.ptr.cast(), }) } else { Err(Error::downcast("TODOget_type_key".into(), U::TYPE_KEY)) } } } impl<T: IsObject> std::ops::Deref for ObjectPtr<T> { type Target = T; fn deref(&self) -> &Self::Target { unsafe { self.ptr.as_ref() } } } impl<'a, T: IsObject> From<ObjectPtr<T>> for RetValue { fn from(object_ptr: ObjectPtr<T>) -> RetValue { let raw_object_ptr = ObjectPtr::leak(object_ptr); let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) }; RetValue::ObjectHandle(void_ptr) } } impl<'a, T: IsObject> TryFrom<RetValue> for ObjectPtr<T> { type Error = Error; fn try_from(ret_value: RetValue) -> Result<ObjectPtr<T>, Self::Error> { match ret_value { RetValue::ObjectHandle(handle) => { let handle: *mut Object = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", ret_value), "ObjectHandle")), } } } impl<'a, T: IsObject> From<ObjectPtr<T>> for ArgValue<'a> { fn from(object_ptr: ObjectPtr<T>) -> ArgValue<'a> { let raw_object_ptr = 
ObjectPtr::leak(object_ptr); let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) }; ArgValue::ObjectHandle(void_ptr) } } impl<'a, T: IsObject> TryFrom<ArgValue<'a>> for ObjectPtr<T> { type Error = Error; fn try_from(arg_value: ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> { match arg_value { ArgValue::ObjectHandle(handle) => { let handle = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")), } } } impl<'a, T: IsObject> TryFrom<&ArgValue<'a>> for ObjectPtr<T> { type Error = Error; fn try_from(arg_value: &ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> { match arg_value { ArgValue::ObjectHandle(handle) => { let handle = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")), } } } #[cfg(test)] mod tests { use super::{Object, ObjectPtr}; use anyhow::{ensure, Result}; use std::convert::TryInto; use tvm_sys::{ArgValue, RetValue}; #[test] fn test_new_object() -> anyhow::Result<()> {
let object = Object::base_object::<Object>(); let ptr = ObjectPtr::new(object); assert_eq!(ptr.count(), 1); Ok(()) } #[test] fn roundtrip_retvalue() -> Result<()> { let ptr = ObjectPtr::new(Object::base_object::<Object>()); let ret_value: RetValue = ptr.clone().into(); let ptr2: ObjectPtr<Object> = ret_value.try_into()?; ensure!( ptr.type_index == ptr2.type_index, "type indices do not match" ); ensure!( ptr.fdeleter == ptr2.fdeleter, "objects have different deleters" ); Ok(()) } #[test] fn roundtrip_argvalue() -> Result<()> { let ptr = ObjectPtr::new(Object::base_object::<Object>()); let arg_value: ArgValue = ptr.clone().into(); let ptr2: ObjectPtr<Object> = arg_value.try_into()?; ensure!( ptr.type_index == ptr2.type_index, "type indices do not match" ); ensure!( ptr.fdeleter == ptr2.fdeleter, "objects have different deleters" ); Ok(()) } fn test_fn(o: ObjectPtr<Object>) -> ObjectPtr<Object> { assert_eq!(o.count(), 2); return o; } #[test] fn test_ref_count_boundary() { use super::*; use crate::function::{register, Function, Result}; let ptr = ObjectPtr::new(Object::base_object::<Object>()); let stay = ptr.clone(); assert_eq!(ptr.count(), 2); register(test_fn, "my_func").unwrap(); let func = Function::get("my_func").unwrap(); let func = func.to_boxed_fn::<dyn Fn(ObjectPtr<Object>) -> Result<ObjectPtr<Object>>>(); func(ptr).unwrap(); assert_eq!(stay.count(), 1); } }
random_line_split
object_ptr.rs
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ use std::convert::TryFrom; use std::ffi::CString; use std::ptr::NonNull; use std::sync::atomic::AtomicI32; use tvm_sys::ffi::{self, TVMObjectFree, TVMObjectRetain, TVMObjectTypeKey2Index}; use tvm_sys::{ArgValue, RetValue}; use crate::errors::Error; type Deleter = unsafe extern "C" fn(object: *mut Object) -> (); #[derive(Debug)] #[repr(C)] pub struct Object { pub type_index: u32, // TODO(@jroesch): pretty sure Rust and C++ atomics are the same, but not sure. // NB: in general we should not touch this in Rust. pub(self) ref_count: AtomicI32, pub fdeleter: Deleter, } unsafe extern "C" fn delete<T: IsObject>(object: *mut Object) { let typed_object: *mut T = std::mem::transmute(object); T::typed_delete(typed_object); } fn derived_from(child_type_index: u32, parent_type_index: u32) -> bool { let mut is_derived = 0; crate::check_call!(ffi::TVMObjectDerivedFrom( child_type_index, parent_type_index, &mut is_derived )); if is_derived == 0 { false } else
} impl Object { fn new(type_index: u32, deleter: Deleter) -> Object { Object { type_index, // Note: do not touch this field directly again, this is // a critical section, we write a 1 to the atomic which will now // be managed by the C++ atomics. // In the future we should probably use C-atomcis. ref_count: AtomicI32::new(0), fdeleter: deleter, } } fn get_type_index<T: IsObject>() -> u32 { let type_key = T::TYPE_KEY; let cstring = CString::new(type_key).expect("type key must not contain null characters"); if type_key == "Object" { return 0; } else { let mut index = 0; unsafe { let index_ptr = std::mem::transmute(&mut index); if TVMObjectTypeKey2Index(cstring.as_ptr(), index_ptr) != 0 { panic!(crate::get_last_error()) } } return index; } } pub fn base_object<T: IsObject>() -> Object { let index = Object::get_type_index::<T>(); Object::new(index, delete::<T>) } pub(self) fn inc_ref(&self) { unsafe { let raw_ptr = std::mem::transmute(self); assert_eq!(TVMObjectRetain(raw_ptr), 0); } } pub(self) fn dec_ref(&self) { unsafe { let raw_ptr = std::mem::transmute(self); assert_eq!(TVMObjectFree(raw_ptr), 0); } } } pub unsafe trait IsObject { const TYPE_KEY: &'static str; fn as_object<'s>(&'s self) -> &'s Object; unsafe extern "C" fn typed_delete(object: *mut Self) { let object = Box::from_raw(object); drop(object) } } unsafe impl IsObject for Object { const TYPE_KEY: &'static str = "Object"; fn as_object<'s>(&'s self) -> &'s Object { self } } #[repr(C)] pub struct ObjectPtr<T: IsObject> { pub ptr: NonNull<T>, } fn inc_ref<T: IsObject>(ptr: NonNull<T>) { unsafe { ptr.as_ref().as_object().inc_ref() } } fn dec_ref<T: IsObject>(ptr: NonNull<T>) { unsafe { ptr.as_ref().as_object().dec_ref() } } impl ObjectPtr<Object> { fn from_raw(object_ptr: *mut Object) -> Option<ObjectPtr<Object>> { let non_null = NonNull::new(object_ptr); non_null.map(|ptr| ObjectPtr { ptr }) } } impl<T: IsObject> Clone for ObjectPtr<T> { fn clone(&self) -> Self { inc_ref(self.ptr); ObjectPtr { ptr: self.ptr 
} } } impl<T: IsObject> Drop for ObjectPtr<T> { fn drop(&mut self) { dec_ref(self.ptr); } } impl<T: IsObject> ObjectPtr<T> { pub fn leak<'a>(object_ptr: ObjectPtr<T>) -> &'a mut T where T: 'a, { unsafe { &mut *std::mem::ManuallyDrop::new(object_ptr).ptr.as_ptr() } } pub fn new(object: T) -> ObjectPtr<T> { let object_ptr = Box::new(object); let object_ptr = Box::leak(object_ptr); let ptr = NonNull::from(object_ptr); inc_ref(ptr); ObjectPtr { ptr } } pub fn count(&self) -> i32 { // need to do atomic read in C++ // ABI compatible atomics is funky/hard. self.as_object() .ref_count .load(std::sync::atomic::Ordering::SeqCst) } fn as_object<'s>(&'s self) -> &'s Object { unsafe { self.ptr.as_ref().as_object() } } pub fn upcast(&self) -> ObjectPtr<Object> { ObjectPtr { ptr: self.ptr.cast(), } } pub fn downcast<U: IsObject>(&self) -> Result<ObjectPtr<U>, Error> { let child_index = Object::get_type_index::<U>(); let object_index = self.as_object().type_index; let is_derived = if child_index == object_index { true } else { // TODO(@jroesch): write tests derived_from(object_index, child_index) }; if is_derived { Ok(ObjectPtr { ptr: self.ptr.cast(), }) } else { Err(Error::downcast("TODOget_type_key".into(), U::TYPE_KEY)) } } } impl<T: IsObject> std::ops::Deref for ObjectPtr<T> { type Target = T; fn deref(&self) -> &Self::Target { unsafe { self.ptr.as_ref() } } } impl<'a, T: IsObject> From<ObjectPtr<T>> for RetValue { fn from(object_ptr: ObjectPtr<T>) -> RetValue { let raw_object_ptr = ObjectPtr::leak(object_ptr); let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) }; RetValue::ObjectHandle(void_ptr) } } impl<'a, T: IsObject> TryFrom<RetValue> for ObjectPtr<T> { type Error = Error; fn try_from(ret_value: RetValue) -> Result<ObjectPtr<T>, Self::Error> { match ret_value { RetValue::ObjectHandle(handle) => { let handle: *mut Object = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => 
Err(Error::downcast(format!("{:?}", ret_value), "ObjectHandle")), } } } impl<'a, T: IsObject> From<ObjectPtr<T>> for ArgValue<'a> { fn from(object_ptr: ObjectPtr<T>) -> ArgValue<'a> { let raw_object_ptr = ObjectPtr::leak(object_ptr); let void_ptr = unsafe { std::mem::transmute(raw_object_ptr) }; ArgValue::ObjectHandle(void_ptr) } } impl<'a, T: IsObject> TryFrom<ArgValue<'a>> for ObjectPtr<T> { type Error = Error; fn try_from(arg_value: ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> { match arg_value { ArgValue::ObjectHandle(handle) => { let handle = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")), } } } impl<'a, T: IsObject> TryFrom<&ArgValue<'a>> for ObjectPtr<T> { type Error = Error; fn try_from(arg_value: &ArgValue<'a>) -> Result<ObjectPtr<T>, Self::Error> { match arg_value { ArgValue::ObjectHandle(handle) => { let handle = unsafe { std::mem::transmute(handle) }; let optr = ObjectPtr::from_raw(handle).ok_or(Error::Null)?; optr.downcast() } _ => Err(Error::downcast(format!("{:?}", arg_value), "ObjectHandle")), } } } #[cfg(test)] mod tests { use super::{Object, ObjectPtr}; use anyhow::{ensure, Result}; use std::convert::TryInto; use tvm_sys::{ArgValue, RetValue}; #[test] fn test_new_object() -> anyhow::Result<()> { let object = Object::base_object::<Object>(); let ptr = ObjectPtr::new(object); assert_eq!(ptr.count(), 1); Ok(()) } #[test] fn roundtrip_retvalue() -> Result<()> { let ptr = ObjectPtr::new(Object::base_object::<Object>()); let ret_value: RetValue = ptr.clone().into(); let ptr2: ObjectPtr<Object> = ret_value.try_into()?; ensure!( ptr.type_index == ptr2.type_index, "type indices do not match" ); ensure!( ptr.fdeleter == ptr2.fdeleter, "objects have different deleters" ); Ok(()) } #[test] fn roundtrip_argvalue() -> Result<()> { let ptr = ObjectPtr::new(Object::base_object::<Object>()); let arg_value: ArgValue = 
ptr.clone().into(); let ptr2: ObjectPtr<Object> = arg_value.try_into()?; ensure!( ptr.type_index == ptr2.type_index, "type indices do not match" ); ensure!( ptr.fdeleter == ptr2.fdeleter, "objects have different deleters" ); Ok(()) } fn test_fn(o: ObjectPtr<Object>) -> ObjectPtr<Object> { assert_eq!(o.count(), 2); return o; } #[test] fn test_ref_count_boundary() { use super::*; use crate::function::{register, Function, Result}; let ptr = ObjectPtr::new(Object::base_object::<Object>()); let stay = ptr.clone(); assert_eq!(ptr.count(), 2); register(test_fn, "my_func").unwrap(); let func = Function::get("my_func").unwrap(); let func = func.to_boxed_fn::<dyn Fn(ObjectPtr<Object>) -> Result<ObjectPtr<Object>>>(); func(ptr).unwrap(); assert_eq!(stay.count(), 1); } }
{ true }
conditional_block
FtpContext.js
"use strict"; const Socket = require("net").Socket; const parseControlResponse = require("./parseControlResponse"); /** * @typedef {Object} Task * @property {(...args: any[]) => void} resolve - Resolves the task. * @property {(...args: any[]) => void} reject - Rejects the task. */ /** * @typedef {(response: Object, task: Task) => void} ResponseHandler */ /** * FTPContext holds the control and data sockets of an FTP connection and provides a * simplified way to interact with an FTP server, handle responses, errors and timeouts. * * It doesn't implement or use any FTP commands. It's only a foundation to make writing an FTP * client as easy as possible. You won't usually instantiate this, but use `Client`. */ module.exports = class FTPContext { /** * Instantiate an FTP context. * * @param {number} [timeout=0] - Timeout in milliseconds to apply to control and data connections. Use 0 for no timeout. * @param {string} [encoding="utf8"] - Encoding to use for control connection. UTF-8 by default. Use "latin1" for older servers. */ constructor(timeout = 0, encoding = "utf8") { /** * Timeout applied to all connections. * @private * @type {number} */ this._timeout = timeout; /** * Current task to be resolved or rejected. * @private * @type {(Task | undefined)} */ this._task = undefined; /** * Function that handles incoming messages and resolves or rejects a task. * @private * @type {(ResponseHandler | undefined)} */ this._handler = undefined; /** * A multiline response might be received as multiple chunks. * @private * @type {string} */ this._partialResponse = ""; /** * The encoding used when reading from and writing to the control socket. * @type {string} */ this.encoding = encoding; /** * Options for TLS connections. * @type {import("tls").ConnectionOptions} */ this.tlsOptions = {}; /** * IP version to prefer (4: IPv4, 6: IPv6). * @type {(string | undefined)} */ this.ipFamily = undefined; /** * Log every communication detail. 
* @type {boolean} */ this.verbose = false; /** * The control connection to the FTP server. * @type {Socket} */ this.socket = new Socket(); /** * The current data connection to the FTP server. * @type {(Socket | undefined)} */ this.dataSocket = undefined; } /** * Close the context by resetting its state. */ close() { this._passToHandler({ error: { info: "User closed client during task." }}); this._reset(); } /** @type {Socket} */ get socket() { return this._socket; } /** * Set the socket for the control connection. This will only close the current control socket * if the new one is set to `undefined` because you're most likely to be upgrading an existing * control connection that continues to be used. * * @type {Socket} */ set socket(socket) { // No data socket should be open in any case where the control socket is set or upgraded. this.dataSocket = undefined; if (this._socket) { this._removeSocketListeners(this._socket); } if (socket) { // Don't set a timeout yet. Timeout for control sockets is only active during a task, see handle() below. socket.setTimeout(0); socket.setKeepAlive(true); socket.on("data", data => this._onControlSocketData(data)); this._setupErrorHandlers(socket, "control"); } else { this._closeSocket(this._socket); } this._socket = socket; } /** @type {(Socket | undefined)} */ get dataSocket() { return this._dataSocket; } /** * Set the socket for the data connection. This will automatically close the former data socket. * * @type {(Socket | undefined)} **/ set dataSocket(socket) { this._closeSocket(this._dataSocket); if (socket) { socket.setTimeout(this._timeout); this._setupErrorHandlers(socket, "data"); } this._dataSocket = socket; } /** * Send an FTP command without waiting for or handling the result. * * @param {string} command */ send(command) { // Don't log passwords. const message = command.startsWith("PASS") ? 
"> PASS ###" : `> ${command}`; this.log(message); this._socket.write(command + "\r\n", this.encoding); } /** * Log message if set to be verbose. * * @param {string} message */ log(message) { if (this.verbose) { console.log(message); } } /** * Enable timeout on the control socket connection. Disabling it can be useful if * a timeout should be caught by the current data connection instead of the * control connection that sits idle during transfers anyway. * * @param {boolean} enabled */ enableControlTimeout(enabled) { this.socket.setTimeout(enabled ? this._timeout : 0); } /** * Return true if the control socket is using TLS. This does not mean that a session * has already been negotiated. * * @returns {boolean} */ get hasTLS() { //@ts-ignore that not every socket has property encrypted. return this._socket && this._socket.encrypted === true; } /** * Send an FTP command and handle any response until the new task is resolved. This returns a Promise that * will hold whatever the handler passed on when resolving/rejecting its task. * * @param {string} command * @param {ResponseHandler} handler * @returns {Promise<any>} */ handle(command, handler) { if (this._handler !== undefined) { this.close(); throw new Error("There is still a task running. Did you forget to use '.then()' or 'await'?"); } // Only track control socket timeout during the lifecycle of a task associated with a handler. // That way we avoid timeouts on idle sockets, a behaviour that is not expected by most users. this.enableControlTimeout(true); return new Promise((resolvePromise, rejectPromise) => { this._handler = handler; this._task = { // When resolving or rejecting we also want the handler // to no longer receive any responses or errors. resolve: (...args) => { this._stopTrackingTask(); resolvePromise(...args); }, reject: (...args) => { this._stopTrackingTask(); rejectPromise(...args); } }; if (command !== undefined)
}); } /** * Removes reference to current task and handler. This won't resolve or reject the task. */ _stopTrackingTask() { // Disable timeout on control socket if there is no task active. this.enableControlTimeout(false); this._task = undefined; this._handler = undefined; } /** * Handle incoming data on the control socket. * * @private * @param {Buffer} data */ _onControlSocketData(data) { let response = data.toString(this.encoding).trim(); this.log(`< ${response}`); // This response might complete an earlier partial response. response = this._partialResponse + response; const parsed = parseControlResponse(response); // Remember any incomplete remainder. this._partialResponse = parsed.rest; // Each response group is passed along individually. for (const message of parsed.messages) { const code = parseInt(message.substr(0, 3), 10); this._passToHandler({ code, message }); } } /** * Send the current handler a response. This is usually a control socket response * or a socket event, like an error or timeout. * * @private * @param {Object} response */ _passToHandler(response) { if (this._handler) { this._handler(response, this._task); } } /** * Reset the state of this context. * * @private */ _reset() { this.log("Closing connections."); this._stopTrackingTask(); this._partialResponse = ""; this._closeSocket(this._socket); this._closeSocket(this._dataSocket); // Set a new socket instance to make reconnecting possible. this.socket = new Socket(); } /** * Send an error to the current handler and close all connections. * * @private * @param {*} error */ _closeWithError(error) { this.log(error); this._passToHandler({ error }); this._reset(); } /** * Close a socket. * * @private * @param {(Socket | undefined)} socket */ _closeSocket(socket) { if (socket) { socket.destroy(); this._removeSocketListeners(socket); } } /** * Setup all error handlers for a socket. 
* * @private * @param {Socket} socket * @param {string} identifier */ _setupErrorHandlers(socket, identifier) { socket.once("error", error => this._closeWithError({ ...error, ftpSocket: identifier })); socket.once("timeout", () => this._closeWithError({ info: "socket timeout", ftpSocket: identifier })); socket.once("close", hadError => { if (hadError) { this._closeWithError({ info: "socket closed due to transmission error", ftpSocket: identifier}); } }); } /** * Remove all default listeners for socket. * * @private * @param {Socket} socket */ _removeSocketListeners(socket) { // socket.removeAllListeners() without name doesn't work: https://github.com/nodejs/node/issues/20923 socket.removeAllListeners("timeout"); socket.removeAllListeners("data"); socket.removeAllListeners("error"); socket.removeAllListeners("close"); socket.removeAllListeners("connect"); } };
{ this.send(command); }
conditional_block
FtpContext.js
"use strict"; const Socket = require("net").Socket; const parseControlResponse = require("./parseControlResponse"); /** * @typedef {Object} Task * @property {(...args: any[]) => void} resolve - Resolves the task. * @property {(...args: any[]) => void} reject - Rejects the task. */ /** * @typedef {(response: Object, task: Task) => void} ResponseHandler */ /** * FTPContext holds the control and data sockets of an FTP connection and provides a * simplified way to interact with an FTP server, handle responses, errors and timeouts. * * It doesn't implement or use any FTP commands. It's only a foundation to make writing an FTP * client as easy as possible. You won't usually instantiate this, but use `Client`. */ module.exports = class FTPContext { /** * Instantiate an FTP context. * * @param {number} [timeout=0] - Timeout in milliseconds to apply to control and data connections. Use 0 for no timeout. * @param {string} [encoding="utf8"] - Encoding to use for control connection. UTF-8 by default. Use "latin1" for older servers. */ constructor(timeout = 0, encoding = "utf8") { /** * Timeout applied to all connections. * @private * @type {number} */ this._timeout = timeout; /** * Current task to be resolved or rejected. * @private * @type {(Task | undefined)} */ this._task = undefined; /** * Function that handles incoming messages and resolves or rejects a task. * @private * @type {(ResponseHandler | undefined)} */ this._handler = undefined; /** * A multiline response might be received as multiple chunks. * @private * @type {string} */ this._partialResponse = ""; /** * The encoding used when reading from and writing to the control socket. * @type {string} */ this.encoding = encoding; /** * Options for TLS connections. * @type {import("tls").ConnectionOptions} */ this.tlsOptions = {}; /** * IP version to prefer (4: IPv4, 6: IPv6). * @type {(string | undefined)} */ this.ipFamily = undefined; /** * Log every communication detail. 
* @type {boolean} */ this.verbose = false; /** * The control connection to the FTP server. * @type {Socket} */ this.socket = new Socket(); /** * The current data connection to the FTP server. * @type {(Socket | undefined)} */ this.dataSocket = undefined; } /** * Close the context by resetting its state. */ close() { this._passToHandler({ error: { info: "User closed client during task." }}); this._reset(); } /** @type {Socket} */ get
() { return this._socket; } /** * Set the socket for the control connection. This will only close the current control socket * if the new one is set to `undefined` because you're most likely to be upgrading an existing * control connection that continues to be used. * * @type {Socket} */ set socket(socket) { // No data socket should be open in any case where the control socket is set or upgraded. this.dataSocket = undefined; if (this._socket) { this._removeSocketListeners(this._socket); } if (socket) { // Don't set a timeout yet. Timeout for control sockets is only active during a task, see handle() below. socket.setTimeout(0); socket.setKeepAlive(true); socket.on("data", data => this._onControlSocketData(data)); this._setupErrorHandlers(socket, "control"); } else { this._closeSocket(this._socket); } this._socket = socket; } /** @type {(Socket | undefined)} */ get dataSocket() { return this._dataSocket; } /** * Set the socket for the data connection. This will automatically close the former data socket. * * @type {(Socket | undefined)} **/ set dataSocket(socket) { this._closeSocket(this._dataSocket); if (socket) { socket.setTimeout(this._timeout); this._setupErrorHandlers(socket, "data"); } this._dataSocket = socket; } /** * Send an FTP command without waiting for or handling the result. * * @param {string} command */ send(command) { // Don't log passwords. const message = command.startsWith("PASS") ? "> PASS ###" : `> ${command}`; this.log(message); this._socket.write(command + "\r\n", this.encoding); } /** * Log message if set to be verbose. * * @param {string} message */ log(message) { if (this.verbose) { console.log(message); } } /** * Enable timeout on the control socket connection. Disabling it can be useful if * a timeout should be caught by the current data connection instead of the * control connection that sits idle during transfers anyway. * * @param {boolean} enabled */ enableControlTimeout(enabled) { this.socket.setTimeout(enabled ? 
this._timeout : 0); } /** * Return true if the control socket is using TLS. This does not mean that a session * has already been negotiated. * * @returns {boolean} */ get hasTLS() { //@ts-ignore that not every socket has property encrypted. return this._socket && this._socket.encrypted === true; } /** * Send an FTP command and handle any response until the new task is resolved. This returns a Promise that * will hold whatever the handler passed on when resolving/rejecting its task. * * @param {string} command * @param {ResponseHandler} handler * @returns {Promise<any>} */ handle(command, handler) { if (this._handler !== undefined) { this.close(); throw new Error("There is still a task running. Did you forget to use '.then()' or 'await'?"); } // Only track control socket timeout during the lifecycle of a task associated with a handler. // That way we avoid timeouts on idle sockets, a behaviour that is not expected by most users. this.enableControlTimeout(true); return new Promise((resolvePromise, rejectPromise) => { this._handler = handler; this._task = { // When resolving or rejecting we also want the handler // to no longer receive any responses or errors. resolve: (...args) => { this._stopTrackingTask(); resolvePromise(...args); }, reject: (...args) => { this._stopTrackingTask(); rejectPromise(...args); } }; if (command !== undefined) { this.send(command); } }); } /** * Removes reference to current task and handler. This won't resolve or reject the task. */ _stopTrackingTask() { // Disable timeout on control socket if there is no task active. this.enableControlTimeout(false); this._task = undefined; this._handler = undefined; } /** * Handle incoming data on the control socket. * * @private * @param {Buffer} data */ _onControlSocketData(data) { let response = data.toString(this.encoding).trim(); this.log(`< ${response}`); // This response might complete an earlier partial response. 
response = this._partialResponse + response; const parsed = parseControlResponse(response); // Remember any incomplete remainder. this._partialResponse = parsed.rest; // Each response group is passed along individually. for (const message of parsed.messages) { const code = parseInt(message.substr(0, 3), 10); this._passToHandler({ code, message }); } } /** * Send the current handler a response. This is usually a control socket response * or a socket event, like an error or timeout. * * @private * @param {Object} response */ _passToHandler(response) { if (this._handler) { this._handler(response, this._task); } } /** * Reset the state of this context. * * @private */ _reset() { this.log("Closing connections."); this._stopTrackingTask(); this._partialResponse = ""; this._closeSocket(this._socket); this._closeSocket(this._dataSocket); // Set a new socket instance to make reconnecting possible. this.socket = new Socket(); } /** * Send an error to the current handler and close all connections. * * @private * @param {*} error */ _closeWithError(error) { this.log(error); this._passToHandler({ error }); this._reset(); } /** * Close a socket. * * @private * @param {(Socket | undefined)} socket */ _closeSocket(socket) { if (socket) { socket.destroy(); this._removeSocketListeners(socket); } } /** * Setup all error handlers for a socket. * * @private * @param {Socket} socket * @param {string} identifier */ _setupErrorHandlers(socket, identifier) { socket.once("error", error => this._closeWithError({ ...error, ftpSocket: identifier })); socket.once("timeout", () => this._closeWithError({ info: "socket timeout", ftpSocket: identifier })); socket.once("close", hadError => { if (hadError) { this._closeWithError({ info: "socket closed due to transmission error", ftpSocket: identifier}); } }); } /** * Remove all default listeners for socket. 
* * @private * @param {Socket} socket */ _removeSocketListeners(socket) { // socket.removeAllListeners() without name doesn't work: https://github.com/nodejs/node/issues/20923 socket.removeAllListeners("timeout"); socket.removeAllListeners("data"); socket.removeAllListeners("error"); socket.removeAllListeners("close"); socket.removeAllListeners("connect"); } };
socket
identifier_name
FtpContext.js
"use strict"; const Socket = require("net").Socket; const parseControlResponse = require("./parseControlResponse"); /** * @typedef {Object} Task * @property {(...args: any[]) => void} resolve - Resolves the task. * @property {(...args: any[]) => void} reject - Rejects the task. */ /** * @typedef {(response: Object, task: Task) => void} ResponseHandler */ /** * FTPContext holds the control and data sockets of an FTP connection and provides a * simplified way to interact with an FTP server, handle responses, errors and timeouts. * * It doesn't implement or use any FTP commands. It's only a foundation to make writing an FTP * client as easy as possible. You won't usually instantiate this, but use `Client`. */ module.exports = class FTPContext { /** * Instantiate an FTP context. * * @param {number} [timeout=0] - Timeout in milliseconds to apply to control and data connections. Use 0 for no timeout. * @param {string} [encoding="utf8"] - Encoding to use for control connection. UTF-8 by default. Use "latin1" for older servers. */ constructor(timeout = 0, encoding = "utf8")
/** * Close the context by resetting its state. */ close() { this._passToHandler({ error: { info: "User closed client during task." }}); this._reset(); } /** @type {Socket} */ get socket() { return this._socket; } /** * Set the socket for the control connection. This will only close the current control socket * if the new one is set to `undefined` because you're most likely to be upgrading an existing * control connection that continues to be used. * * @type {Socket} */ set socket(socket) { // No data socket should be open in any case where the control socket is set or upgraded. this.dataSocket = undefined; if (this._socket) { this._removeSocketListeners(this._socket); } if (socket) { // Don't set a timeout yet. Timeout for control sockets is only active during a task, see handle() below. socket.setTimeout(0); socket.setKeepAlive(true); socket.on("data", data => this._onControlSocketData(data)); this._setupErrorHandlers(socket, "control"); } else { this._closeSocket(this._socket); } this._socket = socket; } /** @type {(Socket | undefined)} */ get dataSocket() { return this._dataSocket; } /** * Set the socket for the data connection. This will automatically close the former data socket. * * @type {(Socket | undefined)} **/ set dataSocket(socket) { this._closeSocket(this._dataSocket); if (socket) { socket.setTimeout(this._timeout); this._setupErrorHandlers(socket, "data"); } this._dataSocket = socket; } /** * Send an FTP command without waiting for or handling the result. * * @param {string} command */ send(command) { // Don't log passwords. const message = command.startsWith("PASS") ? "> PASS ###" : `> ${command}`; this.log(message); this._socket.write(command + "\r\n", this.encoding); } /** * Log message if set to be verbose. * * @param {string} message */ log(message) { if (this.verbose) { console.log(message); } } /** * Enable timeout on the control socket connection. 
Disabling it can be useful if * a timeout should be caught by the current data connection instead of the * control connection that sits idle during transfers anyway. * * @param {boolean} enabled */ enableControlTimeout(enabled) { this.socket.setTimeout(enabled ? this._timeout : 0); } /** * Return true if the control socket is using TLS. This does not mean that a session * has already been negotiated. * * @returns {boolean} */ get hasTLS() { //@ts-ignore that not every socket has property encrypted. return this._socket && this._socket.encrypted === true; } /** * Send an FTP command and handle any response until the new task is resolved. This returns a Promise that * will hold whatever the handler passed on when resolving/rejecting its task. * * @param {string} command * @param {ResponseHandler} handler * @returns {Promise<any>} */ handle(command, handler) { if (this._handler !== undefined) { this.close(); throw new Error("There is still a task running. Did you forget to use '.then()' or 'await'?"); } // Only track control socket timeout during the lifecycle of a task associated with a handler. // That way we avoid timeouts on idle sockets, a behaviour that is not expected by most users. this.enableControlTimeout(true); return new Promise((resolvePromise, rejectPromise) => { this._handler = handler; this._task = { // When resolving or rejecting we also want the handler // to no longer receive any responses or errors. resolve: (...args) => { this._stopTrackingTask(); resolvePromise(...args); }, reject: (...args) => { this._stopTrackingTask(); rejectPromise(...args); } }; if (command !== undefined) { this.send(command); } }); } /** * Removes reference to current task and handler. This won't resolve or reject the task. */ _stopTrackingTask() { // Disable timeout on control socket if there is no task active. this.enableControlTimeout(false); this._task = undefined; this._handler = undefined; } /** * Handle incoming data on the control socket. 
* * @private * @param {Buffer} data */ _onControlSocketData(data) { let response = data.toString(this.encoding).trim(); this.log(`< ${response}`); // This response might complete an earlier partial response. response = this._partialResponse + response; const parsed = parseControlResponse(response); // Remember any incomplete remainder. this._partialResponse = parsed.rest; // Each response group is passed along individually. for (const message of parsed.messages) { const code = parseInt(message.substr(0, 3), 10); this._passToHandler({ code, message }); } } /** * Send the current handler a response. This is usually a control socket response * or a socket event, like an error or timeout. * * @private * @param {Object} response */ _passToHandler(response) { if (this._handler) { this._handler(response, this._task); } } /** * Reset the state of this context. * * @private */ _reset() { this.log("Closing connections."); this._stopTrackingTask(); this._partialResponse = ""; this._closeSocket(this._socket); this._closeSocket(this._dataSocket); // Set a new socket instance to make reconnecting possible. this.socket = new Socket(); } /** * Send an error to the current handler and close all connections. * * @private * @param {*} error */ _closeWithError(error) { this.log(error); this._passToHandler({ error }); this._reset(); } /** * Close a socket. * * @private * @param {(Socket | undefined)} socket */ _closeSocket(socket) { if (socket) { socket.destroy(); this._removeSocketListeners(socket); } } /** * Setup all error handlers for a socket. 
* * @private * @param {Socket} socket * @param {string} identifier */ _setupErrorHandlers(socket, identifier) { socket.once("error", error => this._closeWithError({ ...error, ftpSocket: identifier })); socket.once("timeout", () => this._closeWithError({ info: "socket timeout", ftpSocket: identifier })); socket.once("close", hadError => { if (hadError) { this._closeWithError({ info: "socket closed due to transmission error", ftpSocket: identifier}); } }); } /** * Remove all default listeners for socket. * * @private * @param {Socket} socket */ _removeSocketListeners(socket) { // socket.removeAllListeners() without name doesn't work: https://github.com/nodejs/node/issues/20923 socket.removeAllListeners("timeout"); socket.removeAllListeners("data"); socket.removeAllListeners("error"); socket.removeAllListeners("close"); socket.removeAllListeners("connect"); } };
{ /** * Timeout applied to all connections. * @private * @type {number} */ this._timeout = timeout; /** * Current task to be resolved or rejected. * @private * @type {(Task | undefined)} */ this._task = undefined; /** * Function that handles incoming messages and resolves or rejects a task. * @private * @type {(ResponseHandler | undefined)} */ this._handler = undefined; /** * A multiline response might be received as multiple chunks. * @private * @type {string} */ this._partialResponse = ""; /** * The encoding used when reading from and writing to the control socket. * @type {string} */ this.encoding = encoding; /** * Options for TLS connections. * @type {import("tls").ConnectionOptions} */ this.tlsOptions = {}; /** * IP version to prefer (4: IPv4, 6: IPv6). * @type {(string | undefined)} */ this.ipFamily = undefined; /** * Log every communication detail. * @type {boolean} */ this.verbose = false; /** * The control connection to the FTP server. * @type {Socket} */ this.socket = new Socket(); /** * The current data connection to the FTP server. * @type {(Socket | undefined)} */ this.dataSocket = undefined; }
identifier_body
FtpContext.js
"use strict"; const Socket = require("net").Socket; const parseControlResponse = require("./parseControlResponse"); /** * @typedef {Object} Task * @property {(...args: any[]) => void} resolve - Resolves the task. * @property {(...args: any[]) => void} reject - Rejects the task. */ /** * @typedef {(response: Object, task: Task) => void} ResponseHandler */ /** * FTPContext holds the control and data sockets of an FTP connection and provides a * simplified way to interact with an FTP server, handle responses, errors and timeouts. * * It doesn't implement or use any FTP commands. It's only a foundation to make writing an FTP * client as easy as possible. You won't usually instantiate this, but use `Client`. */ module.exports = class FTPContext { /** * Instantiate an FTP context. * * @param {number} [timeout=0] - Timeout in milliseconds to apply to control and data connections. Use 0 for no timeout. * @param {string} [encoding="utf8"] - Encoding to use for control connection. UTF-8 by default. Use "latin1" for older servers. */ constructor(timeout = 0, encoding = "utf8") { /** * Timeout applied to all connections. * @private * @type {number} */ this._timeout = timeout; /** * Current task to be resolved or rejected. * @private * @type {(Task | undefined)} */ this._task = undefined; /** * Function that handles incoming messages and resolves or rejects a task. * @private * @type {(ResponseHandler | undefined)} */ this._handler = undefined; /** * A multiline response might be received as multiple chunks. * @private * @type {string} */ this._partialResponse = ""; /** * The encoding used when reading from and writing to the control socket. * @type {string} */ this.encoding = encoding; /** * Options for TLS connections. * @type {import("tls").ConnectionOptions} */ this.tlsOptions = {}; /** * IP version to prefer (4: IPv4, 6: IPv6). * @type {(string | undefined)} */ this.ipFamily = undefined; /** * Log every communication detail. 
* @type {boolean} */ this.verbose = false; /** * The control connection to the FTP server. * @type {Socket} */ this.socket = new Socket(); /** * The current data connection to the FTP server. * @type {(Socket | undefined)} */ this.dataSocket = undefined; } /** * Close the context by resetting its state. */ close() { this._passToHandler({ error: { info: "User closed client during task." }}); this._reset(); } /** @type {Socket} */ get socket() { return this._socket; } /** * Set the socket for the control connection. This will only close the current control socket * if the new one is set to `undefined` because you're most likely to be upgrading an existing * control connection that continues to be used. * * @type {Socket} */ set socket(socket) { // No data socket should be open in any case where the control socket is set or upgraded. this.dataSocket = undefined; if (this._socket) { this._removeSocketListeners(this._socket); } if (socket) { // Don't set a timeout yet. Timeout for control sockets is only active during a task, see handle() below. socket.setTimeout(0); socket.setKeepAlive(true); socket.on("data", data => this._onControlSocketData(data)); this._setupErrorHandlers(socket, "control"); } else { this._closeSocket(this._socket); } this._socket = socket; } /** @type {(Socket | undefined)} */ get dataSocket() {
} /** * Set the socket for the data connection. This will automatically close the former data socket. * * @type {(Socket | undefined)} **/ set dataSocket(socket) { this._closeSocket(this._dataSocket); if (socket) { socket.setTimeout(this._timeout); this._setupErrorHandlers(socket, "data"); } this._dataSocket = socket; } /** * Send an FTP command without waiting for or handling the result. * * @param {string} command */ send(command) { // Don't log passwords. const message = command.startsWith("PASS") ? "> PASS ###" : `> ${command}`; this.log(message); this._socket.write(command + "\r\n", this.encoding); } /** * Log message if set to be verbose. * * @param {string} message */ log(message) { if (this.verbose) { console.log(message); } } /** * Enable timeout on the control socket connection. Disabling it can be useful if * a timeout should be caught by the current data connection instead of the * control connection that sits idle during transfers anyway. * * @param {boolean} enabled */ enableControlTimeout(enabled) { this.socket.setTimeout(enabled ? this._timeout : 0); } /** * Return true if the control socket is using TLS. This does not mean that a session * has already been negotiated. * * @returns {boolean} */ get hasTLS() { //@ts-ignore that not every socket has property encrypted. return this._socket && this._socket.encrypted === true; } /** * Send an FTP command and handle any response until the new task is resolved. This returns a Promise that * will hold whatever the handler passed on when resolving/rejecting its task. * * @param {string} command * @param {ResponseHandler} handler * @returns {Promise<any>} */ handle(command, handler) { if (this._handler !== undefined) { this.close(); throw new Error("There is still a task running. Did you forget to use '.then()' or 'await'?"); } // Only track control socket timeout during the lifecycle of a task associated with a handler. 
// That way we avoid timeouts on idle sockets, a behaviour that is not expected by most users. this.enableControlTimeout(true); return new Promise((resolvePromise, rejectPromise) => { this._handler = handler; this._task = { // When resolving or rejecting we also want the handler // to no longer receive any responses or errors. resolve: (...args) => { this._stopTrackingTask(); resolvePromise(...args); }, reject: (...args) => { this._stopTrackingTask(); rejectPromise(...args); } }; if (command !== undefined) { this.send(command); } }); } /** * Removes reference to current task and handler. This won't resolve or reject the task. */ _stopTrackingTask() { // Disable timeout on control socket if there is no task active. this.enableControlTimeout(false); this._task = undefined; this._handler = undefined; } /** * Handle incoming data on the control socket. * * @private * @param {Buffer} data */ _onControlSocketData(data) { let response = data.toString(this.encoding).trim(); this.log(`< ${response}`); // This response might complete an earlier partial response. response = this._partialResponse + response; const parsed = parseControlResponse(response); // Remember any incomplete remainder. this._partialResponse = parsed.rest; // Each response group is passed along individually. for (const message of parsed.messages) { const code = parseInt(message.substr(0, 3), 10); this._passToHandler({ code, message }); } } /** * Send the current handler a response. This is usually a control socket response * or a socket event, like an error or timeout. * * @private * @param {Object} response */ _passToHandler(response) { if (this._handler) { this._handler(response, this._task); } } /** * Reset the state of this context. * * @private */ _reset() { this.log("Closing connections."); this._stopTrackingTask(); this._partialResponse = ""; this._closeSocket(this._socket); this._closeSocket(this._dataSocket); // Set a new socket instance to make reconnecting possible. 
this.socket = new Socket(); } /** * Send an error to the current handler and close all connections. * * @private * @param {*} error */ _closeWithError(error) { this.log(error); this._passToHandler({ error }); this._reset(); } /** * Close a socket. * * @private * @param {(Socket | undefined)} socket */ _closeSocket(socket) { if (socket) { socket.destroy(); this._removeSocketListeners(socket); } } /** * Setup all error handlers for a socket. * * @private * @param {Socket} socket * @param {string} identifier */ _setupErrorHandlers(socket, identifier) { socket.once("error", error => this._closeWithError({ ...error, ftpSocket: identifier })); socket.once("timeout", () => this._closeWithError({ info: "socket timeout", ftpSocket: identifier })); socket.once("close", hadError => { if (hadError) { this._closeWithError({ info: "socket closed due to transmission error", ftpSocket: identifier}); } }); } /** * Remove all default listeners for socket. * * @private * @param {Socket} socket */ _removeSocketListeners(socket) { // socket.removeAllListeners() without name doesn't work: https://github.com/nodejs/node/issues/20923 socket.removeAllListeners("timeout"); socket.removeAllListeners("data"); socket.removeAllListeners("error"); socket.removeAllListeners("close"); socket.removeAllListeners("connect"); } };
return this._dataSocket;
random_line_split
custom_fingers.py
""" 6.12.2014 by real Testing the idea of routing in a mesh using a Virtual DHT. Inspired by "Pushing Chord into the Underlay". """ import random import heapq import bisect from collections import namedtuple # Number of bits in ident number: IDENT_BITS = 40 # Maximum possible identity value. # Note that this value isn't really the maximum. It is maximum + 1. MAX_IDENT = 2**IDENT_BITS # Fingers we are interested in: SUCC_FINGERS = [0] PRED_FINGERS = [0] # SUCC_FINGNERS = list(range(IDENT_BITS)) # PRED_FINGERS = list(range(IDENT_BITS)) # A named tuple for Known node: # path_len is the path length source node, # ident is the identity value of the Known node. # lindex is the list index of the Known node. Knode = namedtuple('Knode', ['path_len', 'ident','lindex']) def rand_ident(): """ Generate random identity in the range [0,MAX_IDENT) """ return random.randrange(MAX_IDENT) def dist_ident(x,y): """ Distance between two nodes (According to ident): """ return (y - x) % MAX_IDENT
def remove_knodes_duplicates(knodes): """ Go over a list of knodes, and remove knodes that show up more than once. In case of node ident showing more than once, we pick the shorter path. """ if len(knodes) == 0: return knodes knodes.sort(key=lambda kn:(kn.ident,kn.path_len)) # Resulting array cur_ident = knodes[0].ident res = [knodes[0]] for kn in knodes[1:]: if kn.ident != cur_ident: cur_ident = kn.ident res.append(kn) return res # A node: class Node(): def __init__(self,fk,ident=None): """ Initialize a node. """ # If ident value is not specified, we randomize one: if ident is None: self.ident = rand_ident() else: self.ident = ident # Argument related to amount of known best finger candidates. self.fk = fk # Initialize list of known nodes: self.neighbours = [] self.best_finger_succ = [list() for f in range(IDENT_BITS)] self.best_finger_pred = [list() for f in range(IDENT_BITS)] def get_finger_succ_loc(self,f): """ Get the exact location of successor finger f. """ return (self.ident + 2**f) % MAX_IDENT def get_finger_pred_loc(self,f): """ Get the exact location of predecessor finger f. """ return (self.ident - 2**f) % MAX_IDENT def set_neighbours(self,knodes): """ set knodes to be the neighbours of this Node. """ self.neighbours = [] for kn in knodes: # Make sure we don't have ourselves as a neighbour: if kn.ident == self.ident: continue # A neighbour has a path length 1: self.neighbours.append(\ kn._replace(path_len=1)) # Update known nodes: self.add_known_nodes(0,self.neighbours) def add_known_best_finger_succ(self,f,knodes): """ If any of the nodes in knodes is a better candidate for the f's successor finger, we replace. 
""" pool = remove_knodes_duplicates(self.best_finger_succ[f] + knodes) self.best_finger_succ[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\ (dist_ident(self.get_finger_succ_loc(f),kn.ident),kn.path_len)) def add_known_best_finger_pred(self,f,knodes): """ If any of the nodes in knodes is a better candidate for the f's predecessor finger, we replace. """ pool = remove_knodes_duplicates(self.best_finger_pred[f] + knodes) self.best_finger_pred[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\ (dist_ident(kn.ident,self.get_finger_pred_loc(f)),kn.path_len)) def add_known_nodes(self,source_path_len,knodes): """ Add a set of known nodes to self.known . Take the change of path_len into acount. """ # Update the path lengths: updated_knodes = [kn._replace(path_len=kn.path_len+source_path_len)\ for kn in knodes] # Make sure the node self.ident is not inside: updated_knodes = list(filter(lambda kn:kn.ident != self.ident,\ updated_knodes)) for f in SUCC_FINGERS: self.add_known_best_finger_succ(f,updated_knodes) for f in PRED_FINGERS: self.add_known_best_finger_pred(f,updated_knodes) def get_known(self): """ Return a list of all known nodes. Items in the list are unique. """ pool = set() # Add neighbours: pool.update(self.neighbours) # Add fingers: for f in SUCC_FINGERS: pool.update(self.best_finger_succ[f]) for f in PRED_FINGERS: pool.update(self.best_finger_pred[f]) return list(pool) def get_close(self): """ Return a list of the closest known nodes. Close in the virtual sense, to self.ident, and to the possible fingers on the Chord DHT. """ pool = set() for f in SUCC_FINGERS: pool.update(self.best_finger_succ[f]) for f in PRED_FINGERS: pool.update(self.best_finger_pred[f]) return list(pool) def get_best_succ_finger(self,f): """ Get the best successor for finger f. """ return min(self.best_finger_succ[f],\ key=lambda kn:dist_ident(self.get_finger_succ_loc(f),kn.ident)) def get_best_pred_finger(self,f): """ Get the best predecessor for finger f. 
""" return min(self.best_finger_pred[f],\ key=lambda kn:dist_ident(kn.ident,self.get_finger_pred_loc(f))) # Simulation for a mesh network with Virtual DHT abilities: class VirtualDHT(): def __init__(self,n,fk,nei): # Amount of nodes: self.num_nodes = n # Half amount of neighbours per node: self.nei = nei # Known finger nodes parameter: self.fk = fk # Generate nodes and neighbours links: self.gen_nodes() self.rand_neighbours() def gen_nodes(self): """ Generate n nodes with random identity numbers. """ self.nodes = [] for i in range(self.num_nodes): self.nodes.append(Node(self.fk)) def make_knode(self,i,path_len=0): """ Given an index i of a node in self.nodes, create a Knode tuple. Optionally set path_len. """ return Knode(path_len=path_len,\ ident=self.nodes[i].ident,\ lindex=i) def rand_neighbours(self): """ Randomize immediate neighbours links between the nodes. """ # Initialize neighbours sets as empty sets: nodes_nei = [set() for _ in range(self.num_nodes)] for i,nd in enumerate(self.nodes): # Sample a set of indices (Which represent a set of nodes). # Those nodes will be nd's neighbours: nodes_nei[i].update(\ random.sample(range(self.num_nodes),self.nei)) # Remove myself: nodes_nei[i].discard(i) # To make the graph undirected, we add i to be neighbour of all # i's neighbours: for j in nodes_nei[i]: nodes_nei[j].add(i) for i,nd in enumerate(self.nodes): # Initialize a list of neighbours: nd.set_neighbours(map(self.make_knode,list(nodes_nei[i]))) def iter_node(self,i): """ Ask all known nodes for better known nodes. i is the index of the node in self.nodes. """ nd = self.nodes[i] for kn in nd.get_close(): # for kn in nd.get_known(): # for kn in nd.neighbours: kn_node = self.nodes[kn.lindex] nd.add_known_nodes(kn.path_len,kn_node.get_close()) def iter_all(self): """ Perform a full iteration, where all nodes ask other nodes for better nodes. 
""" for i in range(self.num_nodes): self.iter_node(i) def converge(self,max_iters=0x10): """ "converge" the DHT by iterating until nothing changes. """ for i in range(max_iters): self.iter_all() print(".",end="",flush=True) if self.verify(): print("\nReached correct succ and pred + fingers.") return print("\nmax_iters acheived.") def verify_succ_pred_fingers(self): """ Verify the succ and pred fingers found for all nodes. """ # Get all nodes (as Knodes), and sort them according to ident: lnodes = list(map(self.make_knode,range(self.num_nodes))) lnodes.sort(key=lambda ln:ln.ident) idents = [ln.ident for ln in lnodes] for i,ln in enumerate(lnodes): nd = self.nodes[ln.lindex] for f in SUCC_FINGERS: ind = bisect.bisect_left(\ idents,nd.get_finger_succ_loc(f)) f_succ = lnodes[(ind) % self.num_nodes] if nd.get_best_succ_finger(f).ident != f_succ.ident: return False for f in PRED_FINGERS: ind = bisect.bisect_right(\ idents,nd.get_finger_pred_loc(f)) f_pred = lnodes[(ind-1) % self.num_nodes] if nd.get_best_pred_finger(f).ident != f_pred.ident: return False return True def verify(self): """ Verify all the found nodes. """ if not self.verify_succ_pred_fingers(): return False return True def sample_path_len(self,num_samp=0x200): """ Find an approximated average to the path_len to successor and predecessor. 
""" sum_finger_path = 0.0 # We don't want to sample more than the total amount of nodes: num_samp = min([num_samp,self.num_nodes]) snodes = random.sample(self.nodes,num_samp) for sn in snodes: for f in SUCC_FINGERS: sum_finger_path += sn.get_best_succ_finger(f).path_len for f in PRED_FINGERS: sum_finger_path += sn.get_best_pred_finger(f).path_len num_fingers = len(SUCC_FINGERS) + len(PRED_FINGERS) return sum_finger_path/(num_samp * num_fingers) def go(): print("SUCC_FINGERS: ",SUCC_FINGERS) print("PRED_FINGERS: ",PRED_FINGERS) for i in range(7,16): print("i =",i) nei = i # amount of neighbours fk = i # fk = i # fk = 1 n = 2**i vd = VirtualDHT(n,fk=fk,nei=nei) vd.converge(max_iters=0x80) print(vd.sample_path_len()) if __name__ == "__main__": go()
random_line_split
custom_fingers.py
""" 6.12.2014 by real Testing the idea of routing in a mesh using a Virtual DHT. Inspired by "Pushing Chord into the Underlay". """ import random import heapq import bisect from collections import namedtuple # Number of bits in ident number: IDENT_BITS = 40 # Maximum possible identity value. # Note that this value isn't really the maximum. It is maximum + 1. MAX_IDENT = 2**IDENT_BITS # Fingers we are interested in: SUCC_FINGERS = [0] PRED_FINGERS = [0] # SUCC_FINGNERS = list(range(IDENT_BITS)) # PRED_FINGERS = list(range(IDENT_BITS)) # A named tuple for Known node: # path_len is the path length source node, # ident is the identity value of the Known node. # lindex is the list index of the Known node. Knode = namedtuple('Knode', ['path_len', 'ident','lindex']) def rand_ident(): """ Generate random identity in the range [0,MAX_IDENT) """ return random.randrange(MAX_IDENT) def dist_ident(x,y): """ Distance between two nodes (According to ident): """ return (y - x) % MAX_IDENT def remove_knodes_duplicates(knodes): """ Go over a list of knodes, and remove knodes that show up more than once. In case of node ident showing more than once, we pick the shorter path. """ if len(knodes) == 0: return knodes knodes.sort(key=lambda kn:(kn.ident,kn.path_len)) # Resulting array cur_ident = knodes[0].ident res = [knodes[0]] for kn in knodes[1:]: if kn.ident != cur_ident: cur_ident = kn.ident res.append(kn) return res # A node: class Node(): def __init__(self,fk,ident=None): """ Initialize a node. """ # If ident value is not specified, we randomize one: if ident is None: self.ident = rand_ident() else: self.ident = ident # Argument related to amount of known best finger candidates. self.fk = fk # Initialize list of known nodes: self.neighbours = [] self.best_finger_succ = [list() for f in range(IDENT_BITS)] self.best_finger_pred = [list() for f in range(IDENT_BITS)] def get_finger_succ_loc(self,f): """ Get the exact location of successor finger f. 
""" return (self.ident + 2**f) % MAX_IDENT def get_finger_pred_loc(self,f): """ Get the exact location of predecessor finger f. """ return (self.ident - 2**f) % MAX_IDENT def set_neighbours(self,knodes): """ set knodes to be the neighbours of this Node. """ self.neighbours = [] for kn in knodes: # Make sure we don't have ourselves as a neighbour: if kn.ident == self.ident: continue # A neighbour has a path length 1: self.neighbours.append(\ kn._replace(path_len=1)) # Update known nodes: self.add_known_nodes(0,self.neighbours) def add_known_best_finger_succ(self,f,knodes): """ If any of the nodes in knodes is a better candidate for the f's successor finger, we replace. """ pool = remove_knodes_duplicates(self.best_finger_succ[f] + knodes) self.best_finger_succ[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\ (dist_ident(self.get_finger_succ_loc(f),kn.ident),kn.path_len)) def add_known_best_finger_pred(self,f,knodes): """ If any of the nodes in knodes is a better candidate for the f's predecessor finger, we replace. """ pool = remove_knodes_duplicates(self.best_finger_pred[f] + knodes) self.best_finger_pred[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\ (dist_ident(kn.ident,self.get_finger_pred_loc(f)),kn.path_len)) def add_known_nodes(self,source_path_len,knodes): """ Add a set of known nodes to self.known . Take the change of path_len into acount. """ # Update the path lengths: updated_knodes = [kn._replace(path_len=kn.path_len+source_path_len)\ for kn in knodes] # Make sure the node self.ident is not inside: updated_knodes = list(filter(lambda kn:kn.ident != self.ident,\ updated_knodes)) for f in SUCC_FINGERS: self.add_known_best_finger_succ(f,updated_knodes) for f in PRED_FINGERS: self.add_known_best_finger_pred(f,updated_knodes) def get_known(self): """ Return a list of all known nodes. Items in the list are unique. 
""" pool = set() # Add neighbours: pool.update(self.neighbours) # Add fingers: for f in SUCC_FINGERS: pool.update(self.best_finger_succ[f]) for f in PRED_FINGERS: pool.update(self.best_finger_pred[f]) return list(pool) def get_close(self): """ Return a list of the closest known nodes. Close in the virtual sense, to self.ident, and to the possible fingers on the Chord DHT. """ pool = set() for f in SUCC_FINGERS: pool.update(self.best_finger_succ[f]) for f in PRED_FINGERS: pool.update(self.best_finger_pred[f]) return list(pool) def get_best_succ_finger(self,f): """ Get the best successor for finger f. """ return min(self.best_finger_succ[f],\ key=lambda kn:dist_ident(self.get_finger_succ_loc(f),kn.ident)) def get_best_pred_finger(self,f): """ Get the best predecessor for finger f. """ return min(self.best_finger_pred[f],\ key=lambda kn:dist_ident(kn.ident,self.get_finger_pred_loc(f))) # Simulation for a mesh network with Virtual DHT abilities: class VirtualDHT(): def __init__(self,n,fk,nei): # Amount of nodes: self.num_nodes = n # Half amount of neighbours per node: self.nei = nei # Known finger nodes parameter: self.fk = fk # Generate nodes and neighbours links: self.gen_nodes() self.rand_neighbours() def gen_nodes(self): """ Generate n nodes with random identity numbers. """ self.nodes = [] for i in range(self.num_nodes): self.nodes.append(Node(self.fk)) def make_knode(self,i,path_len=0): """ Given an index i of a node in self.nodes, create a Knode tuple. Optionally set path_len. """ return Knode(path_len=path_len,\ ident=self.nodes[i].ident,\ lindex=i) def rand_neighbours(self): """ Randomize immediate neighbours links between the nodes. """ # Initialize neighbours sets as empty sets: nodes_nei = [set() for _ in range(self.num_nodes)] for i,nd in enumerate(self.nodes): # Sample a set of indices (Which represent a set of nodes). 
# Those nodes will be nd's neighbours: nodes_nei[i].update(\ random.sample(range(self.num_nodes),self.nei)) # Remove myself: nodes_nei[i].discard(i) # To make the graph undirected, we add i to be neighbour of all # i's neighbours: for j in nodes_nei[i]: nodes_nei[j].add(i) for i,nd in enumerate(self.nodes): # Initialize a list of neighbours: nd.set_neighbours(map(self.make_knode,list(nodes_nei[i]))) def iter_node(self,i): """ Ask all known nodes for better known nodes. i is the index of the node in self.nodes. """ nd = self.nodes[i] for kn in nd.get_close(): # for kn in nd.get_known(): # for kn in nd.neighbours: kn_node = self.nodes[kn.lindex] nd.add_known_nodes(kn.path_len,kn_node.get_close()) def iter_all(self): """ Perform a full iteration, where all nodes ask other nodes for better nodes. """ for i in range(self.num_nodes): self.iter_node(i) def converge(self,max_iters=0x10): """ "converge" the DHT by iterating until nothing changes. """ for i in range(max_iters): self.iter_all() print(".",end="",flush=True) if self.verify(): print("\nReached correct succ and pred + fingers.") return print("\nmax_iters acheived.") def verify_succ_pred_fingers(self): """ Verify the succ and pred fingers found for all nodes. """ # Get all nodes (as Knodes), and sort them according to ident: lnodes = list(map(self.make_knode,range(self.num_nodes))) lnodes.sort(key=lambda ln:ln.ident) idents = [ln.ident for ln in lnodes] for i,ln in enumerate(lnodes): nd = self.nodes[ln.lindex] for f in SUCC_FINGERS: ind = bisect.bisect_left(\ idents,nd.get_finger_succ_loc(f)) f_succ = lnodes[(ind) % self.num_nodes] if nd.get_best_succ_finger(f).ident != f_succ.ident: return False for f in PRED_FINGERS: ind = bisect.bisect_right(\ idents,nd.get_finger_pred_loc(f)) f_pred = lnodes[(ind-1) % self.num_nodes] if nd.get_best_pred_finger(f).ident != f_pred.ident: return False return True def verify(self): """ Verify all the found nodes. 
""" if not self.verify_succ_pred_fingers(): return False return True def sample_path_len(self,num_samp=0x200): """ Find an approximated average to the path_len to successor and predecessor. """ sum_finger_path = 0.0 # We don't want to sample more than the total amount of nodes: num_samp = min([num_samp,self.num_nodes]) snodes = random.sample(self.nodes,num_samp) for sn in snodes:
num_fingers = len(SUCC_FINGERS) + len(PRED_FINGERS) return sum_finger_path/(num_samp * num_fingers) def go(): print("SUCC_FINGERS: ",SUCC_FINGERS) print("PRED_FINGERS: ",PRED_FINGERS) for i in range(7,16): print("i =",i) nei = i # amount of neighbours fk = i # fk = i # fk = 1 n = 2**i vd = VirtualDHT(n,fk=fk,nei=nei) vd.converge(max_iters=0x80) print(vd.sample_path_len()) if __name__ == "__main__": go()
for f in SUCC_FINGERS: sum_finger_path += sn.get_best_succ_finger(f).path_len for f in PRED_FINGERS: sum_finger_path += sn.get_best_pred_finger(f).path_len
conditional_block
custom_fingers.py
""" 6.12.2014 by real Testing the idea of routing in a mesh using a Virtual DHT. Inspired by "Pushing Chord into the Underlay". """ import random import heapq import bisect from collections import namedtuple # Number of bits in ident number: IDENT_BITS = 40 # Maximum possible identity value. # Note that this value isn't really the maximum. It is maximum + 1. MAX_IDENT = 2**IDENT_BITS # Fingers we are interested in: SUCC_FINGERS = [0] PRED_FINGERS = [0] # SUCC_FINGNERS = list(range(IDENT_BITS)) # PRED_FINGERS = list(range(IDENT_BITS)) # A named tuple for Known node: # path_len is the path length source node, # ident is the identity value of the Known node. # lindex is the list index of the Known node. Knode = namedtuple('Knode', ['path_len', 'ident','lindex']) def rand_ident(): """ Generate random identity in the range [0,MAX_IDENT) """ return random.randrange(MAX_IDENT) def dist_ident(x,y): """ Distance between two nodes (According to ident): """ return (y - x) % MAX_IDENT def remove_knodes_duplicates(knodes): """ Go over a list of knodes, and remove knodes that show up more than once. In case of node ident showing more than once, we pick the shorter path. """ if len(knodes) == 0: return knodes knodes.sort(key=lambda kn:(kn.ident,kn.path_len)) # Resulting array cur_ident = knodes[0].ident res = [knodes[0]] for kn in knodes[1:]: if kn.ident != cur_ident: cur_ident = kn.ident res.append(kn) return res # A node: class Node(): def __init__(self,fk,ident=None): """ Initialize a node. """ # If ident value is not specified, we randomize one: if ident is None: self.ident = rand_ident() else: self.ident = ident # Argument related to amount of known best finger candidates. self.fk = fk # Initialize list of known nodes: self.neighbours = [] self.best_finger_succ = [list() for f in range(IDENT_BITS)] self.best_finger_pred = [list() for f in range(IDENT_BITS)] def get_finger_succ_loc(self,f): """ Get the exact location of successor finger f. 
""" return (self.ident + 2**f) % MAX_IDENT def get_finger_pred_loc(self,f): """ Get the exact location of predecessor finger f. """ return (self.ident - 2**f) % MAX_IDENT def set_neighbours(self,knodes): """ set knodes to be the neighbours of this Node. """ self.neighbours = [] for kn in knodes: # Make sure we don't have ourselves as a neighbour: if kn.ident == self.ident: continue # A neighbour has a path length 1: self.neighbours.append(\ kn._replace(path_len=1)) # Update known nodes: self.add_known_nodes(0,self.neighbours) def add_known_best_finger_succ(self,f,knodes): """ If any of the nodes in knodes is a better candidate for the f's successor finger, we replace. """ pool = remove_knodes_duplicates(self.best_finger_succ[f] + knodes) self.best_finger_succ[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\ (dist_ident(self.get_finger_succ_loc(f),kn.ident),kn.path_len)) def add_known_best_finger_pred(self,f,knodes): """ If any of the nodes in knodes is a better candidate for the f's predecessor finger, we replace. """ pool = remove_knodes_duplicates(self.best_finger_pred[f] + knodes) self.best_finger_pred[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\ (dist_ident(kn.ident,self.get_finger_pred_loc(f)),kn.path_len)) def add_known_nodes(self,source_path_len,knodes): """ Add a set of known nodes to self.known . Take the change of path_len into acount. """ # Update the path lengths: updated_knodes = [kn._replace(path_len=kn.path_len+source_path_len)\ for kn in knodes] # Make sure the node self.ident is not inside: updated_knodes = list(filter(lambda kn:kn.ident != self.ident,\ updated_knodes)) for f in SUCC_FINGERS: self.add_known_best_finger_succ(f,updated_knodes) for f in PRED_FINGERS: self.add_known_best_finger_pred(f,updated_knodes) def get_known(self): """ Return a list of all known nodes. Items in the list are unique. 
""" pool = set() # Add neighbours: pool.update(self.neighbours) # Add fingers: for f in SUCC_FINGERS: pool.update(self.best_finger_succ[f]) for f in PRED_FINGERS: pool.update(self.best_finger_pred[f]) return list(pool) def get_close(self): """ Return a list of the closest known nodes. Close in the virtual sense, to self.ident, and to the possible fingers on the Chord DHT. """ pool = set() for f in SUCC_FINGERS: pool.update(self.best_finger_succ[f]) for f in PRED_FINGERS: pool.update(self.best_finger_pred[f]) return list(pool) def get_best_succ_finger(self,f): """ Get the best successor for finger f. """ return min(self.best_finger_succ[f],\ key=lambda kn:dist_ident(self.get_finger_succ_loc(f),kn.ident)) def get_best_pred_finger(self,f): """ Get the best predecessor for finger f. """ return min(self.best_finger_pred[f],\ key=lambda kn:dist_ident(kn.ident,self.get_finger_pred_loc(f))) # Simulation for a mesh network with Virtual DHT abilities: class VirtualDHT(): def __init__(self,n,fk,nei): # Amount of nodes: self.num_nodes = n # Half amount of neighbours per node: self.nei = nei # Known finger nodes parameter: self.fk = fk # Generate nodes and neighbours links: self.gen_nodes() self.rand_neighbours() def gen_nodes(self): """ Generate n nodes with random identity numbers. """ self.nodes = [] for i in range(self.num_nodes): self.nodes.append(Node(self.fk)) def make_knode(self,i,path_len=0): """ Given an index i of a node in self.nodes, create a Knode tuple. Optionally set path_len. """ return Knode(path_len=path_len,\ ident=self.nodes[i].ident,\ lindex=i) def rand_neighbours(self): """ Randomize immediate neighbours links between the nodes. """ # Initialize neighbours sets as empty sets: nodes_nei = [set() for _ in range(self.num_nodes)] for i,nd in enumerate(self.nodes): # Sample a set of indices (Which represent a set of nodes). 
# Those nodes will be nd's neighbours: nodes_nei[i].update(\ random.sample(range(self.num_nodes),self.nei)) # Remove myself: nodes_nei[i].discard(i) # To make the graph undirected, we add i to be neighbour of all # i's neighbours: for j in nodes_nei[i]: nodes_nei[j].add(i) for i,nd in enumerate(self.nodes): # Initialize a list of neighbours: nd.set_neighbours(map(self.make_knode,list(nodes_nei[i]))) def iter_node(self,i): """ Ask all known nodes for better known nodes. i is the index of the node in self.nodes. """ nd = self.nodes[i] for kn in nd.get_close(): # for kn in nd.get_known(): # for kn in nd.neighbours: kn_node = self.nodes[kn.lindex] nd.add_known_nodes(kn.path_len,kn_node.get_close()) def iter_all(self): """ Perform a full iteration, where all nodes ask other nodes for better nodes. """ for i in range(self.num_nodes): self.iter_node(i) def converge(self,max_iters=0x10):
def verify_succ_pred_fingers(self): """ Verify the succ and pred fingers found for all nodes. """ # Get all nodes (as Knodes), and sort them according to ident: lnodes = list(map(self.make_knode,range(self.num_nodes))) lnodes.sort(key=lambda ln:ln.ident) idents = [ln.ident for ln in lnodes] for i,ln in enumerate(lnodes): nd = self.nodes[ln.lindex] for f in SUCC_FINGERS: ind = bisect.bisect_left(\ idents,nd.get_finger_succ_loc(f)) f_succ = lnodes[(ind) % self.num_nodes] if nd.get_best_succ_finger(f).ident != f_succ.ident: return False for f in PRED_FINGERS: ind = bisect.bisect_right(\ idents,nd.get_finger_pred_loc(f)) f_pred = lnodes[(ind-1) % self.num_nodes] if nd.get_best_pred_finger(f).ident != f_pred.ident: return False return True def verify(self): """ Verify all the found nodes. """ if not self.verify_succ_pred_fingers(): return False return True def sample_path_len(self,num_samp=0x200): """ Find an approximated average to the path_len to successor and predecessor. """ sum_finger_path = 0.0 # We don't want to sample more than the total amount of nodes: num_samp = min([num_samp,self.num_nodes]) snodes = random.sample(self.nodes,num_samp) for sn in snodes: for f in SUCC_FINGERS: sum_finger_path += sn.get_best_succ_finger(f).path_len for f in PRED_FINGERS: sum_finger_path += sn.get_best_pred_finger(f).path_len num_fingers = len(SUCC_FINGERS) + len(PRED_FINGERS) return sum_finger_path/(num_samp * num_fingers) def go(): print("SUCC_FINGERS: ",SUCC_FINGERS) print("PRED_FINGERS: ",PRED_FINGERS) for i in range(7,16): print("i =",i) nei = i # amount of neighbours fk = i # fk = i # fk = 1 n = 2**i vd = VirtualDHT(n,fk=fk,nei=nei) vd.converge(max_iters=0x80) print(vd.sample_path_len()) if __name__ == "__main__": go()
""" "converge" the DHT by iterating until nothing changes. """ for i in range(max_iters): self.iter_all() print(".",end="",flush=True) if self.verify(): print("\nReached correct succ and pred + fingers.") return print("\nmax_iters acheived.")
identifier_body
custom_fingers.py
""" 6.12.2014 by real Testing the idea of routing in a mesh using a Virtual DHT. Inspired by "Pushing Chord into the Underlay". """ import random import heapq import bisect from collections import namedtuple # Number of bits in ident number: IDENT_BITS = 40 # Maximum possible identity value. # Note that this value isn't really the maximum. It is maximum + 1. MAX_IDENT = 2**IDENT_BITS # Fingers we are interested in: SUCC_FINGERS = [0] PRED_FINGERS = [0] # SUCC_FINGNERS = list(range(IDENT_BITS)) # PRED_FINGERS = list(range(IDENT_BITS)) # A named tuple for Known node: # path_len is the path length source node, # ident is the identity value of the Known node. # lindex is the list index of the Known node. Knode = namedtuple('Knode', ['path_len', 'ident','lindex']) def rand_ident(): """ Generate random identity in the range [0,MAX_IDENT) """ return random.randrange(MAX_IDENT) def dist_ident(x,y): """ Distance between two nodes (According to ident): """ return (y - x) % MAX_IDENT def remove_knodes_duplicates(knodes): """ Go over a list of knodes, and remove knodes that show up more than once. In case of node ident showing more than once, we pick the shorter path. """ if len(knodes) == 0: return knodes knodes.sort(key=lambda kn:(kn.ident,kn.path_len)) # Resulting array cur_ident = knodes[0].ident res = [knodes[0]] for kn in knodes[1:]: if kn.ident != cur_ident: cur_ident = kn.ident res.append(kn) return res # A node: class Node(): def __init__(self,fk,ident=None): """ Initialize a node. """ # If ident value is not specified, we randomize one: if ident is None: self.ident = rand_ident() else: self.ident = ident # Argument related to amount of known best finger candidates. self.fk = fk # Initialize list of known nodes: self.neighbours = [] self.best_finger_succ = [list() for f in range(IDENT_BITS)] self.best_finger_pred = [list() for f in range(IDENT_BITS)] def get_finger_succ_loc(self,f): """ Get the exact location of successor finger f. 
""" return (self.ident + 2**f) % MAX_IDENT def get_finger_pred_loc(self,f): """ Get the exact location of predecessor finger f. """ return (self.ident - 2**f) % MAX_IDENT def set_neighbours(self,knodes): """ set knodes to be the neighbours of this Node. """ self.neighbours = [] for kn in knodes: # Make sure we don't have ourselves as a neighbour: if kn.ident == self.ident: continue # A neighbour has a path length 1: self.neighbours.append(\ kn._replace(path_len=1)) # Update known nodes: self.add_known_nodes(0,self.neighbours) def add_known_best_finger_succ(self,f,knodes): """ If any of the nodes in knodes is a better candidate for the f's successor finger, we replace. """ pool = remove_knodes_duplicates(self.best_finger_succ[f] + knodes) self.best_finger_succ[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\ (dist_ident(self.get_finger_succ_loc(f),kn.ident),kn.path_len)) def add_known_best_finger_pred(self,f,knodes): """ If any of the nodes in knodes is a better candidate for the f's predecessor finger, we replace. """ pool = remove_knodes_duplicates(self.best_finger_pred[f] + knodes) self.best_finger_pred[f] = heapq.nsmallest(self.fk,pool,key=lambda kn:\ (dist_ident(kn.ident,self.get_finger_pred_loc(f)),kn.path_len)) def add_known_nodes(self,source_path_len,knodes): """ Add a set of known nodes to self.known . Take the change of path_len into acount. """ # Update the path lengths: updated_knodes = [kn._replace(path_len=kn.path_len+source_path_len)\ for kn in knodes] # Make sure the node self.ident is not inside: updated_knodes = list(filter(lambda kn:kn.ident != self.ident,\ updated_knodes)) for f in SUCC_FINGERS: self.add_known_best_finger_succ(f,updated_knodes) for f in PRED_FINGERS: self.add_known_best_finger_pred(f,updated_knodes) def get_known(self): """ Return a list of all known nodes. Items in the list are unique. 
""" pool = set() # Add neighbours: pool.update(self.neighbours) # Add fingers: for f in SUCC_FINGERS: pool.update(self.best_finger_succ[f]) for f in PRED_FINGERS: pool.update(self.best_finger_pred[f]) return list(pool) def get_close(self): """ Return a list of the closest known nodes. Close in the virtual sense, to self.ident, and to the possible fingers on the Chord DHT. """ pool = set() for f in SUCC_FINGERS: pool.update(self.best_finger_succ[f]) for f in PRED_FINGERS: pool.update(self.best_finger_pred[f]) return list(pool) def get_best_succ_finger(self,f): """ Get the best successor for finger f. """ return min(self.best_finger_succ[f],\ key=lambda kn:dist_ident(self.get_finger_succ_loc(f),kn.ident)) def get_best_pred_finger(self,f): """ Get the best predecessor for finger f. """ return min(self.best_finger_pred[f],\ key=lambda kn:dist_ident(kn.ident,self.get_finger_pred_loc(f))) # Simulation for a mesh network with Virtual DHT abilities: class VirtualDHT(): def __init__(self,n,fk,nei): # Amount of nodes: self.num_nodes = n # Half amount of neighbours per node: self.nei = nei # Known finger nodes parameter: self.fk = fk # Generate nodes and neighbours links: self.gen_nodes() self.rand_neighbours() def
(self): """ Generate n nodes with random identity numbers. """ self.nodes = [] for i in range(self.num_nodes): self.nodes.append(Node(self.fk)) def make_knode(self,i,path_len=0): """ Given an index i of a node in self.nodes, create a Knode tuple. Optionally set path_len. """ return Knode(path_len=path_len,\ ident=self.nodes[i].ident,\ lindex=i) def rand_neighbours(self): """ Randomize immediate neighbours links between the nodes. """ # Initialize neighbours sets as empty sets: nodes_nei = [set() for _ in range(self.num_nodes)] for i,nd in enumerate(self.nodes): # Sample a set of indices (Which represent a set of nodes). # Those nodes will be nd's neighbours: nodes_nei[i].update(\ random.sample(range(self.num_nodes),self.nei)) # Remove myself: nodes_nei[i].discard(i) # To make the graph undirected, we add i to be neighbour of all # i's neighbours: for j in nodes_nei[i]: nodes_nei[j].add(i) for i,nd in enumerate(self.nodes): # Initialize a list of neighbours: nd.set_neighbours(map(self.make_knode,list(nodes_nei[i]))) def iter_node(self,i): """ Ask all known nodes for better known nodes. i is the index of the node in self.nodes. """ nd = self.nodes[i] for kn in nd.get_close(): # for kn in nd.get_known(): # for kn in nd.neighbours: kn_node = self.nodes[kn.lindex] nd.add_known_nodes(kn.path_len,kn_node.get_close()) def iter_all(self): """ Perform a full iteration, where all nodes ask other nodes for better nodes. """ for i in range(self.num_nodes): self.iter_node(i) def converge(self,max_iters=0x10): """ "converge" the DHT by iterating until nothing changes. """ for i in range(max_iters): self.iter_all() print(".",end="",flush=True) if self.verify(): print("\nReached correct succ and pred + fingers.") return print("\nmax_iters acheived.") def verify_succ_pred_fingers(self): """ Verify the succ and pred fingers found for all nodes. 
""" # Get all nodes (as Knodes), and sort them according to ident: lnodes = list(map(self.make_knode,range(self.num_nodes))) lnodes.sort(key=lambda ln:ln.ident) idents = [ln.ident for ln in lnodes] for i,ln in enumerate(lnodes): nd = self.nodes[ln.lindex] for f in SUCC_FINGERS: ind = bisect.bisect_left(\ idents,nd.get_finger_succ_loc(f)) f_succ = lnodes[(ind) % self.num_nodes] if nd.get_best_succ_finger(f).ident != f_succ.ident: return False for f in PRED_FINGERS: ind = bisect.bisect_right(\ idents,nd.get_finger_pred_loc(f)) f_pred = lnodes[(ind-1) % self.num_nodes] if nd.get_best_pred_finger(f).ident != f_pred.ident: return False return True def verify(self): """ Verify all the found nodes. """ if not self.verify_succ_pred_fingers(): return False return True def sample_path_len(self,num_samp=0x200): """ Find an approximated average to the path_len to successor and predecessor. """ sum_finger_path = 0.0 # We don't want to sample more than the total amount of nodes: num_samp = min([num_samp,self.num_nodes]) snodes = random.sample(self.nodes,num_samp) for sn in snodes: for f in SUCC_FINGERS: sum_finger_path += sn.get_best_succ_finger(f).path_len for f in PRED_FINGERS: sum_finger_path += sn.get_best_pred_finger(f).path_len num_fingers = len(SUCC_FINGERS) + len(PRED_FINGERS) return sum_finger_path/(num_samp * num_fingers) def go(): print("SUCC_FINGERS: ",SUCC_FINGERS) print("PRED_FINGERS: ",PRED_FINGERS) for i in range(7,16): print("i =",i) nei = i # amount of neighbours fk = i # fk = i # fk = 1 n = 2**i vd = VirtualDHT(n,fk=fk,nei=nei) vd.converge(max_iters=0x80) print(vd.sample_path_len()) if __name__ == "__main__": go()
gen_nodes
identifier_name
utils_test.go
package ante_test import ( "math" "math/big" "testing" "time" sdkmath "cosmossdk.io/math" "github.com/stretchr/testify/suite" "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/x/auth/migrations/legacytx" types2 "github.com/cosmos/cosmos-sdk/x/bank/types" types3 "github.com/cosmos/cosmos-sdk/x/staking/types" "github.com/ethereum/go-ethereum/crypto" "github.com/evmos/ethermint/ethereum/eip712" "github.com/evmos/ethermint/types" "github.com/ethereum/go-ethereum/common" ethtypes "github.com/ethereum/go-ethereum/core/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/tx" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/simapp" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/tx/signing" authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing" authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" "github.com/evmos/ethermint/app" ante "github.com/evmos/ethermint/app/ante" "github.com/evmos/ethermint/encoding" "github.com/evmos/ethermint/tests" "github.com/evmos/ethermint/x/evm/statedb" evmtypes "github.com/evmos/ethermint/x/evm/types" feemarkettypes "github.com/evmos/ethermint/x/feemarket/types" tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) type AnteTestSuite struct { suite.Suite ctx sdk.Context app *app.EthermintApp clientCtx client.Context anteHandler sdk.AnteHandler ethSigner ethtypes.Signer enableFeemarket bool enableLondonHF bool evmParamsOption func(*evmtypes.Params) } const TestGasLimit uint64 = 100000 func (suite *AnteTestSuite) StateDB() *statedb.StateDB { return statedb.New(suite.ctx, suite.app.EvmKeeper, statedb.NewEmptyTxConfig(common.BytesToHash(suite.ctx.HeaderHash().Bytes()))) } func (suite *AnteTestSuite) SetupTest() { checkTx := false suite.app = app.Setup(checkTx, func(app 
*app.EthermintApp, genesis simapp.GenesisState) simapp.GenesisState { if suite.enableFeemarket { // setup feemarketGenesis params feemarketGenesis := feemarkettypes.DefaultGenesisState() feemarketGenesis.Params.EnableHeight = 1 feemarketGenesis.Params.NoBaseFee = false // Verify feeMarket genesis err := feemarketGenesis.Validate() suite.Require().NoError(err) genesis[feemarkettypes.ModuleName] = app.AppCodec().MustMarshalJSON(feemarketGenesis) } evmGenesis := evmtypes.DefaultGenesisState() evmGenesis.Params.AllowUnprotectedTxs = false if !suite.enableLondonHF { maxInt := sdkmath.NewInt(math.MaxInt64) evmGenesis.Params.ChainConfig.LondonBlock = &maxInt evmGenesis.Params.ChainConfig.ArrowGlacierBlock = &maxInt evmGenesis.Params.ChainConfig.GrayGlacierBlock = &maxInt evmGenesis.Params.ChainConfig.MergeNetsplitBlock = &maxInt } if suite.evmParamsOption != nil { suite.evmParamsOption(&evmGenesis.Params) } genesis[evmtypes.ModuleName] = app.AppCodec().MustMarshalJSON(evmGenesis) return genesis }) suite.ctx = suite.app.BaseApp.NewContext(checkTx, tmproto.Header{Height: 2, ChainID: "ethermint_9000-1", Time: time.Now().UTC()}) suite.ctx = suite.ctx.WithMinGasPrices(sdk.NewDecCoins(sdk.NewDecCoin(evmtypes.DefaultEVMDenom, sdk.OneInt()))) suite.ctx = suite.ctx.WithBlockGasMeter(sdk.NewGasMeter(1000000000000000000)) suite.app.EvmKeeper.WithChainID(suite.ctx) infCtx := suite.ctx.WithGasMeter(sdk.NewInfiniteGasMeter()) suite.app.AccountKeeper.SetParams(infCtx, authtypes.DefaultParams()) encodingConfig := encoding.MakeConfig(app.ModuleBasics) // We're using TestMsg amino encoding in some tests, so register it here. 
encodingConfig.Amino.RegisterConcrete(&testdata.TestMsg{}, "testdata.TestMsg", nil) suite.clientCtx = client.Context{}.WithTxConfig(encodingConfig.TxConfig) anteHandler, err := ante.NewAnteHandler(ante.HandlerOptions{ AccountKeeper: suite.app.AccountKeeper, BankKeeper: suite.app.BankKeeper, EvmKeeper: suite.app.EvmKeeper, FeegrantKeeper: suite.app.FeeGrantKeeper, IBCKeeper: suite.app.IBCKeeper, FeeMarketKeeper: suite.app.FeeMarketKeeper, SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), SigGasConsumer: ante.DefaultSigVerificationGasConsumer, }) suite.Require().NoError(err) suite.anteHandler = anteHandler suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID()) } func TestAnteTestSuite(t *testing.T) { suite.Run(t, &AnteTestSuite{ enableLondonHF: true, }) } func (s *AnteTestSuite) BuildTestEthTx( from common.Address, to common.Address, amount *big.Int, input []byte, gasPrice *big.Int, gasFeeCap *big.Int, gasTipCap *big.Int, accesses *ethtypes.AccessList, ) *evmtypes.MsgEthereumTx { chainID := s.app.EvmKeeper.ChainID() nonce := s.app.EvmKeeper.GetNonce( s.ctx, common.BytesToAddress(from.Bytes()), ) msgEthereumTx := evmtypes.NewTx( chainID, nonce, &to, amount, TestGasLimit, gasPrice, gasFeeCap, gasTipCap, input, accesses, ) msgEthereumTx.From = from.String() return msgEthereumTx } // CreateTestTx is a helper function to create a tx given multiple inputs. func (suite *AnteTestSuite) CreateTestTx( msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool, unsetExtensionOptions ...bool, ) authsigning.Tx { return suite.CreateTestTxBuilder(msg, priv, accNum, signCosmosTx).GetTx() } // CreateTestTxBuilder is a helper function to create a tx builder given multiple inputs. 
func (suite *AnteTestSuite) CreateTestTxBuilder( msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool, unsetExtensionOptions ...bool, ) client.TxBuilder { var option *codectypes.Any var err error if len(unsetExtensionOptions) == 0 { option, err = codectypes.NewAnyWithValue(&evmtypes.ExtensionOptionsEthereumTx{}) suite.Require().NoError(err) } txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder) suite.Require().True(ok) if len(unsetExtensionOptions) == 0 { builder.SetExtensionOptions(option) } err = msg.Sign(suite.ethSigner, tests.NewSigner(priv)) suite.Require().NoError(err) msg.From = "" err = builder.SetMsgs(msg) suite.Require().NoError(err) txData, err := evmtypes.UnpackTxData(msg.Data) suite.Require().NoError(err) fees := sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewIntFromBigInt(txData.Fee()))) builder.SetFeeAmount(fees) builder.SetGasLimit(msg.GetGas()) if signCosmosTx { // First round: we gather all the signer infos. We use the "set empty // signature" hack to do that. sigV2 := signing.SignatureV2{ PubKey: priv.PubKey(), Data: &signing.SingleSignatureData{ SignMode: suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), Signature: nil, }, Sequence: txData.GetNonce(), } sigsV2 := []signing.SignatureV2{sigV2} err = txBuilder.SetSignatures(sigsV2...) suite.Require().NoError(err) // Second round: all signer infos are set, so each signer can sign. signerData := authsigning.SignerData{ ChainID: suite.ctx.ChainID(), AccountNumber: accNum, Sequence: txData.GetNonce(), } sigV2, err = tx.SignWithPrivKey( suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), signerData, txBuilder, priv, suite.clientCtx.TxConfig, txData.GetNonce(), ) suite.Require().NoError(err) sigsV2 = []signing.SignatureV2{sigV2} err = txBuilder.SetSignatures(sigsV2...) 
suite.Require().NoError(err) } return txBuilder } func (suite *AnteTestSuite) CreateTestCosmosTxBuilder(gasPrice sdkmath.Int, denom string, msgs ...sdk.Msg) client.TxBuilder { txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() txBuilder.SetGasLimit(TestGasLimit) fees := &sdk.Coins{{Denom: denom, Amount: gasPrice.MulRaw(int64(TestGasLimit))}} txBuilder.SetFeeAmount(*fees) err := txBuilder.SetMsgs(msgs...) suite.Require().NoError(err) return txBuilder } func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgSend(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder { // Build MsgSend recipient := sdk.AccAddress(common.Address{}.Bytes()) msgSend := types2.NewMsgSend(from, recipient, sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(1)))) return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend) } func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgDelegate(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder { // Build MsgSend valEthAddr := tests.GenerateAddress() valAddr := sdk.ValAddress(valEthAddr.Bytes()) msgSend := types3.NewMsgDelegate(from, valAddr, sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(20))) return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend) } func (suite *AnteTestSuite) CreateTestEIP712CosmosTxBuilder( from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins, msg sdk.Msg, ) client.TxBuilder { var err error nonce, err := suite.app.AccountKeeper.GetSequence(suite.ctx, from) suite.Require().NoError(err) pc, err := types.ParseChainID(chainId) suite.Require().NoError(err) ethChainId := pc.Uint64() // GenerateTypedData TypedData var ethermintCodec codec.ProtoCodecMarshaler fee := legacytx.NewStdFee(gas, gasAmount) accNumber := suite.app.AccountKeeper.GetAccount(suite.ctx, from).GetAccountNumber() data 
:= legacytx.StdSignBytes(chainId, accNumber, nonce, 0, fee, []sdk.Msg{msg}, "", nil) typedData, err := eip712.WrapTxToTypedData(ethermintCodec, ethChainId, msg, data, &eip712.FeeDelegationOptions{ FeePayer: from, }) suite.Require().NoError(err) sigHash, err := eip712.ComputeTypedDataHash(typedData) suite.Require().NoError(err) // Sign typedData keyringSigner := tests.NewSigner(priv) signature, pubKey, err := keyringSigner.SignByAddress(from, sigHash) suite.Require().NoError(err) signature[crypto.RecoveryIDOffset] += 27 // Transform V from 0/1 to 27/28 according to the yellow paper // Add ExtensionOptionsWeb3Tx extension var option *codectypes.Any option, err = codectypes.NewAnyWithValue(&types.ExtensionOptionsWeb3Tx{ FeePayer: from.String(), TypedDataChainID: ethChainId, FeePayerSig: signature, }) suite.Require().NoError(err) suite.clientCtx.TxConfig.SignModeHandler() txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder) suite.Require().True(ok) builder.SetExtensionOptions(option) builder.SetFeeAmount(gasAmount) builder.SetGasLimit(gas) sigsV2 := signing.SignatureV2{ PubKey: pubKey, Data: &signing.SingleSignatureData{ SignMode: signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON, }, Sequence: nonce, } err = builder.SetSignatures(sigsV2) suite.Require().NoError(err) err = builder.SetMsgs(msg) suite.Require().NoError(err) return builder } func NextFn(ctx sdk.Context, _ sdk.Tx, _ bool) (sdk.Context, error) { return ctx, nil } var _ sdk.Tx = &invalidTx{} type invalidTx struct{} func (invalidTx)
() []sdk.Msg { return []sdk.Msg{nil} } func (invalidTx) ValidateBasic() error { return nil }
GetMsgs
identifier_name
utils_test.go
package ante_test import ( "math" "math/big" "testing" "time" sdkmath "cosmossdk.io/math" "github.com/stretchr/testify/suite" "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/x/auth/migrations/legacytx" types2 "github.com/cosmos/cosmos-sdk/x/bank/types" types3 "github.com/cosmos/cosmos-sdk/x/staking/types" "github.com/ethereum/go-ethereum/crypto" "github.com/evmos/ethermint/ethereum/eip712" "github.com/evmos/ethermint/types" "github.com/ethereum/go-ethereum/common" ethtypes "github.com/ethereum/go-ethereum/core/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/tx" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/simapp" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/tx/signing" authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing" authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" "github.com/evmos/ethermint/app" ante "github.com/evmos/ethermint/app/ante" "github.com/evmos/ethermint/encoding" "github.com/evmos/ethermint/tests" "github.com/evmos/ethermint/x/evm/statedb" evmtypes "github.com/evmos/ethermint/x/evm/types" feemarkettypes "github.com/evmos/ethermint/x/feemarket/types" tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) type AnteTestSuite struct { suite.Suite ctx sdk.Context app *app.EthermintApp clientCtx client.Context anteHandler sdk.AnteHandler ethSigner ethtypes.Signer enableFeemarket bool enableLondonHF bool evmParamsOption func(*evmtypes.Params) } const TestGasLimit uint64 = 100000 func (suite *AnteTestSuite) StateDB() *statedb.StateDB { return statedb.New(suite.ctx, suite.app.EvmKeeper, statedb.NewEmptyTxConfig(common.BytesToHash(suite.ctx.HeaderHash().Bytes()))) } func (suite *AnteTestSuite) SetupTest() { checkTx := false suite.app = app.Setup(checkTx, func(app 
*app.EthermintApp, genesis simapp.GenesisState) simapp.GenesisState { if suite.enableFeemarket { // setup feemarketGenesis params feemarketGenesis := feemarkettypes.DefaultGenesisState() feemarketGenesis.Params.EnableHeight = 1 feemarketGenesis.Params.NoBaseFee = false // Verify feeMarket genesis err := feemarketGenesis.Validate() suite.Require().NoError(err) genesis[feemarkettypes.ModuleName] = app.AppCodec().MustMarshalJSON(feemarketGenesis) } evmGenesis := evmtypes.DefaultGenesisState() evmGenesis.Params.AllowUnprotectedTxs = false if !suite.enableLondonHF { maxInt := sdkmath.NewInt(math.MaxInt64) evmGenesis.Params.ChainConfig.LondonBlock = &maxInt evmGenesis.Params.ChainConfig.ArrowGlacierBlock = &maxInt evmGenesis.Params.ChainConfig.GrayGlacierBlock = &maxInt evmGenesis.Params.ChainConfig.MergeNetsplitBlock = &maxInt } if suite.evmParamsOption != nil { suite.evmParamsOption(&evmGenesis.Params) } genesis[evmtypes.ModuleName] = app.AppCodec().MustMarshalJSON(evmGenesis) return genesis }) suite.ctx = suite.app.BaseApp.NewContext(checkTx, tmproto.Header{Height: 2, ChainID: "ethermint_9000-1", Time: time.Now().UTC()}) suite.ctx = suite.ctx.WithMinGasPrices(sdk.NewDecCoins(sdk.NewDecCoin(evmtypes.DefaultEVMDenom, sdk.OneInt()))) suite.ctx = suite.ctx.WithBlockGasMeter(sdk.NewGasMeter(1000000000000000000)) suite.app.EvmKeeper.WithChainID(suite.ctx) infCtx := suite.ctx.WithGasMeter(sdk.NewInfiniteGasMeter()) suite.app.AccountKeeper.SetParams(infCtx, authtypes.DefaultParams()) encodingConfig := encoding.MakeConfig(app.ModuleBasics)
// We're using TestMsg amino encoding in some tests, so register it here. encodingConfig.Amino.RegisterConcrete(&testdata.TestMsg{}, "testdata.TestMsg", nil) suite.clientCtx = client.Context{}.WithTxConfig(encodingConfig.TxConfig) anteHandler, err := ante.NewAnteHandler(ante.HandlerOptions{ AccountKeeper: suite.app.AccountKeeper, BankKeeper: suite.app.BankKeeper, EvmKeeper: suite.app.EvmKeeper, FeegrantKeeper: suite.app.FeeGrantKeeper, IBCKeeper: suite.app.IBCKeeper, FeeMarketKeeper: suite.app.FeeMarketKeeper, SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), SigGasConsumer: ante.DefaultSigVerificationGasConsumer, }) suite.Require().NoError(err) suite.anteHandler = anteHandler suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID()) } func TestAnteTestSuite(t *testing.T) { suite.Run(t, &AnteTestSuite{ enableLondonHF: true, }) } func (s *AnteTestSuite) BuildTestEthTx( from common.Address, to common.Address, amount *big.Int, input []byte, gasPrice *big.Int, gasFeeCap *big.Int, gasTipCap *big.Int, accesses *ethtypes.AccessList, ) *evmtypes.MsgEthereumTx { chainID := s.app.EvmKeeper.ChainID() nonce := s.app.EvmKeeper.GetNonce( s.ctx, common.BytesToAddress(from.Bytes()), ) msgEthereumTx := evmtypes.NewTx( chainID, nonce, &to, amount, TestGasLimit, gasPrice, gasFeeCap, gasTipCap, input, accesses, ) msgEthereumTx.From = from.String() return msgEthereumTx } // CreateTestTx is a helper function to create a tx given multiple inputs. func (suite *AnteTestSuite) CreateTestTx( msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool, unsetExtensionOptions ...bool, ) authsigning.Tx { return suite.CreateTestTxBuilder(msg, priv, accNum, signCosmosTx).GetTx() } // CreateTestTxBuilder is a helper function to create a tx builder given multiple inputs. 
func (suite *AnteTestSuite) CreateTestTxBuilder( msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool, unsetExtensionOptions ...bool, ) client.TxBuilder { var option *codectypes.Any var err error if len(unsetExtensionOptions) == 0 { option, err = codectypes.NewAnyWithValue(&evmtypes.ExtensionOptionsEthereumTx{}) suite.Require().NoError(err) } txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder) suite.Require().True(ok) if len(unsetExtensionOptions) == 0 { builder.SetExtensionOptions(option) } err = msg.Sign(suite.ethSigner, tests.NewSigner(priv)) suite.Require().NoError(err) msg.From = "" err = builder.SetMsgs(msg) suite.Require().NoError(err) txData, err := evmtypes.UnpackTxData(msg.Data) suite.Require().NoError(err) fees := sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewIntFromBigInt(txData.Fee()))) builder.SetFeeAmount(fees) builder.SetGasLimit(msg.GetGas()) if signCosmosTx { // First round: we gather all the signer infos. We use the "set empty // signature" hack to do that. sigV2 := signing.SignatureV2{ PubKey: priv.PubKey(), Data: &signing.SingleSignatureData{ SignMode: suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), Signature: nil, }, Sequence: txData.GetNonce(), } sigsV2 := []signing.SignatureV2{sigV2} err = txBuilder.SetSignatures(sigsV2...) suite.Require().NoError(err) // Second round: all signer infos are set, so each signer can sign. signerData := authsigning.SignerData{ ChainID: suite.ctx.ChainID(), AccountNumber: accNum, Sequence: txData.GetNonce(), } sigV2, err = tx.SignWithPrivKey( suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), signerData, txBuilder, priv, suite.clientCtx.TxConfig, txData.GetNonce(), ) suite.Require().NoError(err) sigsV2 = []signing.SignatureV2{sigV2} err = txBuilder.SetSignatures(sigsV2...) 
suite.Require().NoError(err) } return txBuilder } func (suite *AnteTestSuite) CreateTestCosmosTxBuilder(gasPrice sdkmath.Int, denom string, msgs ...sdk.Msg) client.TxBuilder { txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() txBuilder.SetGasLimit(TestGasLimit) fees := &sdk.Coins{{Denom: denom, Amount: gasPrice.MulRaw(int64(TestGasLimit))}} txBuilder.SetFeeAmount(*fees) err := txBuilder.SetMsgs(msgs...) suite.Require().NoError(err) return txBuilder } func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgSend(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder { // Build MsgSend recipient := sdk.AccAddress(common.Address{}.Bytes()) msgSend := types2.NewMsgSend(from, recipient, sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(1)))) return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend) } func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgDelegate(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder { // Build MsgSend valEthAddr := tests.GenerateAddress() valAddr := sdk.ValAddress(valEthAddr.Bytes()) msgSend := types3.NewMsgDelegate(from, valAddr, sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(20))) return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend) } func (suite *AnteTestSuite) CreateTestEIP712CosmosTxBuilder( from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins, msg sdk.Msg, ) client.TxBuilder { var err error nonce, err := suite.app.AccountKeeper.GetSequence(suite.ctx, from) suite.Require().NoError(err) pc, err := types.ParseChainID(chainId) suite.Require().NoError(err) ethChainId := pc.Uint64() // GenerateTypedData TypedData var ethermintCodec codec.ProtoCodecMarshaler fee := legacytx.NewStdFee(gas, gasAmount) accNumber := suite.app.AccountKeeper.GetAccount(suite.ctx, from).GetAccountNumber() data 
:= legacytx.StdSignBytes(chainId, accNumber, nonce, 0, fee, []sdk.Msg{msg}, "", nil) typedData, err := eip712.WrapTxToTypedData(ethermintCodec, ethChainId, msg, data, &eip712.FeeDelegationOptions{ FeePayer: from, }) suite.Require().NoError(err) sigHash, err := eip712.ComputeTypedDataHash(typedData) suite.Require().NoError(err) // Sign typedData keyringSigner := tests.NewSigner(priv) signature, pubKey, err := keyringSigner.SignByAddress(from, sigHash) suite.Require().NoError(err) signature[crypto.RecoveryIDOffset] += 27 // Transform V from 0/1 to 27/28 according to the yellow paper // Add ExtensionOptionsWeb3Tx extension var option *codectypes.Any option, err = codectypes.NewAnyWithValue(&types.ExtensionOptionsWeb3Tx{ FeePayer: from.String(), TypedDataChainID: ethChainId, FeePayerSig: signature, }) suite.Require().NoError(err) suite.clientCtx.TxConfig.SignModeHandler() txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder) suite.Require().True(ok) builder.SetExtensionOptions(option) builder.SetFeeAmount(gasAmount) builder.SetGasLimit(gas) sigsV2 := signing.SignatureV2{ PubKey: pubKey, Data: &signing.SingleSignatureData{ SignMode: signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON, }, Sequence: nonce, } err = builder.SetSignatures(sigsV2) suite.Require().NoError(err) err = builder.SetMsgs(msg) suite.Require().NoError(err) return builder } func NextFn(ctx sdk.Context, _ sdk.Tx, _ bool) (sdk.Context, error) { return ctx, nil } var _ sdk.Tx = &invalidTx{} type invalidTx struct{} func (invalidTx) GetMsgs() []sdk.Msg { return []sdk.Msg{nil} } func (invalidTx) ValidateBasic() error { return nil }
random_line_split
utils_test.go
package ante_test import ( "math" "math/big" "testing" "time" sdkmath "cosmossdk.io/math" "github.com/stretchr/testify/suite" "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/x/auth/migrations/legacytx" types2 "github.com/cosmos/cosmos-sdk/x/bank/types" types3 "github.com/cosmos/cosmos-sdk/x/staking/types" "github.com/ethereum/go-ethereum/crypto" "github.com/evmos/ethermint/ethereum/eip712" "github.com/evmos/ethermint/types" "github.com/ethereum/go-ethereum/common" ethtypes "github.com/ethereum/go-ethereum/core/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/tx" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/simapp" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/tx/signing" authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing" authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" "github.com/evmos/ethermint/app" ante "github.com/evmos/ethermint/app/ante" "github.com/evmos/ethermint/encoding" "github.com/evmos/ethermint/tests" "github.com/evmos/ethermint/x/evm/statedb" evmtypes "github.com/evmos/ethermint/x/evm/types" feemarkettypes "github.com/evmos/ethermint/x/feemarket/types" tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) type AnteTestSuite struct { suite.Suite ctx sdk.Context app *app.EthermintApp clientCtx client.Context anteHandler sdk.AnteHandler ethSigner ethtypes.Signer enableFeemarket bool enableLondonHF bool evmParamsOption func(*evmtypes.Params) } const TestGasLimit uint64 = 100000 func (suite *AnteTestSuite) StateDB() *statedb.StateDB { return statedb.New(suite.ctx, suite.app.EvmKeeper, statedb.NewEmptyTxConfig(common.BytesToHash(suite.ctx.HeaderHash().Bytes()))) } func (suite *AnteTestSuite) SetupTest() { checkTx := false suite.app = app.Setup(checkTx, func(app 
*app.EthermintApp, genesis simapp.GenesisState) simapp.GenesisState { if suite.enableFeemarket { // setup feemarketGenesis params feemarketGenesis := feemarkettypes.DefaultGenesisState() feemarketGenesis.Params.EnableHeight = 1 feemarketGenesis.Params.NoBaseFee = false // Verify feeMarket genesis err := feemarketGenesis.Validate() suite.Require().NoError(err) genesis[feemarkettypes.ModuleName] = app.AppCodec().MustMarshalJSON(feemarketGenesis) } evmGenesis := evmtypes.DefaultGenesisState() evmGenesis.Params.AllowUnprotectedTxs = false if !suite.enableLondonHF { maxInt := sdkmath.NewInt(math.MaxInt64) evmGenesis.Params.ChainConfig.LondonBlock = &maxInt evmGenesis.Params.ChainConfig.ArrowGlacierBlock = &maxInt evmGenesis.Params.ChainConfig.GrayGlacierBlock = &maxInt evmGenesis.Params.ChainConfig.MergeNetsplitBlock = &maxInt } if suite.evmParamsOption != nil { suite.evmParamsOption(&evmGenesis.Params) } genesis[evmtypes.ModuleName] = app.AppCodec().MustMarshalJSON(evmGenesis) return genesis }) suite.ctx = suite.app.BaseApp.NewContext(checkTx, tmproto.Header{Height: 2, ChainID: "ethermint_9000-1", Time: time.Now().UTC()}) suite.ctx = suite.ctx.WithMinGasPrices(sdk.NewDecCoins(sdk.NewDecCoin(evmtypes.DefaultEVMDenom, sdk.OneInt()))) suite.ctx = suite.ctx.WithBlockGasMeter(sdk.NewGasMeter(1000000000000000000)) suite.app.EvmKeeper.WithChainID(suite.ctx) infCtx := suite.ctx.WithGasMeter(sdk.NewInfiniteGasMeter()) suite.app.AccountKeeper.SetParams(infCtx, authtypes.DefaultParams()) encodingConfig := encoding.MakeConfig(app.ModuleBasics) // We're using TestMsg amino encoding in some tests, so register it here. 
encodingConfig.Amino.RegisterConcrete(&testdata.TestMsg{}, "testdata.TestMsg", nil) suite.clientCtx = client.Context{}.WithTxConfig(encodingConfig.TxConfig) anteHandler, err := ante.NewAnteHandler(ante.HandlerOptions{ AccountKeeper: suite.app.AccountKeeper, BankKeeper: suite.app.BankKeeper, EvmKeeper: suite.app.EvmKeeper, FeegrantKeeper: suite.app.FeeGrantKeeper, IBCKeeper: suite.app.IBCKeeper, FeeMarketKeeper: suite.app.FeeMarketKeeper, SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), SigGasConsumer: ante.DefaultSigVerificationGasConsumer, }) suite.Require().NoError(err) suite.anteHandler = anteHandler suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID()) } func TestAnteTestSuite(t *testing.T) { suite.Run(t, &AnteTestSuite{ enableLondonHF: true, }) } func (s *AnteTestSuite) BuildTestEthTx( from common.Address, to common.Address, amount *big.Int, input []byte, gasPrice *big.Int, gasFeeCap *big.Int, gasTipCap *big.Int, accesses *ethtypes.AccessList, ) *evmtypes.MsgEthereumTx { chainID := s.app.EvmKeeper.ChainID() nonce := s.app.EvmKeeper.GetNonce( s.ctx, common.BytesToAddress(from.Bytes()), ) msgEthereumTx := evmtypes.NewTx( chainID, nonce, &to, amount, TestGasLimit, gasPrice, gasFeeCap, gasTipCap, input, accesses, ) msgEthereumTx.From = from.String() return msgEthereumTx } // CreateTestTx is a helper function to create a tx given multiple inputs. func (suite *AnteTestSuite) CreateTestTx( msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool, unsetExtensionOptions ...bool, ) authsigning.Tx { return suite.CreateTestTxBuilder(msg, priv, accNum, signCosmosTx).GetTx() } // CreateTestTxBuilder is a helper function to create a tx builder given multiple inputs. 
func (suite *AnteTestSuite) CreateTestTxBuilder( msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool, unsetExtensionOptions ...bool, ) client.TxBuilder { var option *codectypes.Any var err error if len(unsetExtensionOptions) == 0 { option, err = codectypes.NewAnyWithValue(&evmtypes.ExtensionOptionsEthereumTx{}) suite.Require().NoError(err) } txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder) suite.Require().True(ok) if len(unsetExtensionOptions) == 0 { builder.SetExtensionOptions(option) } err = msg.Sign(suite.ethSigner, tests.NewSigner(priv)) suite.Require().NoError(err) msg.From = "" err = builder.SetMsgs(msg) suite.Require().NoError(err) txData, err := evmtypes.UnpackTxData(msg.Data) suite.Require().NoError(err) fees := sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewIntFromBigInt(txData.Fee()))) builder.SetFeeAmount(fees) builder.SetGasLimit(msg.GetGas()) if signCosmosTx
return txBuilder } func (suite *AnteTestSuite) CreateTestCosmosTxBuilder(gasPrice sdkmath.Int, denom string, msgs ...sdk.Msg) client.TxBuilder { txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() txBuilder.SetGasLimit(TestGasLimit) fees := &sdk.Coins{{Denom: denom, Amount: gasPrice.MulRaw(int64(TestGasLimit))}} txBuilder.SetFeeAmount(*fees) err := txBuilder.SetMsgs(msgs...) suite.Require().NoError(err) return txBuilder } func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgSend(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder { // Build MsgSend recipient := sdk.AccAddress(common.Address{}.Bytes()) msgSend := types2.NewMsgSend(from, recipient, sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(1)))) return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend) } func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgDelegate(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder { // Build MsgSend valEthAddr := tests.GenerateAddress() valAddr := sdk.ValAddress(valEthAddr.Bytes()) msgSend := types3.NewMsgDelegate(from, valAddr, sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(20))) return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend) } func (suite *AnteTestSuite) CreateTestEIP712CosmosTxBuilder( from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins, msg sdk.Msg, ) client.TxBuilder { var err error nonce, err := suite.app.AccountKeeper.GetSequence(suite.ctx, from) suite.Require().NoError(err) pc, err := types.ParseChainID(chainId) suite.Require().NoError(err) ethChainId := pc.Uint64() // GenerateTypedData TypedData var ethermintCodec codec.ProtoCodecMarshaler fee := legacytx.NewStdFee(gas, gasAmount) accNumber := suite.app.AccountKeeper.GetAccount(suite.ctx, from).GetAccountNumber() data := 
legacytx.StdSignBytes(chainId, accNumber, nonce, 0, fee, []sdk.Msg{msg}, "", nil) typedData, err := eip712.WrapTxToTypedData(ethermintCodec, ethChainId, msg, data, &eip712.FeeDelegationOptions{ FeePayer: from, }) suite.Require().NoError(err) sigHash, err := eip712.ComputeTypedDataHash(typedData) suite.Require().NoError(err) // Sign typedData keyringSigner := tests.NewSigner(priv) signature, pubKey, err := keyringSigner.SignByAddress(from, sigHash) suite.Require().NoError(err) signature[crypto.RecoveryIDOffset] += 27 // Transform V from 0/1 to 27/28 according to the yellow paper // Add ExtensionOptionsWeb3Tx extension var option *codectypes.Any option, err = codectypes.NewAnyWithValue(&types.ExtensionOptionsWeb3Tx{ FeePayer: from.String(), TypedDataChainID: ethChainId, FeePayerSig: signature, }) suite.Require().NoError(err) suite.clientCtx.TxConfig.SignModeHandler() txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder) suite.Require().True(ok) builder.SetExtensionOptions(option) builder.SetFeeAmount(gasAmount) builder.SetGasLimit(gas) sigsV2 := signing.SignatureV2{ PubKey: pubKey, Data: &signing.SingleSignatureData{ SignMode: signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON, }, Sequence: nonce, } err = builder.SetSignatures(sigsV2) suite.Require().NoError(err) err = builder.SetMsgs(msg) suite.Require().NoError(err) return builder } func NextFn(ctx sdk.Context, _ sdk.Tx, _ bool) (sdk.Context, error) { return ctx, nil } var _ sdk.Tx = &invalidTx{} type invalidTx struct{} func (invalidTx) GetMsgs() []sdk.Msg { return []sdk.Msg{nil} } func (invalidTx) ValidateBasic() error { return nil }
{ // First round: we gather all the signer infos. We use the "set empty // signature" hack to do that. sigV2 := signing.SignatureV2{ PubKey: priv.PubKey(), Data: &signing.SingleSignatureData{ SignMode: suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), Signature: nil, }, Sequence: txData.GetNonce(), } sigsV2 := []signing.SignatureV2{sigV2} err = txBuilder.SetSignatures(sigsV2...) suite.Require().NoError(err) // Second round: all signer infos are set, so each signer can sign. signerData := authsigning.SignerData{ ChainID: suite.ctx.ChainID(), AccountNumber: accNum, Sequence: txData.GetNonce(), } sigV2, err = tx.SignWithPrivKey( suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), signerData, txBuilder, priv, suite.clientCtx.TxConfig, txData.GetNonce(), ) suite.Require().NoError(err) sigsV2 = []signing.SignatureV2{sigV2} err = txBuilder.SetSignatures(sigsV2...) suite.Require().NoError(err) }
conditional_block
utils_test.go
package ante_test import ( "math" "math/big" "testing" "time" sdkmath "cosmossdk.io/math" "github.com/stretchr/testify/suite" "github.com/cosmos/cosmos-sdk/codec" "github.com/cosmos/cosmos-sdk/x/auth/migrations/legacytx" types2 "github.com/cosmos/cosmos-sdk/x/bank/types" types3 "github.com/cosmos/cosmos-sdk/x/staking/types" "github.com/ethereum/go-ethereum/crypto" "github.com/evmos/ethermint/ethereum/eip712" "github.com/evmos/ethermint/types" "github.com/ethereum/go-ethereum/common" ethtypes "github.com/ethereum/go-ethereum/core/types" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/tx" codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" "github.com/cosmos/cosmos-sdk/simapp" "github.com/cosmos/cosmos-sdk/testutil/testdata" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/tx/signing" authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing" authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" "github.com/evmos/ethermint/app" ante "github.com/evmos/ethermint/app/ante" "github.com/evmos/ethermint/encoding" "github.com/evmos/ethermint/tests" "github.com/evmos/ethermint/x/evm/statedb" evmtypes "github.com/evmos/ethermint/x/evm/types" feemarkettypes "github.com/evmos/ethermint/x/feemarket/types" tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) type AnteTestSuite struct { suite.Suite ctx sdk.Context app *app.EthermintApp clientCtx client.Context anteHandler sdk.AnteHandler ethSigner ethtypes.Signer enableFeemarket bool enableLondonHF bool evmParamsOption func(*evmtypes.Params) } const TestGasLimit uint64 = 100000 func (suite *AnteTestSuite) StateDB() *statedb.StateDB { return statedb.New(suite.ctx, suite.app.EvmKeeper, statedb.NewEmptyTxConfig(common.BytesToHash(suite.ctx.HeaderHash().Bytes()))) } func (suite *AnteTestSuite) SetupTest() { checkTx := false suite.app = app.Setup(checkTx, func(app 
*app.EthermintApp, genesis simapp.GenesisState) simapp.GenesisState { if suite.enableFeemarket { // setup feemarketGenesis params feemarketGenesis := feemarkettypes.DefaultGenesisState() feemarketGenesis.Params.EnableHeight = 1 feemarketGenesis.Params.NoBaseFee = false // Verify feeMarket genesis err := feemarketGenesis.Validate() suite.Require().NoError(err) genesis[feemarkettypes.ModuleName] = app.AppCodec().MustMarshalJSON(feemarketGenesis) } evmGenesis := evmtypes.DefaultGenesisState() evmGenesis.Params.AllowUnprotectedTxs = false if !suite.enableLondonHF { maxInt := sdkmath.NewInt(math.MaxInt64) evmGenesis.Params.ChainConfig.LondonBlock = &maxInt evmGenesis.Params.ChainConfig.ArrowGlacierBlock = &maxInt evmGenesis.Params.ChainConfig.GrayGlacierBlock = &maxInt evmGenesis.Params.ChainConfig.MergeNetsplitBlock = &maxInt } if suite.evmParamsOption != nil { suite.evmParamsOption(&evmGenesis.Params) } genesis[evmtypes.ModuleName] = app.AppCodec().MustMarshalJSON(evmGenesis) return genesis }) suite.ctx = suite.app.BaseApp.NewContext(checkTx, tmproto.Header{Height: 2, ChainID: "ethermint_9000-1", Time: time.Now().UTC()}) suite.ctx = suite.ctx.WithMinGasPrices(sdk.NewDecCoins(sdk.NewDecCoin(evmtypes.DefaultEVMDenom, sdk.OneInt()))) suite.ctx = suite.ctx.WithBlockGasMeter(sdk.NewGasMeter(1000000000000000000)) suite.app.EvmKeeper.WithChainID(suite.ctx) infCtx := suite.ctx.WithGasMeter(sdk.NewInfiniteGasMeter()) suite.app.AccountKeeper.SetParams(infCtx, authtypes.DefaultParams()) encodingConfig := encoding.MakeConfig(app.ModuleBasics) // We're using TestMsg amino encoding in some tests, so register it here. 
encodingConfig.Amino.RegisterConcrete(&testdata.TestMsg{}, "testdata.TestMsg", nil) suite.clientCtx = client.Context{}.WithTxConfig(encodingConfig.TxConfig) anteHandler, err := ante.NewAnteHandler(ante.HandlerOptions{ AccountKeeper: suite.app.AccountKeeper, BankKeeper: suite.app.BankKeeper, EvmKeeper: suite.app.EvmKeeper, FeegrantKeeper: suite.app.FeeGrantKeeper, IBCKeeper: suite.app.IBCKeeper, FeeMarketKeeper: suite.app.FeeMarketKeeper, SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), SigGasConsumer: ante.DefaultSigVerificationGasConsumer, }) suite.Require().NoError(err) suite.anteHandler = anteHandler suite.ethSigner = ethtypes.LatestSignerForChainID(suite.app.EvmKeeper.ChainID()) } func TestAnteTestSuite(t *testing.T) { suite.Run(t, &AnteTestSuite{ enableLondonHF: true, }) } func (s *AnteTestSuite) BuildTestEthTx( from common.Address, to common.Address, amount *big.Int, input []byte, gasPrice *big.Int, gasFeeCap *big.Int, gasTipCap *big.Int, accesses *ethtypes.AccessList, ) *evmtypes.MsgEthereumTx { chainID := s.app.EvmKeeper.ChainID() nonce := s.app.EvmKeeper.GetNonce( s.ctx, common.BytesToAddress(from.Bytes()), ) msgEthereumTx := evmtypes.NewTx( chainID, nonce, &to, amount, TestGasLimit, gasPrice, gasFeeCap, gasTipCap, input, accesses, ) msgEthereumTx.From = from.String() return msgEthereumTx } // CreateTestTx is a helper function to create a tx given multiple inputs. func (suite *AnteTestSuite) CreateTestTx( msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool, unsetExtensionOptions ...bool, ) authsigning.Tx { return suite.CreateTestTxBuilder(msg, priv, accNum, signCosmosTx).GetTx() } // CreateTestTxBuilder is a helper function to create a tx builder given multiple inputs. 
func (suite *AnteTestSuite) CreateTestTxBuilder( msg *evmtypes.MsgEthereumTx, priv cryptotypes.PrivKey, accNum uint64, signCosmosTx bool, unsetExtensionOptions ...bool, ) client.TxBuilder { var option *codectypes.Any var err error if len(unsetExtensionOptions) == 0 { option, err = codectypes.NewAnyWithValue(&evmtypes.ExtensionOptionsEthereumTx{}) suite.Require().NoError(err) } txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder) suite.Require().True(ok) if len(unsetExtensionOptions) == 0 { builder.SetExtensionOptions(option) } err = msg.Sign(suite.ethSigner, tests.NewSigner(priv)) suite.Require().NoError(err) msg.From = "" err = builder.SetMsgs(msg) suite.Require().NoError(err) txData, err := evmtypes.UnpackTxData(msg.Data) suite.Require().NoError(err) fees := sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewIntFromBigInt(txData.Fee()))) builder.SetFeeAmount(fees) builder.SetGasLimit(msg.GetGas()) if signCosmosTx { // First round: we gather all the signer infos. We use the "set empty // signature" hack to do that. sigV2 := signing.SignatureV2{ PubKey: priv.PubKey(), Data: &signing.SingleSignatureData{ SignMode: suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), Signature: nil, }, Sequence: txData.GetNonce(), } sigsV2 := []signing.SignatureV2{sigV2} err = txBuilder.SetSignatures(sigsV2...) suite.Require().NoError(err) // Second round: all signer infos are set, so each signer can sign. signerData := authsigning.SignerData{ ChainID: suite.ctx.ChainID(), AccountNumber: accNum, Sequence: txData.GetNonce(), } sigV2, err = tx.SignWithPrivKey( suite.clientCtx.TxConfig.SignModeHandler().DefaultMode(), signerData, txBuilder, priv, suite.clientCtx.TxConfig, txData.GetNonce(), ) suite.Require().NoError(err) sigsV2 = []signing.SignatureV2{sigV2} err = txBuilder.SetSignatures(sigsV2...) 
suite.Require().NoError(err) } return txBuilder } func (suite *AnteTestSuite) CreateTestCosmosTxBuilder(gasPrice sdkmath.Int, denom string, msgs ...sdk.Msg) client.TxBuilder { txBuilder := suite.clientCtx.TxConfig.NewTxBuilder() txBuilder.SetGasLimit(TestGasLimit) fees := &sdk.Coins{{Denom: denom, Amount: gasPrice.MulRaw(int64(TestGasLimit))}} txBuilder.SetFeeAmount(*fees) err := txBuilder.SetMsgs(msgs...) suite.Require().NoError(err) return txBuilder } func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgSend(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder
func (suite *AnteTestSuite) CreateTestEIP712TxBuilderMsgDelegate(from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins) client.TxBuilder { // Build MsgSend valEthAddr := tests.GenerateAddress() valAddr := sdk.ValAddress(valEthAddr.Bytes()) msgSend := types3.NewMsgDelegate(from, valAddr, sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(20))) return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend) } func (suite *AnteTestSuite) CreateTestEIP712CosmosTxBuilder( from sdk.AccAddress, priv cryptotypes.PrivKey, chainId string, gas uint64, gasAmount sdk.Coins, msg sdk.Msg, ) client.TxBuilder { var err error nonce, err := suite.app.AccountKeeper.GetSequence(suite.ctx, from) suite.Require().NoError(err) pc, err := types.ParseChainID(chainId) suite.Require().NoError(err) ethChainId := pc.Uint64() // GenerateTypedData TypedData var ethermintCodec codec.ProtoCodecMarshaler fee := legacytx.NewStdFee(gas, gasAmount) accNumber := suite.app.AccountKeeper.GetAccount(suite.ctx, from).GetAccountNumber() data := legacytx.StdSignBytes(chainId, accNumber, nonce, 0, fee, []sdk.Msg{msg}, "", nil) typedData, err := eip712.WrapTxToTypedData(ethermintCodec, ethChainId, msg, data, &eip712.FeeDelegationOptions{ FeePayer: from, }) suite.Require().NoError(err) sigHash, err := eip712.ComputeTypedDataHash(typedData) suite.Require().NoError(err) // Sign typedData keyringSigner := tests.NewSigner(priv) signature, pubKey, err := keyringSigner.SignByAddress(from, sigHash) suite.Require().NoError(err) signature[crypto.RecoveryIDOffset] += 27 // Transform V from 0/1 to 27/28 according to the yellow paper // Add ExtensionOptionsWeb3Tx extension var option *codectypes.Any option, err = codectypes.NewAnyWithValue(&types.ExtensionOptionsWeb3Tx{ FeePayer: from.String(), TypedDataChainID: ethChainId, FeePayerSig: signature, }) suite.Require().NoError(err) suite.clientCtx.TxConfig.SignModeHandler() txBuilder := 
suite.clientCtx.TxConfig.NewTxBuilder() builder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder) suite.Require().True(ok) builder.SetExtensionOptions(option) builder.SetFeeAmount(gasAmount) builder.SetGasLimit(gas) sigsV2 := signing.SignatureV2{ PubKey: pubKey, Data: &signing.SingleSignatureData{ SignMode: signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON, }, Sequence: nonce, } err = builder.SetSignatures(sigsV2) suite.Require().NoError(err) err = builder.SetMsgs(msg) suite.Require().NoError(err) return builder } func NextFn(ctx sdk.Context, _ sdk.Tx, _ bool) (sdk.Context, error) { return ctx, nil } var _ sdk.Tx = &invalidTx{} type invalidTx struct{} func (invalidTx) GetMsgs() []sdk.Msg { return []sdk.Msg{nil} } func (invalidTx) ValidateBasic() error { return nil }
{ // Build MsgSend recipient := sdk.AccAddress(common.Address{}.Bytes()) msgSend := types2.NewMsgSend(from, recipient, sdk.NewCoins(sdk.NewCoin(evmtypes.DefaultEVMDenom, sdkmath.NewInt(1)))) return suite.CreateTestEIP712CosmosTxBuilder(from, priv, chainId, gas, gasAmount, msgSend) }
identifier_body
RealTimePlotTemplate.py
from multiprocessing import Process, Manager,Array,Value, Lock from sklearn import preprocessing from ctypes import c_bool import math import DataStructure import signal import binascii from sklearn import preprocessing from sklearn.decomposition import PCA import time import struct import threading import pickle import collections import json,httplib import select import signal import sys import os from pynput import keyboard # import pyqtgraph as pg import numpy as np from bluepy import btle import fastdtw import DTW #---for save data #import self-defined module-------------------------------------------------------------- sys.path.append("OpenGL") import myOpenGL from myOpenGL import myCube import QTRealTimeScatter import QTwebcam import QTRealLine import Mahony import scipy #signal.signal(signal.SIGALRM, handler) # matplotlib.use('TkAgg') #global variables ----------------------------------------------------------------------- numOfDongle = 3 #ifae 0 dongle is responsible for scanning peripheral #list of node connecting #conn_list = [] #myopenGL's object saveData = [] acc_divider = 4095.999718 #acc_divider = 4096 gyro_divider = 65.500002 #gyro_divider = 65.5 DEG2RAD = 0.01745329251 ccount=1 #mahony_list = [] limit_num_of_received_data=50000 # fp = open('dataset1.dat', "wb") #append # fp1 = open('KNNClassifier.dat', "rb") index = 0 KNNmodel=0 lastYaw = 1000 Trainning_minimum=[] Trainning_diff=[] #class ----------------------------------------------------------------------------------- lastYaw=0 data1=np.NAN data2=np.NAN data3=np.NAN ''' [xxx]: xxx is a function Calibration: Acc: calculate calibration value beforhand Gyro: calculate at phase 1(startup phasess) in the function: [GetBacicData] Mag: calculate calibration value beforhand Process: # [ScanProcess](Scan) -> startup phasess ( [GetBacicData](connect, get Gyro calibaration value and stopping threshold) ) # -> create plot process([QTRun]) -> # Get IMU data by notification -> mackdwick(get angle) # -> remove 
gravity -> smoothing data # every 0.05 to execute the function [realtime] **** 0.05 is the window size Data: Inst.dataBlockSet: ['Acc'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Gyo'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Mag'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Angle'] : rawXYZ(save raw data), filteredXYZ(save filterd data) We keep the sensor and angle data whatever the user is in the static or moving state So use the other varible(Inst.Windows.workingIDX) to keep the index of data when moving Inst.Windows.workingIDX : the index of data when moving ''' class MyDelegate(btle.DefaultDelegate): def __init__(self,node ): btle.DefaultDelegate.__init__(self) self.node = node def handleNotification(self, cHandle, data): b2a = binascii.b2a_hex(data) self.node.noti = Uint4Toshort([b2a[0:4],b2a[4:8],b2a[8:12],b2a[12:16],b2a[16:20],b2a[20:24],b2a[24:28],b2a[28:32],b2a[32:36],b2a[36:40]]) # ... perhaps check cHandle # ... process 'data' class myNode(object): '''' a class to maintain connection and the calibration value of sensors''' def __init__(self): self.Peripheral = None self.nodeCube = None self.drawWindowNumber = -1 self.accBias = [0.0,0.0,0.0] self.gyroBias = [0.0,0.0,0.0] self.magBias = [0.0,0.0,0.0] self.magScale = [0.0,0.0,0.0] self.magCalibration = [0.0,0.0,0.0] self.noti = None self.fail_notify=0 self.workingtime=0.0 self.datagram=[] self.seq=0 self.count_received_data=0 S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07], [ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07], [ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]]) B = [-28.43905915, 51.22161875, -72.33527491] global S,B def BLEconnection(connNode,addr,connType,iface): ''' do ble connection ''' connNode.Peripheral = btle.Peripheral(addr , connType , iface = iface) connNode.Peripheral.setDelegate(MyDelegate(connNode)) magCalibration = binascii.b2a_hex(connNode.Peripheral.readCharacteristic(0x4C)) calibrationData = 
[magCalibration[0:8], magCalibration[8:16], magCalibration[16:24]] connNode.magCalibration = Uint8Tofloat(calibrationData) connNode.accBias = [-0.039746094, -0.012792969, -0.056347656] connNode.gyroBias = [1.477862573, 0.088549618, -1.477862597] # connNode.magBias = [57.712502, 27.521484, -37.898438 ] # connNode.magScale = [0.990893, 1.042146, 0.969697] connNode.magBias = [52.190625, 26.627929687499996, -24.46171875] connNode.magScale = [1.0418410041841004, 0.9688715953307393, 0.9920318725099602] #home # connNode.magBias = [48.312499, 41.460943, -21.877735 ] # connNode.magScale = [1.005747, 1.009227, 0.985360] print("accScales: ",S) print("accBias: ",B) # print("gyroBias: ",connNode.gyroBias) print("magBias: ",connNode.magBias) print("magScale: ",connNode.magScale) print("magCalibration: ",connNode.magCalibration) print("connect successfully") #connNode.setCalValue(connNode.accBias, connNode.gyroBias,connNode.magBias,connNode.magScale,connNode.magCalibration) #iface = (iface + 1) % numOfDongle + 1 #Try to get Service , Characteristic and set notification try: #need to add 0000fed0-0000-1000-8000-00805f9b34fb service = connNode.Peripheral.getServiceByUUID("0000FED0-0000-1000-8000-00805f9b34fb") char = service.getCharacteristics("0000FED7-0000-1000-8000-00805f9b34fb")[0] connNode.Peripheral.writeCharacteristic(char.handle + 2,struct.pack('<bb', 0x01, 0x00),True) except: print("get service, characteristic or set notification failed") def ScanProcess(iface=0):
def struct_isqrt(number): threehalfs = 1.5 x2 = number * 0.5 y = number packed_y = struct.pack('f', y) i = struct.unpack('i', packed_y)[0] # treat float's bytes as int i = 0x5f3759df - (i >> 1) # arithmetic with magic number packed_i = struct.pack('i', i) y = struct.unpack('f', packed_i)[0] # treat int's bytes as float y = y * (threehalfs - (x2 * y * y)) # Newton's method return y def QTRun(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,dataLengthList,Timestamp,Idx,resetFlag,isStatic): data=[[],[],[],[],[],[]] windowsLen = [] # print "xxxxxxxxxxxxxxxx" while True: # continue tEnd = time.time() while isStatic.value == True: pass if resetFlag.value == True: plotMyData.ResetGraph() resetFlag.value = False endIdx = Idx.value data[0]= plot1[0:endIdx] data[1]= plot2[0:endIdx] data[2]= plot3[0:endIdx] data[3]= plot4[0:endIdx] data[4]= plot5[0:endIdx] data[5]= plot6[0:endIdx] windowsLen.append([0,1]) windowsLen.append([0,1]) windowsLen.append([0,1]) isStatic.value = True # data[3].append(plot4.value) # data[4].append(plot5.value) # data[5].append(plot6.value) # data[6].append(timestamp.value) #,isCapturing.value plotMyData.setMyData(data,windowsLen) #isCapturing.value data=[[],[],[],[],[],[],[]] windowsLen = [] # tStart = time.time() def QTWebCam(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,isStatic,resetFlag): data=[[],[],[],[],[],[],[]] windowsLen = [] while True: tStart = time.time() while isStatic.value == True: pass if resetFlag.value == True: plotMyData.ResetGraph() resetFlag.value = False endIdx = Idx.value data[0]= plot1[0:endIdx] data[1]= plot2[0:endIdx] data[2]= plot3[0:endIdx] data[3]= plot4[0:endIdx] data[4]= plot5[0:endIdx] data[5]= plot6[0:endIdx] data[6]= Timestamp[0:endIdx] plotMyData.setMyData(data,isCapturing.value) data=[[],[],[],[],[],[],[],[]] windowsLen = [] tStart = time.time() isStatic.value = True def GetBacicData(node,addr,connType,mahony,iface): '''Get the stopping threshold and the calibration of gyro Args: node : addr : 
sensor ble address connType : pubilc/ramdon iface : which dongle you use to construct the connection ''' yawCalibration=0.0 BLEconnection(node,addr,connType,iface=iface) count = 0 gravity = 0 staticLinearAcc = [] staticLinearGyo = [] print "Do not moving!!!" while count!= 300: if node.Peripheral.waitForNotifications(0.01): count = count + 1 rawdata = node.noti node.gyroBias[0] += rawdata[3] node.gyroBias[1] += rawdata[4] node.gyroBias[2] += rawdata[5] node.gyroBias[0] = node.gyroBias[0]/gyro_divider/300 node.gyroBias[1] = node.gyroBias[1]/gyro_divider/300 node.gyroBias[2] = node.gyroBias[2]/gyro_divider/300 # print yawCalibration def Uint4Toshort(tenData): #print(threeData) retVal =[] for data in tenData: #(data) i = 0 byteArray = [] while(i != 4): byteArray.append(int(data[i:i+2], 16)) #print(int(data, 16)) i=i+2 b = ''.join(chr(i) for i in byteArray) if data == tenData[9]: retVal.append(struct.unpack('<H',b)[0]) else: retVal.append(struct.unpack('<h',b)[0]) # print retVal return retVal def Uint8Tofloat(threeData): #print(threeData) retVal =[] for data in threeData: #(data) i = 0 byteArray = [] while(i != 8): byteArray.append(int(data[i:i+2], 16)) #print(int(data, 16)) i=i+2 b = ''.join(chr(i) for i in byteArray) retVal.append(struct.unpack('<f',b)[0]) return retVal gravityList = [0.0,0.0,1.0] S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07], [ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07], [ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]]) B = [-28.43905915, 51.22161875, -72.33527491] global S,B def getYawByMag(normMag,Roll,Pitch): # return math.atan2( (normMag[1]*math.cos(Roll) + normMag[2]*math.sin(Roll) ) , (normMag[0]*math.cos(Pitch) + normMag[1]*math.sin(Pitch)*math.sin(Roll) - normMag[2]*math.sin(Pitch)*math.cos(Roll)) ) * 57.2957795 # return math.atan2( (normMag[0]*math.cos(Pitch) + normMag[2]*math.sin(Pitch) ) , (normMag[0]*math.sin(Pitch)*math.sin(Roll) + normMag[1]*math.cos(Roll) - normMag[2]*math.sin(Roll)*math.cos(Pitch)) ) * 
57.2957795 return math.atan2(normMag[1],normMag[0])* 57.2957795 # saverState = None def on_press(key): global saverState try: k = key.char # single-char keys except: k = key.name # other keys if key == keyboard.Key.esc: return False # stop listener if k == '0': # keys interested saverState.value=0 print('Gloabl') print('Key pressed: ' + k) if k == '1': # keys interested saverState.value=1 print('Local') print('Key pressed: ' + k) elif k == '4': # keys interested saverState.value=2 print('Key pressed: ' + k) elif k == '2': # keys interested saverState.value=3 print('Key pressed: ' + k) print('Saving Mode') elif k=='c' or k == 'C': saverState.value=8 print('Key pressed: ' + k) return False elif k=='D' or k == 'd': saverState.value=9 print('Key pressed: ' + k) return False # return False # remove this if want more keys global SMAaverage global SMAList global bufferFlag def SMA(Data,maxLen): ''' simple moving average : smoothing data Args: Data : raw data removed gravity ''' global SMAaverage,SMAList,bufferFlag if SMAList.shape[0] == maxLen: bufferFlag = 1 SMAaverage = np.mean (SMAList,axis=0) SMAaverage = SMAaverage - SMAList[0,:]/float(maxLen) + Data/float(maxLen) SMAList = np.delete(SMAList, 0,axis=0) SMAList = np.concatenate ((SMAList, Data),axis=0) # print SMAaverage return SMAaverage else: SMAList = np.concatenate ((SMAList, Data),axis=0) return np.zeros((0,6)) global DTWModel global AnglePose if __name__ == '__main__': global saverState,AxisDict,XAction,YAction,ZAction,XModel,YModel,ZModel,SMAList,bufferFlag,EMAList,EMAaverage,DTWModel,JayAnglePose #Xn2p,Xn2pScaler,Xp2n,Xp2nScaler,Yn2p,Yn2pScaler,Yp2n,Yp2nScaler,Zn2p,Zn2pScaler,Zp2n,Zp2nScaler SMAList = np.zeros( (0,6) ) bufferFlag = 0 EMAList = np.zeros( (0,3) ) ret = np.zeros( (0,6) ) Acceration = np.zeros( (0,3) ) SMAaverage = None EMAaverage = None NotRecognize = 0 DTWModel = pickle.load(open("JayDTW.dat", "rb")) AnglePose = pickle.load(open("JayAnglePose.dat", "rb")) lis = 
keyboard.Listener(on_press=on_press) lis.start() # start to listen on a separate thread iface = 1 # try to scan it t =threading.Thread(target = ScanProcess,args=( iface, ) ) t.start() t.join() connList = [] mahony = Mahony.MahonyClass() node = myNode() connList.append("3c:cd:40:18:c1:98") #connect and Draw GetBacicData(node,connList[-1],"public",mahony,iface ) # yawCalibration = 0 staticLinearAcc = 0.0857 saverState = Value('i',-1) node.nodeCube = myCube() mydraw = myOpenGL.myDraw(node.nodeCube) plot1 = Array('f',[0.0 for i in range(0,200)]) plot2 = Array('f',[0.0 for i in range(0,200)]) plot3 = Array('f',[0.0 for i in range(0,200)]) plot4 = Array('f',[0.0 for i in range(0,200)]) plot5 = Array('f',[0.0 for i in range(0,200)]) plot6 = Array('f',[0.0 for i in range(0,200)]) plot7 = Array('i',[0 for i in range(0,650)]) Timestamp = Array('f',[0.0 for i in range(0,200)]) resetFlag = Value(c_bool,False) Idx = Value('i',-1) staticFlag = Value(c_bool,True) isCapturing = Value(c_bool,False) print "Drawing" # I am not sure it is still work well...... # QTwebcam ........ plot plotMyData = QTwebcam.QtCapture(0) plotRealTimeData = Process(target=QTWebCam,args=(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,staticFlag,resetFlag)) plotRealTimeData.daemon=True plotRealTimeData.start() # QTGraph ........ plot # plotMyData = QTRealLine.MyRealTimePlot() # plotRealTimeData = Process(target=QTRun,args=(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,plot7,Timestamp,Idx,resetFlag,staticFlag)) # plotRealTimeData.daemon=True # plotRealTimeData.start() #QTGraph ........ 
ScatterPlot # A scatter graph to visualize the mag and see if the value of mag is normal # plotMyData = QTRealTimeScatter.MyRealTimeScatterPlot() # plotRealTimeData = Process(target=QTRun,args=(plotMyData,plot1,plot2,plot3,flag)) # plotRealTimeData.daemon=True # plotRealTimeData.start() # how many data to plot numberOfPlot = 20 temp = 0 # how many samples used for filter numSamples = 20 while True: tStart = time.time() if node.Peripheral.waitForNotifications(0.01): gyro = [None]*3 mag = [None]*3 shortdata = node.noti if shortdata[6] != 0 and shortdata[7] != 0 and shortdata[8] != 0: mag[0] = shortdata[6]*0.15*node.magCalibration[0] - node.magBias[0] mag[1] = shortdata[7]*0.15*node.magCalibration[1] - node.magBias[1] mag[2] = shortdata[8]*0.15*node.magCalibration[2] - node.magBias[2] #print(mag) mag[0] *= node.magScale[0]*10 mag[1] *= node.magScale[1]*10 mag[2] *= node.magScale[2]*10 acc = S.dot( np.array([shortdata[0],shortdata[1],shortdata[2]]) - B) acc = np.asmatrix(acc) gyro = [None]*3 gyro[0] = (shortdata[3]/gyro_divider - node.gyroBias[0])*DEG2RAD gyro[1] = (shortdata[4]/gyro_divider - node.gyroBias[1])*DEG2RAD gyro[2] = (shortdata[5]/gyro_divider - node.gyroBias[2])*DEG2RAD gyro = np.asmatrix(gyro) #smoothing data (acc and gyro) - I found sma with buffer size 30 it is not enough for gyro filtterdData = SMA(np.concatenate( (acc,gyro),axis=1 ) ,30) if bufferFlag == 0: continue rawFilterdData = np.concatenate((acc,filtterdData[0,0:3]),axis=1 ) ret = np.concatenate( (ret,rawFilterdData),axis=0 ) if filtterdData.shape[0] != 0 : if ret.shape[0] == numberOfPlot: ret = ret.transpose() # print ret plot1[0:numberOfPlot] = ret[0,:].tolist()[0] plot2[0:numberOfPlot] = ret[1,:].tolist()[0] plot3[0:numberOfPlot] = ret[2,:].tolist()[0] plot4[0:numberOfPlot] = ret[3,:].tolist()[0] plot5[0:numberOfPlot] = ret[4,:].tolist()[0] plot6[0:numberOfPlot] = ret[5,:].tolist()[0] Idx.value = numberOfPlot staticFlag.value = False ret = np.zeros( (0,6) ) else: continue
'''Scan ''' scanner = btle.Scanner(iface) while True : print("Still scanning... count: %s" % 1) try: devcies = scanner.scan(timeout = 3) # print devcies for dev in devcies: # print "xx" if dev.addr == "3c:cd:40:18:c1:98": #3c:cd:40:18:c3:46 3c:cd:40:0b:c0:48 #3c:cd:40:0b:c1:11 #3c:cd:40:18:c1:98 print("devcies %s (%s) , RSSI = %d dB" %(dev.addr , dev.addrType , dev.rssi)) return #Try to create connection except: print "failed scan" exit()
identifier_body
RealTimePlotTemplate.py
from multiprocessing import Process, Manager,Array,Value, Lock from sklearn import preprocessing from ctypes import c_bool import math import DataStructure import signal import binascii from sklearn import preprocessing from sklearn.decomposition import PCA import time import struct import threading import pickle import collections import json,httplib import select import signal import sys import os from pynput import keyboard # import pyqtgraph as pg import numpy as np from bluepy import btle import fastdtw import DTW #---for save data #import self-defined module-------------------------------------------------------------- sys.path.append("OpenGL") import myOpenGL from myOpenGL import myCube import QTRealTimeScatter import QTwebcam import QTRealLine import Mahony import scipy #signal.signal(signal.SIGALRM, handler) # matplotlib.use('TkAgg') #global variables ----------------------------------------------------------------------- numOfDongle = 3 #ifae 0 dongle is responsible for scanning peripheral #list of node connecting #conn_list = [] #myopenGL's object saveData = [] acc_divider = 4095.999718 #acc_divider = 4096 gyro_divider = 65.500002 #gyro_divider = 65.5 DEG2RAD = 0.01745329251 ccount=1 #mahony_list = [] limit_num_of_received_data=50000 # fp = open('dataset1.dat', "wb") #append # fp1 = open('KNNClassifier.dat', "rb") index = 0 KNNmodel=0 lastYaw = 1000 Trainning_minimum=[] Trainning_diff=[] #class ----------------------------------------------------------------------------------- lastYaw=0 data1=np.NAN data2=np.NAN data3=np.NAN ''' [xxx]: xxx is a function Calibration: Acc: calculate calibration value beforhand Gyro: calculate at phase 1(startup phasess) in the function: [GetBacicData] Mag: calculate calibration value beforhand Process: # [ScanProcess](Scan) -> startup phasess ( [GetBacicData](connect, get Gyro calibaration value and stopping threshold) ) # -> create plot process([QTRun]) -> # Get IMU data by notification -> mackdwick(get angle) # -> remove 
gravity -> smoothing data # every 0.05 to execute the function [realtime] **** 0.05 is the window size Data: Inst.dataBlockSet: ['Acc'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Gyo'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Mag'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Angle'] : rawXYZ(save raw data), filteredXYZ(save filterd data) We keep the sensor and angle data whatever the user is in the static or moving state So use the other varible(Inst.Windows.workingIDX) to keep the index of data when moving Inst.Windows.workingIDX : the index of data when moving ''' class MyDelegate(btle.DefaultDelegate): def __init__(self,node ): btle.DefaultDelegate.__init__(self) self.node = node def handleNotification(self, cHandle, data): b2a = binascii.b2a_hex(data) self.node.noti = Uint4Toshort([b2a[0:4],b2a[4:8],b2a[8:12],b2a[12:16],b2a[16:20],b2a[20:24],b2a[24:28],b2a[28:32],b2a[32:36],b2a[36:40]]) # ... perhaps check cHandle # ... process 'data' class myNode(object): '''' a class to maintain connection and the calibration value of sensors''' def
(self): self.Peripheral = None self.nodeCube = None self.drawWindowNumber = -1 self.accBias = [0.0,0.0,0.0] self.gyroBias = [0.0,0.0,0.0] self.magBias = [0.0,0.0,0.0] self.magScale = [0.0,0.0,0.0] self.magCalibration = [0.0,0.0,0.0] self.noti = None self.fail_notify=0 self.workingtime=0.0 self.datagram=[] self.seq=0 self.count_received_data=0 S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07], [ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07], [ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]]) B = [-28.43905915, 51.22161875, -72.33527491] global S,B def BLEconnection(connNode,addr,connType,iface): ''' do ble connection ''' connNode.Peripheral = btle.Peripheral(addr , connType , iface = iface) connNode.Peripheral.setDelegate(MyDelegate(connNode)) magCalibration = binascii.b2a_hex(connNode.Peripheral.readCharacteristic(0x4C)) calibrationData = [magCalibration[0:8], magCalibration[8:16], magCalibration[16:24]] connNode.magCalibration = Uint8Tofloat(calibrationData) connNode.accBias = [-0.039746094, -0.012792969, -0.056347656] connNode.gyroBias = [1.477862573, 0.088549618, -1.477862597] # connNode.magBias = [57.712502, 27.521484, -37.898438 ] # connNode.magScale = [0.990893, 1.042146, 0.969697] connNode.magBias = [52.190625, 26.627929687499996, -24.46171875] connNode.magScale = [1.0418410041841004, 0.9688715953307393, 0.9920318725099602] #home # connNode.magBias = [48.312499, 41.460943, -21.877735 ] # connNode.magScale = [1.005747, 1.009227, 0.985360] print("accScales: ",S) print("accBias: ",B) # print("gyroBias: ",connNode.gyroBias) print("magBias: ",connNode.magBias) print("magScale: ",connNode.magScale) print("magCalibration: ",connNode.magCalibration) print("connect successfully") #connNode.setCalValue(connNode.accBias, connNode.gyroBias,connNode.magBias,connNode.magScale,connNode.magCalibration) #iface = (iface + 1) % numOfDongle + 1 #Try to get Service , Characteristic and set notification try: #need to add 0000fed0-0000-1000-8000-00805f9b34fb 
service = connNode.Peripheral.getServiceByUUID("0000FED0-0000-1000-8000-00805f9b34fb") char = service.getCharacteristics("0000FED7-0000-1000-8000-00805f9b34fb")[0] connNode.Peripheral.writeCharacteristic(char.handle + 2,struct.pack('<bb', 0x01, 0x00),True) except: print("get service, characteristic or set notification failed") def ScanProcess(iface=0): '''Scan ''' scanner = btle.Scanner(iface) while True : print("Still scanning... count: %s" % 1) try: devcies = scanner.scan(timeout = 3) # print devcies for dev in devcies: # print "xx" if dev.addr == "3c:cd:40:18:c1:98": #3c:cd:40:18:c3:46 3c:cd:40:0b:c0:48 #3c:cd:40:0b:c1:11 #3c:cd:40:18:c1:98 print("devcies %s (%s) , RSSI = %d dB" %(dev.addr , dev.addrType , dev.rssi)) return #Try to create connection except: print "failed scan" exit() def struct_isqrt(number): threehalfs = 1.5 x2 = number * 0.5 y = number packed_y = struct.pack('f', y) i = struct.unpack('i', packed_y)[0] # treat float's bytes as int i = 0x5f3759df - (i >> 1) # arithmetic with magic number packed_i = struct.pack('i', i) y = struct.unpack('f', packed_i)[0] # treat int's bytes as float y = y * (threehalfs - (x2 * y * y)) # Newton's method return y def QTRun(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,dataLengthList,Timestamp,Idx,resetFlag,isStatic): data=[[],[],[],[],[],[]] windowsLen = [] # print "xxxxxxxxxxxxxxxx" while True: # continue tEnd = time.time() while isStatic.value == True: pass if resetFlag.value == True: plotMyData.ResetGraph() resetFlag.value = False endIdx = Idx.value data[0]= plot1[0:endIdx] data[1]= plot2[0:endIdx] data[2]= plot3[0:endIdx] data[3]= plot4[0:endIdx] data[4]= plot5[0:endIdx] data[5]= plot6[0:endIdx] windowsLen.append([0,1]) windowsLen.append([0,1]) windowsLen.append([0,1]) isStatic.value = True # data[3].append(plot4.value) # data[4].append(plot5.value) # data[5].append(plot6.value) # data[6].append(timestamp.value) #,isCapturing.value plotMyData.setMyData(data,windowsLen) #isCapturing.value 
data=[[],[],[],[],[],[],[]] windowsLen = [] # tStart = time.time() def QTWebCam(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,isStatic,resetFlag): data=[[],[],[],[],[],[],[]] windowsLen = [] while True: tStart = time.time() while isStatic.value == True: pass if resetFlag.value == True: plotMyData.ResetGraph() resetFlag.value = False endIdx = Idx.value data[0]= plot1[0:endIdx] data[1]= plot2[0:endIdx] data[2]= plot3[0:endIdx] data[3]= plot4[0:endIdx] data[4]= plot5[0:endIdx] data[5]= plot6[0:endIdx] data[6]= Timestamp[0:endIdx] plotMyData.setMyData(data,isCapturing.value) data=[[],[],[],[],[],[],[],[]] windowsLen = [] tStart = time.time() isStatic.value = True def GetBacicData(node,addr,connType,mahony,iface): '''Get the stopping threshold and the calibration of gyro Args: node : addr : sensor ble address connType : pubilc/ramdon iface : which dongle you use to construct the connection ''' yawCalibration=0.0 BLEconnection(node,addr,connType,iface=iface) count = 0 gravity = 0 staticLinearAcc = [] staticLinearGyo = [] print "Do not moving!!!" 
while count!= 300: if node.Peripheral.waitForNotifications(0.01): count = count + 1 rawdata = node.noti node.gyroBias[0] += rawdata[3] node.gyroBias[1] += rawdata[4] node.gyroBias[2] += rawdata[5] node.gyroBias[0] = node.gyroBias[0]/gyro_divider/300 node.gyroBias[1] = node.gyroBias[1]/gyro_divider/300 node.gyroBias[2] = node.gyroBias[2]/gyro_divider/300 # print yawCalibration def Uint4Toshort(tenData): #print(threeData) retVal =[] for data in tenData: #(data) i = 0 byteArray = [] while(i != 4): byteArray.append(int(data[i:i+2], 16)) #print(int(data, 16)) i=i+2 b = ''.join(chr(i) for i in byteArray) if data == tenData[9]: retVal.append(struct.unpack('<H',b)[0]) else: retVal.append(struct.unpack('<h',b)[0]) # print retVal return retVal def Uint8Tofloat(threeData): #print(threeData) retVal =[] for data in threeData: #(data) i = 0 byteArray = [] while(i != 8): byteArray.append(int(data[i:i+2], 16)) #print(int(data, 16)) i=i+2 b = ''.join(chr(i) for i in byteArray) retVal.append(struct.unpack('<f',b)[0]) return retVal gravityList = [0.0,0.0,1.0] S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07], [ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07], [ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]]) B = [-28.43905915, 51.22161875, -72.33527491] global S,B def getYawByMag(normMag,Roll,Pitch): # return math.atan2( (normMag[1]*math.cos(Roll) + normMag[2]*math.sin(Roll) ) , (normMag[0]*math.cos(Pitch) + normMag[1]*math.sin(Pitch)*math.sin(Roll) - normMag[2]*math.sin(Pitch)*math.cos(Roll)) ) * 57.2957795 # return math.atan2( (normMag[0]*math.cos(Pitch) + normMag[2]*math.sin(Pitch) ) , (normMag[0]*math.sin(Pitch)*math.sin(Roll) + normMag[1]*math.cos(Roll) - normMag[2]*math.sin(Roll)*math.cos(Pitch)) ) * 57.2957795 return math.atan2(normMag[1],normMag[0])* 57.2957795 # saverState = None def on_press(key): global saverState try: k = key.char # single-char keys except: k = key.name # other keys if key == keyboard.Key.esc: return False # stop listener if k == '0': # 
keys interested saverState.value=0 print('Gloabl') print('Key pressed: ' + k) if k == '1': # keys interested saverState.value=1 print('Local') print('Key pressed: ' + k) elif k == '4': # keys interested saverState.value=2 print('Key pressed: ' + k) elif k == '2': # keys interested saverState.value=3 print('Key pressed: ' + k) print('Saving Mode') elif k=='c' or k == 'C': saverState.value=8 print('Key pressed: ' + k) return False elif k=='D' or k == 'd': saverState.value=9 print('Key pressed: ' + k) return False # return False # remove this if want more keys global SMAaverage global SMAList global bufferFlag def SMA(Data,maxLen): ''' simple moving average : smoothing data Args: Data : raw data removed gravity ''' global SMAaverage,SMAList,bufferFlag if SMAList.shape[0] == maxLen: bufferFlag = 1 SMAaverage = np.mean (SMAList,axis=0) SMAaverage = SMAaverage - SMAList[0,:]/float(maxLen) + Data/float(maxLen) SMAList = np.delete(SMAList, 0,axis=0) SMAList = np.concatenate ((SMAList, Data),axis=0) # print SMAaverage return SMAaverage else: SMAList = np.concatenate ((SMAList, Data),axis=0) return np.zeros((0,6)) global DTWModel global AnglePose if __name__ == '__main__': global saverState,AxisDict,XAction,YAction,ZAction,XModel,YModel,ZModel,SMAList,bufferFlag,EMAList,EMAaverage,DTWModel,JayAnglePose #Xn2p,Xn2pScaler,Xp2n,Xp2nScaler,Yn2p,Yn2pScaler,Yp2n,Yp2nScaler,Zn2p,Zn2pScaler,Zp2n,Zp2nScaler SMAList = np.zeros( (0,6) ) bufferFlag = 0 EMAList = np.zeros( (0,3) ) ret = np.zeros( (0,6) ) Acceration = np.zeros( (0,3) ) SMAaverage = None EMAaverage = None NotRecognize = 0 DTWModel = pickle.load(open("JayDTW.dat", "rb")) AnglePose = pickle.load(open("JayAnglePose.dat", "rb")) lis = keyboard.Listener(on_press=on_press) lis.start() # start to listen on a separate thread iface = 1 # try to scan it t =threading.Thread(target = ScanProcess,args=( iface, ) ) t.start() t.join() connList = [] mahony = Mahony.MahonyClass() node = myNode() connList.append("3c:cd:40:18:c1:98") #connect 
and Draw GetBacicData(node,connList[-1],"public",mahony,iface ) # yawCalibration = 0 staticLinearAcc = 0.0857 saverState = Value('i',-1) node.nodeCube = myCube() mydraw = myOpenGL.myDraw(node.nodeCube) plot1 = Array('f',[0.0 for i in range(0,200)]) plot2 = Array('f',[0.0 for i in range(0,200)]) plot3 = Array('f',[0.0 for i in range(0,200)]) plot4 = Array('f',[0.0 for i in range(0,200)]) plot5 = Array('f',[0.0 for i in range(0,200)]) plot6 = Array('f',[0.0 for i in range(0,200)]) plot7 = Array('i',[0 for i in range(0,650)]) Timestamp = Array('f',[0.0 for i in range(0,200)]) resetFlag = Value(c_bool,False) Idx = Value('i',-1) staticFlag = Value(c_bool,True) isCapturing = Value(c_bool,False) print "Drawing" # I am not sure it is still work well...... # QTwebcam ........ plot plotMyData = QTwebcam.QtCapture(0) plotRealTimeData = Process(target=QTWebCam,args=(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,staticFlag,resetFlag)) plotRealTimeData.daemon=True plotRealTimeData.start() # QTGraph ........ plot # plotMyData = QTRealLine.MyRealTimePlot() # plotRealTimeData = Process(target=QTRun,args=(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,plot7,Timestamp,Idx,resetFlag,staticFlag)) # plotRealTimeData.daemon=True # plotRealTimeData.start() #QTGraph ........ 
ScatterPlot # A scatter graph to visualize the mag and see if the value of mag is normal # plotMyData = QTRealTimeScatter.MyRealTimeScatterPlot() # plotRealTimeData = Process(target=QTRun,args=(plotMyData,plot1,plot2,plot3,flag)) # plotRealTimeData.daemon=True # plotRealTimeData.start() # how many data to plot numberOfPlot = 20 temp = 0 # how many samples used for filter numSamples = 20 while True: tStart = time.time() if node.Peripheral.waitForNotifications(0.01): gyro = [None]*3 mag = [None]*3 shortdata = node.noti if shortdata[6] != 0 and shortdata[7] != 0 and shortdata[8] != 0: mag[0] = shortdata[6]*0.15*node.magCalibration[0] - node.magBias[0] mag[1] = shortdata[7]*0.15*node.magCalibration[1] - node.magBias[1] mag[2] = shortdata[8]*0.15*node.magCalibration[2] - node.magBias[2] #print(mag) mag[0] *= node.magScale[0]*10 mag[1] *= node.magScale[1]*10 mag[2] *= node.magScale[2]*10 acc = S.dot( np.array([shortdata[0],shortdata[1],shortdata[2]]) - B) acc = np.asmatrix(acc) gyro = [None]*3 gyro[0] = (shortdata[3]/gyro_divider - node.gyroBias[0])*DEG2RAD gyro[1] = (shortdata[4]/gyro_divider - node.gyroBias[1])*DEG2RAD gyro[2] = (shortdata[5]/gyro_divider - node.gyroBias[2])*DEG2RAD gyro = np.asmatrix(gyro) #smoothing data (acc and gyro) - I found sma with buffer size 30 it is not enough for gyro filtterdData = SMA(np.concatenate( (acc,gyro),axis=1 ) ,30) if bufferFlag == 0: continue rawFilterdData = np.concatenate((acc,filtterdData[0,0:3]),axis=1 ) ret = np.concatenate( (ret,rawFilterdData),axis=0 ) if filtterdData.shape[0] != 0 : if ret.shape[0] == numberOfPlot: ret = ret.transpose() # print ret plot1[0:numberOfPlot] = ret[0,:].tolist()[0] plot2[0:numberOfPlot] = ret[1,:].tolist()[0] plot3[0:numberOfPlot] = ret[2,:].tolist()[0] plot4[0:numberOfPlot] = ret[3,:].tolist()[0] plot5[0:numberOfPlot] = ret[4,:].tolist()[0] plot6[0:numberOfPlot] = ret[5,:].tolist()[0] Idx.value = numberOfPlot staticFlag.value = False ret = np.zeros( (0,6) ) else: continue
__init__
identifier_name
RealTimePlotTemplate.py
from multiprocessing import Process, Manager,Array,Value, Lock from sklearn import preprocessing from ctypes import c_bool import math import DataStructure import signal import binascii from sklearn import preprocessing from sklearn.decomposition import PCA import time import struct import threading import pickle import collections import json,httplib import select import signal import sys import os from pynput import keyboard # import pyqtgraph as pg import numpy as np from bluepy import btle import fastdtw import DTW #---for save data #import self-defined module-------------------------------------------------------------- sys.path.append("OpenGL") import myOpenGL from myOpenGL import myCube import QTRealTimeScatter import QTwebcam import QTRealLine import Mahony import scipy #signal.signal(signal.SIGALRM, handler) # matplotlib.use('TkAgg') #global variables ----------------------------------------------------------------------- numOfDongle = 3 #ifae 0 dongle is responsible for scanning peripheral #list of node connecting #conn_list = [] #myopenGL's object saveData = [] acc_divider = 4095.999718 #acc_divider = 4096 gyro_divider = 65.500002 #gyro_divider = 65.5 DEG2RAD = 0.01745329251 ccount=1 #mahony_list = [] limit_num_of_received_data=50000 # fp = open('dataset1.dat', "wb") #append # fp1 = open('KNNClassifier.dat', "rb") index = 0 KNNmodel=0 lastYaw = 1000 Trainning_minimum=[] Trainning_diff=[] #class ----------------------------------------------------------------------------------- lastYaw=0 data1=np.NAN data2=np.NAN data3=np.NAN ''' [xxx]: xxx is a function Calibration: Acc: calculate calibration value beforhand Gyro: calculate at phase 1(startup phasess) in the function: [GetBacicData] Mag: calculate calibration value beforhand Process: # [ScanProcess](Scan) -> startup phasess ( [GetBacicData](connect, get Gyro calibaration value and stopping threshold) ) # -> create plot process([QTRun]) -> # Get IMU data by notification -> mackdwick(get angle) # -> remove 
gravity -> smoothing data # every 0.05 to execute the function [realtime] **** 0.05 is the window size Data: Inst.dataBlockSet: ['Acc'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Gyo'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Mag'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Angle'] : rawXYZ(save raw data), filteredXYZ(save filterd data) We keep the sensor and angle data whatever the user is in the static or moving state So use the other varible(Inst.Windows.workingIDX) to keep the index of data when moving Inst.Windows.workingIDX : the index of data when moving ''' class MyDelegate(btle.DefaultDelegate): def __init__(self,node ): btle.DefaultDelegate.__init__(self) self.node = node def handleNotification(self, cHandle, data): b2a = binascii.b2a_hex(data) self.node.noti = Uint4Toshort([b2a[0:4],b2a[4:8],b2a[8:12],b2a[12:16],b2a[16:20],b2a[20:24],b2a[24:28],b2a[28:32],b2a[32:36],b2a[36:40]]) # ... perhaps check cHandle # ... process 'data' class myNode(object): '''' a class to maintain connection and the calibration value of sensors''' def __init__(self): self.Peripheral = None self.nodeCube = None self.drawWindowNumber = -1 self.accBias = [0.0,0.0,0.0] self.gyroBias = [0.0,0.0,0.0] self.magBias = [0.0,0.0,0.0] self.magScale = [0.0,0.0,0.0] self.magCalibration = [0.0,0.0,0.0] self.noti = None self.fail_notify=0 self.workingtime=0.0 self.datagram=[] self.seq=0 self.count_received_data=0 S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07], [ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07], [ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]]) B = [-28.43905915, 51.22161875, -72.33527491] global S,B def BLEconnection(connNode,addr,connType,iface): ''' do ble connection ''' connNode.Peripheral = btle.Peripheral(addr , connType , iface = iface) connNode.Peripheral.setDelegate(MyDelegate(connNode)) magCalibration = binascii.b2a_hex(connNode.Peripheral.readCharacteristic(0x4C)) calibrationData = 
[magCalibration[0:8], magCalibration[8:16], magCalibration[16:24]] connNode.magCalibration = Uint8Tofloat(calibrationData) connNode.accBias = [-0.039746094, -0.012792969, -0.056347656] connNode.gyroBias = [1.477862573, 0.088549618, -1.477862597] # connNode.magBias = [57.712502, 27.521484, -37.898438 ] # connNode.magScale = [0.990893, 1.042146, 0.969697] connNode.magBias = [52.190625, 26.627929687499996, -24.46171875] connNode.magScale = [1.0418410041841004, 0.9688715953307393, 0.9920318725099602] #home # connNode.magBias = [48.312499, 41.460943, -21.877735 ] # connNode.magScale = [1.005747, 1.009227, 0.985360] print("accScales: ",S) print("accBias: ",B) # print("gyroBias: ",connNode.gyroBias) print("magBias: ",connNode.magBias) print("magScale: ",connNode.magScale) print("magCalibration: ",connNode.magCalibration) print("connect successfully") #connNode.setCalValue(connNode.accBias, connNode.gyroBias,connNode.magBias,connNode.magScale,connNode.magCalibration) #iface = (iface + 1) % numOfDongle + 1 #Try to get Service , Characteristic and set notification try: #need to add 0000fed0-0000-1000-8000-00805f9b34fb service = connNode.Peripheral.getServiceByUUID("0000FED0-0000-1000-8000-00805f9b34fb") char = service.getCharacteristics("0000FED7-0000-1000-8000-00805f9b34fb")[0] connNode.Peripheral.writeCharacteristic(char.handle + 2,struct.pack('<bb', 0x01, 0x00),True) except: print("get service, characteristic or set notification failed") def ScanProcess(iface=0): '''Scan ''' scanner = btle.Scanner(iface) while True : print("Still scanning... 
count: %s" % 1) try: devcies = scanner.scan(timeout = 3) # print devcies for dev in devcies: # print "xx" if dev.addr == "3c:cd:40:18:c1:98": #3c:cd:40:18:c3:46 3c:cd:40:0b:c0:48 #3c:cd:40:0b:c1:11 #3c:cd:40:18:c1:98 print("devcies %s (%s) , RSSI = %d dB" %(dev.addr , dev.addrType , dev.rssi)) return #Try to create connection except: print "failed scan" exit() def struct_isqrt(number): threehalfs = 1.5 x2 = number * 0.5 y = number packed_y = struct.pack('f', y) i = struct.unpack('i', packed_y)[0] # treat float's bytes as int i = 0x5f3759df - (i >> 1) # arithmetic with magic number packed_i = struct.pack('i', i) y = struct.unpack('f', packed_i)[0] # treat int's bytes as float y = y * (threehalfs - (x2 * y * y)) # Newton's method return y def QTRun(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,dataLengthList,Timestamp,Idx,resetFlag,isStatic): data=[[],[],[],[],[],[]] windowsLen = [] # print "xxxxxxxxxxxxxxxx" while True: # continue tEnd = time.time() while isStatic.value == True:
if resetFlag.value == True: plotMyData.ResetGraph() resetFlag.value = False endIdx = Idx.value data[0]= plot1[0:endIdx] data[1]= plot2[0:endIdx] data[2]= plot3[0:endIdx] data[3]= plot4[0:endIdx] data[4]= plot5[0:endIdx] data[5]= plot6[0:endIdx] windowsLen.append([0,1]) windowsLen.append([0,1]) windowsLen.append([0,1]) isStatic.value = True # data[3].append(plot4.value) # data[4].append(plot5.value) # data[5].append(plot6.value) # data[6].append(timestamp.value) #,isCapturing.value plotMyData.setMyData(data,windowsLen) #isCapturing.value data=[[],[],[],[],[],[],[]] windowsLen = [] # tStart = time.time() def QTWebCam(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,isStatic,resetFlag): data=[[],[],[],[],[],[],[]] windowsLen = [] while True: tStart = time.time() while isStatic.value == True: pass if resetFlag.value == True: plotMyData.ResetGraph() resetFlag.value = False endIdx = Idx.value data[0]= plot1[0:endIdx] data[1]= plot2[0:endIdx] data[2]= plot3[0:endIdx] data[3]= plot4[0:endIdx] data[4]= plot5[0:endIdx] data[5]= plot6[0:endIdx] data[6]= Timestamp[0:endIdx] plotMyData.setMyData(data,isCapturing.value) data=[[],[],[],[],[],[],[],[]] windowsLen = [] tStart = time.time() isStatic.value = True def GetBacicData(node,addr,connType,mahony,iface): '''Get the stopping threshold and the calibration of gyro Args: node : addr : sensor ble address connType : pubilc/ramdon iface : which dongle you use to construct the connection ''' yawCalibration=0.0 BLEconnection(node,addr,connType,iface=iface) count = 0 gravity = 0 staticLinearAcc = [] staticLinearGyo = [] print "Do not moving!!!" 
while count!= 300: if node.Peripheral.waitForNotifications(0.01): count = count + 1 rawdata = node.noti node.gyroBias[0] += rawdata[3] node.gyroBias[1] += rawdata[4] node.gyroBias[2] += rawdata[5] node.gyroBias[0] = node.gyroBias[0]/gyro_divider/300 node.gyroBias[1] = node.gyroBias[1]/gyro_divider/300 node.gyroBias[2] = node.gyroBias[2]/gyro_divider/300 # print yawCalibration def Uint4Toshort(tenData): #print(threeData) retVal =[] for data in tenData: #(data) i = 0 byteArray = [] while(i != 4): byteArray.append(int(data[i:i+2], 16)) #print(int(data, 16)) i=i+2 b = ''.join(chr(i) for i in byteArray) if data == tenData[9]: retVal.append(struct.unpack('<H',b)[0]) else: retVal.append(struct.unpack('<h',b)[0]) # print retVal return retVal def Uint8Tofloat(threeData): #print(threeData) retVal =[] for data in threeData: #(data) i = 0 byteArray = [] while(i != 8): byteArray.append(int(data[i:i+2], 16)) #print(int(data, 16)) i=i+2 b = ''.join(chr(i) for i in byteArray) retVal.append(struct.unpack('<f',b)[0]) return retVal gravityList = [0.0,0.0,1.0] S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07], [ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07], [ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]]) B = [-28.43905915, 51.22161875, -72.33527491] global S,B def getYawByMag(normMag,Roll,Pitch): # return math.atan2( (normMag[1]*math.cos(Roll) + normMag[2]*math.sin(Roll) ) , (normMag[0]*math.cos(Pitch) + normMag[1]*math.sin(Pitch)*math.sin(Roll) - normMag[2]*math.sin(Pitch)*math.cos(Roll)) ) * 57.2957795 # return math.atan2( (normMag[0]*math.cos(Pitch) + normMag[2]*math.sin(Pitch) ) , (normMag[0]*math.sin(Pitch)*math.sin(Roll) + normMag[1]*math.cos(Roll) - normMag[2]*math.sin(Roll)*math.cos(Pitch)) ) * 57.2957795 return math.atan2(normMag[1],normMag[0])* 57.2957795 # saverState = None def on_press(key): global saverState try: k = key.char # single-char keys except: k = key.name # other keys if key == keyboard.Key.esc: return False # stop listener if k == '0': # 
keys interested saverState.value=0 print('Gloabl') print('Key pressed: ' + k) if k == '1': # keys interested saverState.value=1 print('Local') print('Key pressed: ' + k) elif k == '4': # keys interested saverState.value=2 print('Key pressed: ' + k) elif k == '2': # keys interested saverState.value=3 print('Key pressed: ' + k) print('Saving Mode') elif k=='c' or k == 'C': saverState.value=8 print('Key pressed: ' + k) return False elif k=='D' or k == 'd': saverState.value=9 print('Key pressed: ' + k) return False # return False # remove this if want more keys global SMAaverage global SMAList global bufferFlag def SMA(Data,maxLen): ''' simple moving average : smoothing data Args: Data : raw data removed gravity ''' global SMAaverage,SMAList,bufferFlag if SMAList.shape[0] == maxLen: bufferFlag = 1 SMAaverage = np.mean (SMAList,axis=0) SMAaverage = SMAaverage - SMAList[0,:]/float(maxLen) + Data/float(maxLen) SMAList = np.delete(SMAList, 0,axis=0) SMAList = np.concatenate ((SMAList, Data),axis=0) # print SMAaverage return SMAaverage else: SMAList = np.concatenate ((SMAList, Data),axis=0) return np.zeros((0,6)) global DTWModel global AnglePose if __name__ == '__main__': global saverState,AxisDict,XAction,YAction,ZAction,XModel,YModel,ZModel,SMAList,bufferFlag,EMAList,EMAaverage,DTWModel,JayAnglePose #Xn2p,Xn2pScaler,Xp2n,Xp2nScaler,Yn2p,Yn2pScaler,Yp2n,Yp2nScaler,Zn2p,Zn2pScaler,Zp2n,Zp2nScaler SMAList = np.zeros( (0,6) ) bufferFlag = 0 EMAList = np.zeros( (0,3) ) ret = np.zeros( (0,6) ) Acceration = np.zeros( (0,3) ) SMAaverage = None EMAaverage = None NotRecognize = 0 DTWModel = pickle.load(open("JayDTW.dat", "rb")) AnglePose = pickle.load(open("JayAnglePose.dat", "rb")) lis = keyboard.Listener(on_press=on_press) lis.start() # start to listen on a separate thread iface = 1 # try to scan it t =threading.Thread(target = ScanProcess,args=( iface, ) ) t.start() t.join() connList = [] mahony = Mahony.MahonyClass() node = myNode() connList.append("3c:cd:40:18:c1:98") #connect 
and Draw GetBacicData(node,connList[-1],"public",mahony,iface ) # yawCalibration = 0 staticLinearAcc = 0.0857 saverState = Value('i',-1) node.nodeCube = myCube() mydraw = myOpenGL.myDraw(node.nodeCube) plot1 = Array('f',[0.0 for i in range(0,200)]) plot2 = Array('f',[0.0 for i in range(0,200)]) plot3 = Array('f',[0.0 for i in range(0,200)]) plot4 = Array('f',[0.0 for i in range(0,200)]) plot5 = Array('f',[0.0 for i in range(0,200)]) plot6 = Array('f',[0.0 for i in range(0,200)]) plot7 = Array('i',[0 for i in range(0,650)]) Timestamp = Array('f',[0.0 for i in range(0,200)]) resetFlag = Value(c_bool,False) Idx = Value('i',-1) staticFlag = Value(c_bool,True) isCapturing = Value(c_bool,False) print "Drawing" # I am not sure it is still work well...... # QTwebcam ........ plot plotMyData = QTwebcam.QtCapture(0) plotRealTimeData = Process(target=QTWebCam,args=(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,staticFlag,resetFlag)) plotRealTimeData.daemon=True plotRealTimeData.start() # QTGraph ........ plot # plotMyData = QTRealLine.MyRealTimePlot() # plotRealTimeData = Process(target=QTRun,args=(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,plot7,Timestamp,Idx,resetFlag,staticFlag)) # plotRealTimeData.daemon=True # plotRealTimeData.start() #QTGraph ........ 
ScatterPlot # A scatter graph to visualize the mag and see if the value of mag is normal # plotMyData = QTRealTimeScatter.MyRealTimeScatterPlot() # plotRealTimeData = Process(target=QTRun,args=(plotMyData,plot1,plot2,plot3,flag)) # plotRealTimeData.daemon=True # plotRealTimeData.start() # how many data to plot numberOfPlot = 20 temp = 0 # how many samples used for filter numSamples = 20 while True: tStart = time.time() if node.Peripheral.waitForNotifications(0.01): gyro = [None]*3 mag = [None]*3 shortdata = node.noti if shortdata[6] != 0 and shortdata[7] != 0 and shortdata[8] != 0: mag[0] = shortdata[6]*0.15*node.magCalibration[0] - node.magBias[0] mag[1] = shortdata[7]*0.15*node.magCalibration[1] - node.magBias[1] mag[2] = shortdata[8]*0.15*node.magCalibration[2] - node.magBias[2] #print(mag) mag[0] *= node.magScale[0]*10 mag[1] *= node.magScale[1]*10 mag[2] *= node.magScale[2]*10 acc = S.dot( np.array([shortdata[0],shortdata[1],shortdata[2]]) - B) acc = np.asmatrix(acc) gyro = [None]*3 gyro[0] = (shortdata[3]/gyro_divider - node.gyroBias[0])*DEG2RAD gyro[1] = (shortdata[4]/gyro_divider - node.gyroBias[1])*DEG2RAD gyro[2] = (shortdata[5]/gyro_divider - node.gyroBias[2])*DEG2RAD gyro = np.asmatrix(gyro) #smoothing data (acc and gyro) - I found sma with buffer size 30 it is not enough for gyro filtterdData = SMA(np.concatenate( (acc,gyro),axis=1 ) ,30) if bufferFlag == 0: continue rawFilterdData = np.concatenate((acc,filtterdData[0,0:3]),axis=1 ) ret = np.concatenate( (ret,rawFilterdData),axis=0 ) if filtterdData.shape[0] != 0 : if ret.shape[0] == numberOfPlot: ret = ret.transpose() # print ret plot1[0:numberOfPlot] = ret[0,:].tolist()[0] plot2[0:numberOfPlot] = ret[1,:].tolist()[0] plot3[0:numberOfPlot] = ret[2,:].tolist()[0] plot4[0:numberOfPlot] = ret[3,:].tolist()[0] plot5[0:numberOfPlot] = ret[4,:].tolist()[0] plot6[0:numberOfPlot] = ret[5,:].tolist()[0] Idx.value = numberOfPlot staticFlag.value = False ret = np.zeros( (0,6) ) else: continue
pass
conditional_block
RealTimePlotTemplate.py
from multiprocessing import Process, Manager,Array,Value, Lock from sklearn import preprocessing from ctypes import c_bool import math import DataStructure import signal import binascii from sklearn import preprocessing from sklearn.decomposition import PCA import time import struct import threading import pickle import collections import json,httplib import select import signal import sys import os from pynput import keyboard # import pyqtgraph as pg import numpy as np from bluepy import btle import fastdtw import DTW #---for save data #import self-defined module-------------------------------------------------------------- sys.path.append("OpenGL") import myOpenGL from myOpenGL import myCube import QTRealTimeScatter import QTwebcam import QTRealLine import Mahony import scipy #signal.signal(signal.SIGALRM, handler) # matplotlib.use('TkAgg') #global variables ----------------------------------------------------------------------- numOfDongle = 3 #ifae 0 dongle is responsible for scanning peripheral #list of node connecting #conn_list = [] #myopenGL's object saveData = [] acc_divider = 4095.999718 #acc_divider = 4096 gyro_divider = 65.500002 #gyro_divider = 65.5 DEG2RAD = 0.01745329251 ccount=1 #mahony_list = [] limit_num_of_received_data=50000 # fp = open('dataset1.dat', "wb") #append # fp1 = open('KNNClassifier.dat', "rb") index = 0 KNNmodel=0 lastYaw = 1000 Trainning_minimum=[] Trainning_diff=[] #class ----------------------------------------------------------------------------------- lastYaw=0 data1=np.NAN data2=np.NAN data3=np.NAN ''' [xxx]: xxx is a function Calibration: Acc: calculate calibration value beforhand Gyro: calculate at phase 1(startup phasess) in the function: [GetBacicData] Mag: calculate calibration value beforhand Process: # [ScanProcess](Scan) -> startup phasess ( [GetBacicData](connect, get Gyro calibaration value and stopping threshold) ) # -> create plot process([QTRun]) -> # Get IMU data by notification -> mackdwick(get angle) # -> remove 
gravity -> smoothing data # every 0.05 to execute the function [realtime] **** 0.05 is the window size Data: Inst.dataBlockSet: ['Acc'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Gyo'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Mag'] : rawXYZ(save raw data), filteredXYZ(save filterd data) ['Angle'] : rawXYZ(save raw data), filteredXYZ(save filterd data) We keep the sensor and angle data whatever the user is in the static or moving state So use the other varible(Inst.Windows.workingIDX) to keep the index of data when moving Inst.Windows.workingIDX : the index of data when moving ''' class MyDelegate(btle.DefaultDelegate): def __init__(self,node ): btle.DefaultDelegate.__init__(self) self.node = node def handleNotification(self, cHandle, data): b2a = binascii.b2a_hex(data) self.node.noti = Uint4Toshort([b2a[0:4],b2a[4:8],b2a[8:12],b2a[12:16],b2a[16:20],b2a[20:24],b2a[24:28],b2a[28:32],b2a[32:36],b2a[36:40]]) # ... perhaps check cHandle # ... process 'data' class myNode(object): '''' a class to maintain connection and the calibration value of sensors''' def __init__(self): self.Peripheral = None self.nodeCube = None self.drawWindowNumber = -1
self.noti = None self.fail_notify=0 self.workingtime=0.0 self.datagram=[] self.seq=0 self.count_received_data=0 S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07], [ 3.41614666e-07, 2.43926399e-04, 1.68822071e-07], [ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]]) B = [-28.43905915, 51.22161875, -72.33527491] global S,B def BLEconnection(connNode,addr,connType,iface): ''' do ble connection ''' connNode.Peripheral = btle.Peripheral(addr , connType , iface = iface) connNode.Peripheral.setDelegate(MyDelegate(connNode)) magCalibration = binascii.b2a_hex(connNode.Peripheral.readCharacteristic(0x4C)) calibrationData = [magCalibration[0:8], magCalibration[8:16], magCalibration[16:24]] connNode.magCalibration = Uint8Tofloat(calibrationData) connNode.accBias = [-0.039746094, -0.012792969, -0.056347656] connNode.gyroBias = [1.477862573, 0.088549618, -1.477862597] # connNode.magBias = [57.712502, 27.521484, -37.898438 ] # connNode.magScale = [0.990893, 1.042146, 0.969697] connNode.magBias = [52.190625, 26.627929687499996, -24.46171875] connNode.magScale = [1.0418410041841004, 0.9688715953307393, 0.9920318725099602] #home # connNode.magBias = [48.312499, 41.460943, -21.877735 ] # connNode.magScale = [1.005747, 1.009227, 0.985360] print("accScales: ",S) print("accBias: ",B) # print("gyroBias: ",connNode.gyroBias) print("magBias: ",connNode.magBias) print("magScale: ",connNode.magScale) print("magCalibration: ",connNode.magCalibration) print("connect successfully") #connNode.setCalValue(connNode.accBias, connNode.gyroBias,connNode.magBias,connNode.magScale,connNode.magCalibration) #iface = (iface + 1) % numOfDongle + 1 #Try to get Service , Characteristic and set notification try: #need to add 0000fed0-0000-1000-8000-00805f9b34fb service = connNode.Peripheral.getServiceByUUID("0000FED0-0000-1000-8000-00805f9b34fb") char = service.getCharacteristics("0000FED7-0000-1000-8000-00805f9b34fb")[0] connNode.Peripheral.writeCharacteristic(char.handle + 
2,struct.pack('<bb', 0x01, 0x00),True) except: print("get service, characteristic or set notification failed") def ScanProcess(iface=0): '''Scan ''' scanner = btle.Scanner(iface) while True : print("Still scanning... count: %s" % 1) try: devcies = scanner.scan(timeout = 3) # print devcies for dev in devcies: # print "xx" if dev.addr == "3c:cd:40:18:c1:98": #3c:cd:40:18:c3:46 3c:cd:40:0b:c0:48 #3c:cd:40:0b:c1:11 #3c:cd:40:18:c1:98 print("devcies %s (%s) , RSSI = %d dB" %(dev.addr , dev.addrType , dev.rssi)) return #Try to create connection except: print "failed scan" exit() def struct_isqrt(number): threehalfs = 1.5 x2 = number * 0.5 y = number packed_y = struct.pack('f', y) i = struct.unpack('i', packed_y)[0] # treat float's bytes as int i = 0x5f3759df - (i >> 1) # arithmetic with magic number packed_i = struct.pack('i', i) y = struct.unpack('f', packed_i)[0] # treat int's bytes as float y = y * (threehalfs - (x2 * y * y)) # Newton's method return y def QTRun(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,dataLengthList,Timestamp,Idx,resetFlag,isStatic): data=[[],[],[],[],[],[]] windowsLen = [] # print "xxxxxxxxxxxxxxxx" while True: # continue tEnd = time.time() while isStatic.value == True: pass if resetFlag.value == True: plotMyData.ResetGraph() resetFlag.value = False endIdx = Idx.value data[0]= plot1[0:endIdx] data[1]= plot2[0:endIdx] data[2]= plot3[0:endIdx] data[3]= plot4[0:endIdx] data[4]= plot5[0:endIdx] data[5]= plot6[0:endIdx] windowsLen.append([0,1]) windowsLen.append([0,1]) windowsLen.append([0,1]) isStatic.value = True # data[3].append(plot4.value) # data[4].append(plot5.value) # data[5].append(plot6.value) # data[6].append(timestamp.value) #,isCapturing.value plotMyData.setMyData(data,windowsLen) #isCapturing.value data=[[],[],[],[],[],[],[]] windowsLen = [] # tStart = time.time() def QTWebCam(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,isStatic,resetFlag): data=[[],[],[],[],[],[],[]] windowsLen = [] while True: tStart = 
time.time() while isStatic.value == True: pass if resetFlag.value == True: plotMyData.ResetGraph() resetFlag.value = False endIdx = Idx.value data[0]= plot1[0:endIdx] data[1]= plot2[0:endIdx] data[2]= plot3[0:endIdx] data[3]= plot4[0:endIdx] data[4]= plot5[0:endIdx] data[5]= plot6[0:endIdx] data[6]= Timestamp[0:endIdx] plotMyData.setMyData(data,isCapturing.value) data=[[],[],[],[],[],[],[],[]] windowsLen = [] tStart = time.time() isStatic.value = True def GetBacicData(node,addr,connType,mahony,iface): '''Get the stopping threshold and the calibration of gyro Args: node : addr : sensor ble address connType : pubilc/ramdon iface : which dongle you use to construct the connection ''' yawCalibration=0.0 BLEconnection(node,addr,connType,iface=iface) count = 0 gravity = 0 staticLinearAcc = [] staticLinearGyo = [] print "Do not moving!!!" while count!= 300: if node.Peripheral.waitForNotifications(0.01): count = count + 1 rawdata = node.noti node.gyroBias[0] += rawdata[3] node.gyroBias[1] += rawdata[4] node.gyroBias[2] += rawdata[5] node.gyroBias[0] = node.gyroBias[0]/gyro_divider/300 node.gyroBias[1] = node.gyroBias[1]/gyro_divider/300 node.gyroBias[2] = node.gyroBias[2]/gyro_divider/300 # print yawCalibration def Uint4Toshort(tenData): #print(threeData) retVal =[] for data in tenData: #(data) i = 0 byteArray = [] while(i != 4): byteArray.append(int(data[i:i+2], 16)) #print(int(data, 16)) i=i+2 b = ''.join(chr(i) for i in byteArray) if data == tenData[9]: retVal.append(struct.unpack('<H',b)[0]) else: retVal.append(struct.unpack('<h',b)[0]) # print retVal return retVal def Uint8Tofloat(threeData): #print(threeData) retVal =[] for data in threeData: #(data) i = 0 byteArray = [] while(i != 8): byteArray.append(int(data[i:i+2], 16)) #print(int(data, 16)) i=i+2 b = ''.join(chr(i) for i in byteArray) retVal.append(struct.unpack('<f',b)[0]) return retVal gravityList = [0.0,0.0,1.0] S = np.array([[ 2.42754810e-04, 3.41614666e-07, -2.07507663e-07], [ 3.41614666e-07, 
2.43926399e-04, 1.68822071e-07], [ -2.07507663e-07, 1.68822071e-07, 2.43800712e-04]]) B = [-28.43905915, 51.22161875, -72.33527491] global S,B def getYawByMag(normMag,Roll,Pitch): # return math.atan2( (normMag[1]*math.cos(Roll) + normMag[2]*math.sin(Roll) ) , (normMag[0]*math.cos(Pitch) + normMag[1]*math.sin(Pitch)*math.sin(Roll) - normMag[2]*math.sin(Pitch)*math.cos(Roll)) ) * 57.2957795 # return math.atan2( (normMag[0]*math.cos(Pitch) + normMag[2]*math.sin(Pitch) ) , (normMag[0]*math.sin(Pitch)*math.sin(Roll) + normMag[1]*math.cos(Roll) - normMag[2]*math.sin(Roll)*math.cos(Pitch)) ) * 57.2957795 return math.atan2(normMag[1],normMag[0])* 57.2957795 # saverState = None def on_press(key): global saverState try: k = key.char # single-char keys except: k = key.name # other keys if key == keyboard.Key.esc: return False # stop listener if k == '0': # keys interested saverState.value=0 print('Gloabl') print('Key pressed: ' + k) if k == '1': # keys interested saverState.value=1 print('Local') print('Key pressed: ' + k) elif k == '4': # keys interested saverState.value=2 print('Key pressed: ' + k) elif k == '2': # keys interested saverState.value=3 print('Key pressed: ' + k) print('Saving Mode') elif k=='c' or k == 'C': saverState.value=8 print('Key pressed: ' + k) return False elif k=='D' or k == 'd': saverState.value=9 print('Key pressed: ' + k) return False # return False # remove this if want more keys global SMAaverage global SMAList global bufferFlag def SMA(Data,maxLen): ''' simple moving average : smoothing data Args: Data : raw data removed gravity ''' global SMAaverage,SMAList,bufferFlag if SMAList.shape[0] == maxLen: bufferFlag = 1 SMAaverage = np.mean (SMAList,axis=0) SMAaverage = SMAaverage - SMAList[0,:]/float(maxLen) + Data/float(maxLen) SMAList = np.delete(SMAList, 0,axis=0) SMAList = np.concatenate ((SMAList, Data),axis=0) # print SMAaverage return SMAaverage else: SMAList = np.concatenate ((SMAList, Data),axis=0) return np.zeros((0,6)) global DTWModel 
global AnglePose if __name__ == '__main__': global saverState,AxisDict,XAction,YAction,ZAction,XModel,YModel,ZModel,SMAList,bufferFlag,EMAList,EMAaverage,DTWModel,JayAnglePose #Xn2p,Xn2pScaler,Xp2n,Xp2nScaler,Yn2p,Yn2pScaler,Yp2n,Yp2nScaler,Zn2p,Zn2pScaler,Zp2n,Zp2nScaler SMAList = np.zeros( (0,6) ) bufferFlag = 0 EMAList = np.zeros( (0,3) ) ret = np.zeros( (0,6) ) Acceration = np.zeros( (0,3) ) SMAaverage = None EMAaverage = None NotRecognize = 0 DTWModel = pickle.load(open("JayDTW.dat", "rb")) AnglePose = pickle.load(open("JayAnglePose.dat", "rb")) lis = keyboard.Listener(on_press=on_press) lis.start() # start to listen on a separate thread iface = 1 # try to scan it t =threading.Thread(target = ScanProcess,args=( iface, ) ) t.start() t.join() connList = [] mahony = Mahony.MahonyClass() node = myNode() connList.append("3c:cd:40:18:c1:98") #connect and Draw GetBacicData(node,connList[-1],"public",mahony,iface ) # yawCalibration = 0 staticLinearAcc = 0.0857 saverState = Value('i',-1) node.nodeCube = myCube() mydraw = myOpenGL.myDraw(node.nodeCube) plot1 = Array('f',[0.0 for i in range(0,200)]) plot2 = Array('f',[0.0 for i in range(0,200)]) plot3 = Array('f',[0.0 for i in range(0,200)]) plot4 = Array('f',[0.0 for i in range(0,200)]) plot5 = Array('f',[0.0 for i in range(0,200)]) plot6 = Array('f',[0.0 for i in range(0,200)]) plot7 = Array('i',[0 for i in range(0,650)]) Timestamp = Array('f',[0.0 for i in range(0,200)]) resetFlag = Value(c_bool,False) Idx = Value('i',-1) staticFlag = Value(c_bool,True) isCapturing = Value(c_bool,False) print "Drawing" # I am not sure it is still work well...... # QTwebcam ........ plot plotMyData = QTwebcam.QtCapture(0) plotRealTimeData = Process(target=QTWebCam,args=(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,Timestamp,isCapturing,staticFlag,resetFlag)) plotRealTimeData.daemon=True plotRealTimeData.start() # QTGraph ........ 
plot # plotMyData = QTRealLine.MyRealTimePlot() # plotRealTimeData = Process(target=QTRun,args=(plotMyData,plot1,plot2,plot3,plot4,plot5,plot6,plot7,Timestamp,Idx,resetFlag,staticFlag)) # plotRealTimeData.daemon=True # plotRealTimeData.start() #QTGraph ........ ScatterPlot # A scatter graph to visualize the mag and see if the value of mag is normal # plotMyData = QTRealTimeScatter.MyRealTimeScatterPlot() # plotRealTimeData = Process(target=QTRun,args=(plotMyData,plot1,plot2,plot3,flag)) # plotRealTimeData.daemon=True # plotRealTimeData.start() # how many data to plot numberOfPlot = 20 temp = 0 # how many samples used for filter numSamples = 20 while True: tStart = time.time() if node.Peripheral.waitForNotifications(0.01): gyro = [None]*3 mag = [None]*3 shortdata = node.noti if shortdata[6] != 0 and shortdata[7] != 0 and shortdata[8] != 0: mag[0] = shortdata[6]*0.15*node.magCalibration[0] - node.magBias[0] mag[1] = shortdata[7]*0.15*node.magCalibration[1] - node.magBias[1] mag[2] = shortdata[8]*0.15*node.magCalibration[2] - node.magBias[2] #print(mag) mag[0] *= node.magScale[0]*10 mag[1] *= node.magScale[1]*10 mag[2] *= node.magScale[2]*10 acc = S.dot( np.array([shortdata[0],shortdata[1],shortdata[2]]) - B) acc = np.asmatrix(acc) gyro = [None]*3 gyro[0] = (shortdata[3]/gyro_divider - node.gyroBias[0])*DEG2RAD gyro[1] = (shortdata[4]/gyro_divider - node.gyroBias[1])*DEG2RAD gyro[2] = (shortdata[5]/gyro_divider - node.gyroBias[2])*DEG2RAD gyro = np.asmatrix(gyro) #smoothing data (acc and gyro) - I found sma with buffer size 30 it is not enough for gyro filtterdData = SMA(np.concatenate( (acc,gyro),axis=1 ) ,30) if bufferFlag == 0: continue rawFilterdData = np.concatenate((acc,filtterdData[0,0:3]),axis=1 ) ret = np.concatenate( (ret,rawFilterdData),axis=0 ) if filtterdData.shape[0] != 0 : if ret.shape[0] == numberOfPlot: ret = ret.transpose() # print ret plot1[0:numberOfPlot] = ret[0,:].tolist()[0] plot2[0:numberOfPlot] = ret[1,:].tolist()[0] plot3[0:numberOfPlot] = 
ret[2,:].tolist()[0] plot4[0:numberOfPlot] = ret[3,:].tolist()[0] plot5[0:numberOfPlot] = ret[4,:].tolist()[0] plot6[0:numberOfPlot] = ret[5,:].tolist()[0] Idx.value = numberOfPlot staticFlag.value = False ret = np.zeros( (0,6) ) else: continue
self.accBias = [0.0,0.0,0.0] self.gyroBias = [0.0,0.0,0.0] self.magBias = [0.0,0.0,0.0] self.magScale = [0.0,0.0,0.0] self.magCalibration = [0.0,0.0,0.0]
random_line_split
main.rs
fn main() { //scope() //moves_and_mem(); //refs() slices() } //////////////////////////////////////////////////////////////////////////////// // What is Ownership? //////////////////////////////////////////////////////////////////////////////// // Ownership is Rust's central feature. // All programs have to manage the way they use a computer's memory while // running. Some have garbage collection that constantly looks for no // longer used memory as the program runs; in others, the programmer must // explicitly allocate and release the memory. Rust uses a third approach: // // Memory is managed through a system of ownership with a set of rules // that the compiler checks at compile time. No run-time costs are // incurred for any of the ownership features. // // [Stack and Heap] // In most langs, you really don't need to think about the stack and // the heap often. In systems langs, whether the value is on the stack // or on the heap has more of an effect on how the language behaves and // why we have to make certain decisions. // // The stack and the heap are parts of memory that are available to your // code to use at runtime, but they are structured differently. The stack // stores values in the order it gets them and removes the values in // the opposite order (LIFO). This is referred to as // *pushing onto the stack* and *popping off of the stack* // // It's fast because of the way it accesses the data: it never has to // search for a place to put new data or a place to get data from because // that place is *always* the top of the stack. Another propery is that // all data on the stack must take up a known, fixed size. // // For data that is an unknown size at compile time or a size that may // changeo ver time, we can store that data on the heap instead. The heap // is less organized; we just ask for some amount of space. The OS // finds an empty spot somewhere that's big enough for the request, marks // it as in use, and returns a pointer to that location. 
It's called // *allocating on the heap*. Pushing onto the stack is not considered // allocation. A pointer is a known, fixed size, so it can sit on the // heap, but for actual data, we have to follow the pointer. // // The heap is slower than the stack because we have to follow a pointer // to get there (a level of indirection). Processors are faster due to // temporal and spacial locality and caching if they have to jump around // less. // // When a function is called, the values passed into the function // (including, potentially pointers to data on the heap) and the fns // local vars get pushed onto the stack. When its over, the vals get // popped off the stack. // // !! // Keeping track of what code is using what data on the heap, minimizing // duplicate data on the heap, and cleaning up unused data on the heap // so we don't run out of space are all problems that ownership helps. // Once ownership is understood, you won't have to think about the stack // and the heap often, but knowing that managing heap data is why // ownership exists can help explain why it works the way that it does. // !! // // [Ownership Rules] // There exist 3 very important rules to ownership in Rust: // // 1) Each value in Rust has a variable that's called its *owner* // 2) There can only be one owner at a time (the highlander rule) // 3) When the owner goes out of scope, the value will be dropped // // + Variable Scope // See scope() // // + Memory and Allocation // See moves_and_mem // // + References // See refs() fn scope() { // First example of ownership, we'll look at the *scope* of some // variables. Scope is the range within a program for which an item // is valid. // s is a string literal. the value of the string is hardcoded into // the text of the program. The variable is valid from the point // at which it's declared until the end of the current *scope*. 
{ // s is not valid here, it's not yet declared let s = "hello"; // s is valid from this point forwards // do some stuff with s } // this scope is now over, and s is no longer valid // There are two important points in time here: // 1) When s comes *into* scope, it is valid. // 2) s remains valid until it is *out of scope* // // // The relationship between scopes and when variables are valid // is similar to other programming langs. Let's build on top // of this introducing the String type. // // + String type // We're going to illustrate the rules of ownership using a data type // that's more complex than the ones we've seen before. All the data // types we've seen before are stored on the stack and popped off the // stack when their scope is over, but we want to look at data // that's on the heap and explore how Rust knows to clean that up. // // We'll concentrate on the parts of String that relate to ownership. // They also apply to other complex data types provided by the // stdlib and those that you create. // // We've seen string literals hardcoded into the program. They're // convenient, but they aren't suitable for every situation in which // you want to use text. For one reason, they're immutable. Also, not // every string value is known when we write our code. The other type // is a String, which is allocated on the heap. It's able to store an // amount of text that is unknown at compile time. It's created from // a literal with a `from` function: let s = String::from("hello"); // Again, double colon (::) is an op that allows us to namespace // this from function under the String type rather than using a name // like string_from. It can be mutated: let mut s = String::from("hello"); s.push_str(", world!"); // appends a literal to a String println!("{}", s); // Will print the full string. // Why can Strings be mutated but literals cannot? Difference is // how they deal with memory. 
} fn moves_and_mem() { // With string literals, we know the contents of the string at compile // time, so the text is literally hardcoded into the executable, // making them extremely fast and efficient. This property only comes // from its immutability. We can't put a blob of memory into the binary // for each piece of text whose size is unknown at compile time and // whose size might change while running the program. // // To support a mutable, growing piece of text, need to allocate an // amount of mem on the heap, unknown at compile time, to hold the // contents. This means: // // 1) The memory must be requested from the OS at runtime. // 2) Need a way of returning the mem to the OS when we're done with // the allocated string. // // First part is done by us: the String::from implementation requests // the memory it needs from the OS. This is pretty standard for most // langs. // // The second part is different. In langs with GCs, it will keep track // and clean up mem that isn't used anymore, and the programmer doesn't // need to think about it. Without a GC, it's the programmer's // responsibility to know when that memory is no longer being used // and call code to explicitly return it. // // This has historically been a *very* difficult problem to solve. // If you forget to, we'll waste memory and leak it. // If we do it too early, we'll have an invalid variable (use after free) // If we do it twice, that's a bug too. // // We need to pair exactly one allocation with one free. // // Rust takes its own unique path: the memory is automatically // returned once the variable that owns it goes out of scope. // When a variable goes out of scope, Rust calls a special function // for us. The function is called drop, and it's where the author // of String can put the code to return the memory. Rust calls // `drop` automatically at the closing }. // // NOTE: C++ calls this pattern of deallocation at the end of its // lifetime RAII. 
The drop function in Rust is similar to a dtor // // The pattern has a profound impact on the way that Rust code is // written. Might seem simple, but the behavior of code can be // unexpected in more complicated situations when we want to // have multiple variables use the data that's been allocated // on the heap. // // + Ways variables and data interact: Move // Multiple variables can interact with the same data in different // ways in rust: // let x = 5; // let y = x; // // So here, we bind the value of 5 to x, then we make a copy // of the value in x and bind it to y. // We now have to vars x and y and both equal 5. // This is exactly what's happening because integers are simple // values with a known, fixed size, and these two 5 vals are // pushed onto the stack. // // let a = String::from("hello); // let b = a; // // This looks similar and you'd probably assume it behaves the same // way; the second would make a copy of the val in a and bind it to b. // This is not what happens. // // Under the covers, a String is actually a type with a few values: // ptr to some memory holding the string, a length, and a capacity. // This group is stored on the stack. The length is how much memory // in bytes the contents of the String is curreently using. // The capacity is the total amount of memory, in bytes, the String // has received from the OS. Difference between len and cap matters, // but not the point of this. // // When a is assigned to b, the String data is copied, meaning we copy // the pointer, the len, and the cap on the stack. The heap data is // not copied, so b's pointer is going to refer to the same heap // data that a does. // // Earlier we said when a variable goes out of scope, Rust will // automatically call the drop function and clean up the heap mem. // But in this case, both pointers are pointing to the same heap // memory. Thiis is a problem. When a and b go out of scope, they // will both attempt to free the same memory. 
This is a *double free* // error and is one of the memory safety bugs we mentioned previously. // Freeing mem twice can lead to mem corruption, which can lead // to security vulnerabilities. // // To ensure mem safety, there's another detail to what happens in // this situation in Rust. Instead of trying to copy the allocated // memory, Rust considers a to no longer be valid and therefore, Rust // doesn't need to free anything when a goes out of scope. // // If you were to try to use a after copying it to b, an error // is thrown at compile time. // // The ideas of "shallow copy" and "deep copy" apply here. The // concept of copying the pointer, length, and capacity without // copying the data psounds like a shallow copy. But because rust // also invalidates the first variable, instead of calling this // a shallow copy, it's known as a *move*. We would read this by // saying that a was *moved* into b. // // This solves the problem, because with only b as valid, when it // goes out of scope, it alone will free the mem. // // There is an additional design choice implied by this: // ** Rust will never automatically create "deep" copies of data. ** // Therefore, any *automatic* copying can be assumed to be // inexpensive in terms of runtime performance. // // + Ways variables and data interact: Clones // // If you *do* wawnt to deeply copy the heap data of a String, not // just the stack data, a common method can be used called a *clone*. // // let a = String::from("hello"); // let b = a.clone(); // // When you see a clone call, you know some arbitrary code is being // executed and that code may be expensive. It's an indiator that // something different is going on. // // + Stack only data: clone // There's another wrinkle we haven't talked about yet. This code // is using integers: // let x = 5; // let y = x; // // println!("x = {}, y = {}", x, y); // // This seems to contradict what we said; we don't have to call // clone, but x is still valid and wasn't moved to y. 
// // The reason is types like integers that have a known size at // compile time are stored *entirely* on the stack, so copies // of the actual values are very quick to make. There's no reason // we would want to prevent x from being valid after we create the // variable y. // // In other words, there's no different between deep and shallow // copying here, so calling clone wouldn't do anything different // from the usual shallow copying and we can leave it out. // // Rust has a special annotation called the Copy trait that can // be placed on types like integres that are stored on the stack. // If a type has the Copy trait, an older variable is still usable // after assignment. Rust won't let us annotate a type with the Copy // trait if the type, or any of its parts, has implemented the Drop // trait. // // If the type needs something special to happen when the value goes // out of scope and we add the Copy annotation to that type, we'll get // a compile time error. // // What types are Copy? Docs can/should be read, but as a general rule, // any group of simple scalar values can be Copy, and nothing that // requires allocation or is some form of resource is Copy. // -> ints, bools, floats, tuples (only if they contain also Copys). // // + Ownership and Functions] // Semantics for passing a value to a function are similar to // assigning a value to a variable. Passing a variable to a func // will move or copy just like assignment. let s = String::from("derpyfoobar"); // s comes into scope takes_ownership(s); // s's value moves into the function... // ... and so is no longer valid here. //println!("{}", s); // COMPILE ERROR! let x = 5; makes_copy(x); println!("{} ", x); // This is fine, because it was a copy. // [Return values and scope] // Returning values can also transfer ownership. 
Here's an ex // with similar annotations to previous examples: { let baz = gives_ownership(); // gives ownership moves its return // value into baz let duder = String::from("duder"); // duder comes into scope let lucha = takes_and_gives_back(duder); // duder is moves into // takes_and_gives_back, which also moves its return value into lucha println!("lucha! {}", lucha); } // Here lucha goes out of scope and is dropped. duder goes out of // scope but was moved // The ownership of a variable follows the same pattern every time: // **assigning a value to another variable moves it**. When a variable // that includes data on the heap goes out of scope, the value will be // cleaned up by `drop` unless the data has been moved to be owned by // another variable. // // Taking ownership and then returning ownership with every fn is // tedious. What if we need to let a function use a value but not take // ownership? It's quite annoying that anything we pass in also needs // to be passed back if we want to use it again, in addition to any // data resulting from the body of the fn that we may want to return // as well. // // It's possible to return multiple values using a tuple. // But it's still obnoxious to constantly pass back a ton of stuff. // // Rust has a way to address this, and its called references!!! // //////////////////////////////////////////////////////////////////////////////// } fn refs()
fn takes_ownership(some_string: String) { // some_string comes into scope println!("{}", some_string); } // here some string goes out of scope and `drop` is called. The // backing memory is freed. fn makes_copy(some_integer: i32) { // some integer comes into scope. println!("{}", some_integer); } // Here, some_integer goes out of scope. Nothing special happens. fn gives_ownership() -> String { // Gives ownership will move its return // value into the function that calls it let derp = String::from("derp"); // derp comes into scope derp // derp is returned and moves out to the calling function } // takes_and_gives_back will take a String and return one fn takes_and_gives_back(foo: String) -> String { // foo comes into scope foo // foo is returned and moves out to the calling fn }
{ // [References and Borrowing] // The issue with the returning tuple code we've seen elsewhere in // the ownership section is that we have to return the String to // the calling function so we can still use the String after the call. // Here we define calculate_length so that it uses a *reference* to // an object as a param instead of taking ownership of the value. let calc_len = |s: &String| -> usize { s.len() }; let duderington = String::from("duderington"); println!("the length of the string. -> {}", calc_len(&duderington)); // First, all the tuple code in the variable declaration is gone. // We pass the string into the function as &duderington, and in the // definition, we take &String rather than String. // // The ampersands are *references*, and they allow you to refer to // some value without taking ownership over it. // s inside of calc_len becomes a pointer to the String struct bound // to duderington, which itself contains a ptr to the actual string // data on the heap. // // A closer look at the function call: // // let ano = String::from("ano"); // let len = calc_len(&ano); // // The &ano syntax lets us create a ref that refers to the value // of ano, but does not own it. Because it does no own it, the val // it points to will not be dropped when the ref goes out of scope. // // Likewise, the sig of the fn uses & to indicate the type of the // param s is a ref. // // The scope in which the variable s is valid is the same as any // fn param scope, but we don't drop what the ref points to when it // goes out of scope because we don't have ownership. // Functions that have refs as params instead of vals mean we don't // need to return the vals in order to give back ownership, since // we never had ownership in the first place. // // What happens if we try to mutate something we borrowed? // Compiler errors. As vars are immutable by default, so are refs. // We are not allowed to modify a vanilla ref. 
// // + Mutable refs let change = |some_str: &mut String| { some_str.push_str(" fu."); }; let mut s = String::from("mutref"); change(&mut s); println!("mutref string after stuff: [{}]", s); // Mutable refs have a big caveat: you can only have one mutable ref // to a particular piece of data in a particular scope. This will fail: // // let mut s = String::from("fert"); // // let r1 = &mut s; // let r2 = &mut s; // // This restriction allows for mutation but in a very controlled // fashion. It's something that new Rusters struggle with, because most // langs let you mutate whenever you'd like. Benefit is that Rust // can prevent data races at compile time. // // A data race is a particular type of race condition in which these // three behaviors occur: // 1) Two or more pointers access the same data at the same time // 2) At least one of the pointeres is being used to write to the data // 3) No mechanism being used to sync the access to the data // // Data races cause undefined behavior and can be very difficult to // diagnose and solve when you're trying to figure out what's // happening at runtime. Rust won't even let you compile it. // // Rust will let you create a new scope allowing for multiple mut // refs, but just not *simultaneous* ones! // let mut s = String::from("fert"); { let r1 = &mut s; } //r1 goes out of scope here, so we can make a new ref with no prob let r2 = &mut s; // A similar rule exists for combining immutable refs. // This will error out. //let r1 = &s; //let r2 = &s; //let r3 = &mut s; // // Rust *also* does not allow for a mut ref while we have an // immutable one. Users of an immutable ref don't expect the vals // to change from under them. Multiple immutable refs are okay // because no one who is reading the data has the ability to modify // anybody else's data. 
// // + Dangling refs // In langs with pointers, it's easy to erroneously create dangling // pointers, or pointers that ref memory that may have been given // to someone else, by freeing some meory while preserving a pointer // to that memory. This is called a use after free. // // If we have a ref to some data, the compiler ensures that the data // will not go out of scope before the ref to the data does. // This will error out: // /* let dangle = || -> &String{ let s = String::from("hello"); &s } let ref_to_nothign = dangle(); */ // This will complain about something we haven't covered yet: // *lifetimes*. // The key here is the return type contains a borrowed value, but there // is no value for it to be borrowed from. // Because s is created inside dangle, when the code of dangle is // finished, s will be deallocated. But we tried to return a ref // to it. That means this ref would be pointing to an invaild String. // Rust won't let us do that. // The solution here is to return the String directly, thus transfering // ownership via a move to the caller of the fn. let no_dangle = || -> String { let s = String::from("no dangle"); s }; let ndstr = no_dangle(); println!("{}", ndstr); // Recapping the rules of refs: // 1) At any given time you can have *either* but not both of: // -> One mutable ref // -> Any number of immutable refs // 2) References must always be valid }
identifier_body
main.rs
fn main() { //scope() //moves_and_mem(); //refs() slices() } //////////////////////////////////////////////////////////////////////////////// // What is Ownership? //////////////////////////////////////////////////////////////////////////////// // Ownership is Rust's central feature. // All programs have to manage the way they use a computer's memory while // running. Some have garbage collection that constantly looks for no // longer used memory as the program runs; in others, the programmer must // explicitly allocate and release the memory. Rust uses a third approach: // // Memory is managed through a system of ownership with a set of rules // that the compiler checks at compile time. No run-time costs are // incurred for any of the ownership features. // // [Stack and Heap] // In most langs, you really don't need to think about the stack and // the heap often. In systems langs, whether the value is on the stack // or on the heap has more of an effect on how the language behaves and // why we have to make certain decisions. // // The stack and the heap are parts of memory that are available to your // code to use at runtime, but they are structured differently. The stack // stores values in the order it gets them and removes the values in // the opposite order (LIFO). This is referred to as // *pushing onto the stack* and *popping off of the stack* // // It's fast because of the way it accesses the data: it never has to // search for a place to put new data or a place to get data from because // that place is *always* the top of the stack. Another propery is that // all data on the stack must take up a known, fixed size. // // For data that is an unknown size at compile time or a size that may // changeo ver time, we can store that data on the heap instead. The heap // is less organized; we just ask for some amount of space. The OS // finds an empty spot somewhere that's big enough for the request, marks // it as in use, and returns a pointer to that location. 
It's called // *allocating on the heap*. Pushing onto the stack is not considered // allocation. A pointer is a known, fixed size, so it can sit on the // heap, but for actual data, we have to follow the pointer. // // The heap is slower than the stack because we have to follow a pointer // to get there (a level of indirection). Processors are faster due to // temporal and spacial locality and caching if they have to jump around // less. // // When a function is called, the values passed into the function // (including, potentially pointers to data on the heap) and the fns // local vars get pushed onto the stack. When its over, the vals get // popped off the stack. // // !! // Keeping track of what code is using what data on the heap, minimizing // duplicate data on the heap, and cleaning up unused data on the heap // so we don't run out of space are all problems that ownership helps. // Once ownership is understood, you won't have to think about the stack // and the heap often, but knowing that managing heap data is why // ownership exists can help explain why it works the way that it does. // !! // // [Ownership Rules] // There exist 3 very important rules to ownership in Rust: // // 1) Each value in Rust has a variable that's called its *owner* // 2) There can only be one owner at a time (the highlander rule) // 3) When the owner goes out of scope, the value will be dropped // // + Variable Scope // See scope() // // + Memory and Allocation // See moves_and_mem // // + References // See refs() fn scope() { // First example of ownership, we'll look at the *scope* of some // variables. Scope is the range within a program for which an item // is valid. // s is a string literal. the value of the string is hardcoded into // the text of the program. The variable is valid from the point // at which it's declared until the end of the current *scope*. 
{ // s is not valid here, it's not yet declared let s = "hello"; // s is valid from this point forwards // do some stuff with s } // this scope is now over, and s is no longer valid // There are two important points in time here: // 1) When s comes *into* scope, it is valid. // 2) s remains valid until it is *out of scope* // // // The relationship between scopes and when variables are valid // is similar to other programming langs. Let's build on top // of this introducing the String type. // // + String type // We're going to illustrate the rules of ownership using a data type // that's more complex than the ones we've seen before. All the data // types we've seen before are stored on the stack and popped off the // stack when their scope is over, but we want to look at data // that's on the heap and explore how Rust knows to clean that up. // // We'll concentrate on the parts of String that relate to ownership. // They also apply to other complex data types provided by the // stdlib and those that you create. // // We've seen string literals hardcoded into the program. They're // convenient, but they aren't suitable for every situation in which // you want to use text. For one reason, they're immutable. Also, not // every string value is known when we write our code. The other type // is a String, which is allocated on the heap. It's able to store an // amount of text that is unknown at compile time. It's created from // a literal with a `from` function: let s = String::from("hello"); // Again, double colon (::) is an op that allows us to namespace // this from function under the String type rather than using a name // like string_from. It can be mutated: let mut s = String::from("hello"); s.push_str(", world!"); // appends a literal to a String println!("{}", s); // Will print the full string. // Why can Strings be mutated but literals cannot? Difference is // how they deal with memory. } fn
() { // With string literals, we know the contents of the string at compile // time, so the text is literally hardcoded into the executable, // making them extremely fast and efficient. This property only comes // from its immutability. We can't put a blob of memory into the binary // for each piece of text whose size is unknown at compile time and // whose size might change while running the program. // // To support a mutable, growing piece of text, need to allocate an // amount of mem on the heap, unknown at compile time, to hold the // contents. This means: // // 1) The memory must be requested from the OS at runtime. // 2) Need a way of returning the mem to the OS when we're done with // the allocated string. // // First part is done by us: the String::from implementation requests // the memory it needs from the OS. This is pretty standard for most // langs. // // The second part is different. In langs with GCs, it will keep track // and clean up mem that isn't used anymore, and the programmer doesn't // need to think about it. Without a GC, it's the programmer's // responsibility to know when that memory is no longer being used // and call code to explicitly return it. // // This has historically been a *very* difficult problem to solve. // If you forget to, we'll waste memory and leak it. // If we do it too early, we'll have an invalid variable (use after free) // If we do it twice, that's a bug too. // // We need to pair exactly one allocation with one free. // // Rust takes its own unique path: the memory is automatically // returned once the variable that owns it goes out of scope. // When a variable goes out of scope, Rust calls a special function // for us. The function is called drop, and it's where the author // of String can put the code to return the memory. Rust calls // `drop` automatically at the closing }. // // NOTE: C++ calls this pattern of deallocation at the end of its // lifetime RAII. 
The drop function in Rust is similar to a dtor // // The pattern has a profound impact on the way that Rust code is // written. Might seem simple, but the behavior of code can be // unexpected in more complicated situations when we want to // have multiple variables use the data that's been allocated // on the heap. // // + Ways variables and data interact: Move // Multiple variables can interact with the same data in different // ways in rust: // let x = 5; // let y = x; // // So here, we bind the value of 5 to x, then we make a copy // of the value in x and bind it to y. // We now have to vars x and y and both equal 5. // This is exactly what's happening because integers are simple // values with a known, fixed size, and these two 5 vals are // pushed onto the stack. // // let a = String::from("hello); // let b = a; // // This looks similar and you'd probably assume it behaves the same // way; the second would make a copy of the val in a and bind it to b. // This is not what happens. // // Under the covers, a String is actually a type with a few values: // ptr to some memory holding the string, a length, and a capacity. // This group is stored on the stack. The length is how much memory // in bytes the contents of the String is curreently using. // The capacity is the total amount of memory, in bytes, the String // has received from the OS. Difference between len and cap matters, // but not the point of this. // // When a is assigned to b, the String data is copied, meaning we copy // the pointer, the len, and the cap on the stack. The heap data is // not copied, so b's pointer is going to refer to the same heap // data that a does. // // Earlier we said when a variable goes out of scope, Rust will // automatically call the drop function and clean up the heap mem. // But in this case, both pointers are pointing to the same heap // memory. Thiis is a problem. When a and b go out of scope, they // will both attempt to free the same memory. 
This is a *double free* // error and is one of the memory safety bugs we mentioned previously. // Freeing mem twice can lead to mem corruption, which can lead // to security vulnerabilities. // // To ensure mem safety, there's another detail to what happens in // this situation in Rust. Instead of trying to copy the allocated // memory, Rust considers a to no longer be valid and therefore, Rust // doesn't need to free anything when a goes out of scope. // // If you were to try to use a after copying it to b, an error // is thrown at compile time. // // The ideas of "shallow copy" and "deep copy" apply here. The // concept of copying the pointer, length, and capacity without // copying the data psounds like a shallow copy. But because rust // also invalidates the first variable, instead of calling this // a shallow copy, it's known as a *move*. We would read this by // saying that a was *moved* into b. // // This solves the problem, because with only b as valid, when it // goes out of scope, it alone will free the mem. // // There is an additional design choice implied by this: // ** Rust will never automatically create "deep" copies of data. ** // Therefore, any *automatic* copying can be assumed to be // inexpensive in terms of runtime performance. // // + Ways variables and data interact: Clones // // If you *do* wawnt to deeply copy the heap data of a String, not // just the stack data, a common method can be used called a *clone*. // // let a = String::from("hello"); // let b = a.clone(); // // When you see a clone call, you know some arbitrary code is being // executed and that code may be expensive. It's an indiator that // something different is going on. // // + Stack only data: clone // There's another wrinkle we haven't talked about yet. This code // is using integers: // let x = 5; // let y = x; // // println!("x = {}, y = {}", x, y); // // This seems to contradict what we said; we don't have to call // clone, but x is still valid and wasn't moved to y. 
// // The reason is types like integers that have a known size at // compile time are stored *entirely* on the stack, so copies // of the actual values are very quick to make. There's no reason // we would want to prevent x from being valid after we create the // variable y. // // In other words, there's no different between deep and shallow // copying here, so calling clone wouldn't do anything different // from the usual shallow copying and we can leave it out. // // Rust has a special annotation called the Copy trait that can // be placed on types like integres that are stored on the stack. // If a type has the Copy trait, an older variable is still usable // after assignment. Rust won't let us annotate a type with the Copy // trait if the type, or any of its parts, has implemented the Drop // trait. // // If the type needs something special to happen when the value goes // out of scope and we add the Copy annotation to that type, we'll get // a compile time error. // // What types are Copy? Docs can/should be read, but as a general rule, // any group of simple scalar values can be Copy, and nothing that // requires allocation or is some form of resource is Copy. // -> ints, bools, floats, tuples (only if they contain also Copys). // // + Ownership and Functions] // Semantics for passing a value to a function are similar to // assigning a value to a variable. Passing a variable to a func // will move or copy just like assignment. let s = String::from("derpyfoobar"); // s comes into scope takes_ownership(s); // s's value moves into the function... // ... and so is no longer valid here. //println!("{}", s); // COMPILE ERROR! let x = 5; makes_copy(x); println!("{} ", x); // This is fine, because it was a copy. // [Return values and scope] // Returning values can also transfer ownership. 
Here's an ex // with similar annotations to previous examples: { let baz = gives_ownership(); // gives ownership moves its return // value into baz let duder = String::from("duder"); // duder comes into scope let lucha = takes_and_gives_back(duder); // duder is moves into // takes_and_gives_back, which also moves its return value into lucha println!("lucha! {}", lucha); } // Here lucha goes out of scope and is dropped. duder goes out of // scope but was moved // The ownership of a variable follows the same pattern every time: // **assigning a value to another variable moves it**. When a variable // that includes data on the heap goes out of scope, the value will be // cleaned up by `drop` unless the data has been moved to be owned by // another variable. // // Taking ownership and then returning ownership with every fn is // tedious. What if we need to let a function use a value but not take // ownership? It's quite annoying that anything we pass in also needs // to be passed back if we want to use it again, in addition to any // data resulting from the body of the fn that we may want to return // as well. // // It's possible to return multiple values using a tuple. // But it's still obnoxious to constantly pass back a ton of stuff. // // Rust has a way to address this, and its called references!!! // //////////////////////////////////////////////////////////////////////////////// } fn refs() { // [References and Borrowing] // The issue with the returning tuple code we've seen elsewhere in // the ownership section is that we have to return the String to // the calling function so we can still use the String after the call. // Here we define calculate_length so that it uses a *reference* to // an object as a param instead of taking ownership of the value. let calc_len = |s: &String| -> usize { s.len() }; let duderington = String::from("duderington"); println!("the length of the string. 
-> {}", calc_len(&duderington)); // First, all the tuple code in the variable declaration is gone. // We pass the string into the function as &duderington, and in the // definition, we take &String rather than String. // // The ampersands are *references*, and they allow you to refer to // some value without taking ownership over it. // s inside of calc_len becomes a pointer to the String struct bound // to duderington, which itself contains a ptr to the actual string // data on the heap. // // A closer look at the function call: // // let ano = String::from("ano"); // let len = calc_len(&ano); // // The &ano syntax lets us create a ref that refers to the value // of ano, but does not own it. Because it does no own it, the val // it points to will not be dropped when the ref goes out of scope. // // Likewise, the sig of the fn uses & to indicate the type of the // param s is a ref. // // The scope in which the variable s is valid is the same as any // fn param scope, but we don't drop what the ref points to when it // goes out of scope because we don't have ownership. // Functions that have refs as params instead of vals mean we don't // need to return the vals in order to give back ownership, since // we never had ownership in the first place. // // What happens if we try to mutate something we borrowed? // Compiler errors. As vars are immutable by default, so are refs. // We are not allowed to modify a vanilla ref. // // + Mutable refs let change = |some_str: &mut String| { some_str.push_str(" fu."); }; let mut s = String::from("mutref"); change(&mut s); println!("mutref string after stuff: [{}]", s); // Mutable refs have a big caveat: you can only have one mutable ref // to a particular piece of data in a particular scope. This will fail: // // let mut s = String::from("fert"); // // let r1 = &mut s; // let r2 = &mut s; // // This restriction allows for mutation but in a very controlled // fashion. 
It's something that new Rusters struggle with, because most // langs let you mutate whenever you'd like. Benefit is that Rust // can prevent data races at compile time. // // A data race is a particular type of race condition in which these // three behaviors occur: // 1) Two or more pointers access the same data at the same time // 2) At least one of the pointeres is being used to write to the data // 3) No mechanism being used to sync the access to the data // // Data races cause undefined behavior and can be very difficult to // diagnose and solve when you're trying to figure out what's // happening at runtime. Rust won't even let you compile it. // // Rust will let you create a new scope allowing for multiple mut // refs, but just not *simultaneous* ones! // let mut s = String::from("fert"); { let r1 = &mut s; } //r1 goes out of scope here, so we can make a new ref with no prob let r2 = &mut s; // A similar rule exists for combining immutable refs. // This will error out. //let r1 = &s; //let r2 = &s; //let r3 = &mut s; // // Rust *also* does not allow for a mut ref while we have an // immutable one. Users of an immutable ref don't expect the vals // to change from under them. Multiple immutable refs are okay // because no one who is reading the data has the ability to modify // anybody else's data. // // + Dangling refs // In langs with pointers, it's easy to erroneously create dangling // pointers, or pointers that ref memory that may have been given // to someone else, by freeing some meory while preserving a pointer // to that memory. This is called a use after free. // // If we have a ref to some data, the compiler ensures that the data // will not go out of scope before the ref to the data does. // This will error out: // /* let dangle = || -> &String{ let s = String::from("hello"); &s } let ref_to_nothign = dangle(); */ // This will complain about something we haven't covered yet: // *lifetimes*. 
// The key here is the return type contains a borrowed value, but there // is no value for it to be borrowed from. // Because s is created inside dangle, when the code of dangle is // finished, s will be deallocated. But we tried to return a ref // to it. That means this ref would be pointing to an invaild String. // Rust won't let us do that. // The solution here is to return the String directly, thus transfering // ownership via a move to the caller of the fn. let no_dangle = || -> String { let s = String::from("no dangle"); s }; let ndstr = no_dangle(); println!("{}", ndstr); // Recapping the rules of refs: // 1) At any given time you can have *either* but not both of: // -> One mutable ref // -> Any number of immutable refs // 2) References must always be valid } fn takes_ownership(some_string: String) { // some_string comes into scope println!("{}", some_string); } // here some string goes out of scope and `drop` is called. The // backing memory is freed. fn makes_copy(some_integer: i32) { // some integer comes into scope. println!("{}", some_integer); } // Here, some_integer goes out of scope. Nothing special happens. fn gives_ownership() -> String { // Gives ownership will move its return // value into the function that calls it let derp = String::from("derp"); // derp comes into scope derp // derp is returned and moves out to the calling function } // takes_and_gives_back will take a String and return one fn takes_and_gives_back(foo: String) -> String { // foo comes into scope foo // foo is returned and moves out to the calling fn }
moves_and_mem
identifier_name
main.rs
fn main() { //scope() //moves_and_mem(); //refs() slices() } //////////////////////////////////////////////////////////////////////////////// // What is Ownership? //////////////////////////////////////////////////////////////////////////////// // Ownership is Rust's central feature. // All programs have to manage the way they use a computer's memory while // running. Some have garbage collection that constantly looks for no // longer used memory as the program runs; in others, the programmer must // explicitly allocate and release the memory. Rust uses a third approach: // // Memory is managed through a system of ownership with a set of rules // that the compiler checks at compile time. No run-time costs are // incurred for any of the ownership features. // // [Stack and Heap] // In most langs, you really don't need to think about the stack and // the heap often. In systems langs, whether the value is on the stack // or on the heap has more of an effect on how the language behaves and // why we have to make certain decisions. // // The stack and the heap are parts of memory that are available to your // code to use at runtime, but they are structured differently. The stack
// the opposite order (LIFO). This is referred to as // *pushing onto the stack* and *popping off of the stack* // // It's fast because of the way it accesses the data: it never has to // search for a place to put new data or a place to get data from because // that place is *always* the top of the stack. Another propery is that // all data on the stack must take up a known, fixed size. // // For data that is an unknown size at compile time or a size that may // changeo ver time, we can store that data on the heap instead. The heap // is less organized; we just ask for some amount of space. The OS // finds an empty spot somewhere that's big enough for the request, marks // it as in use, and returns a pointer to that location. It's called // *allocating on the heap*. Pushing onto the stack is not considered // allocation. A pointer is a known, fixed size, so it can sit on the // heap, but for actual data, we have to follow the pointer. // // The heap is slower than the stack because we have to follow a pointer // to get there (a level of indirection). Processors are faster due to // temporal and spacial locality and caching if they have to jump around // less. // // When a function is called, the values passed into the function // (including, potentially pointers to data on the heap) and the fns // local vars get pushed onto the stack. When its over, the vals get // popped off the stack. // // !! // Keeping track of what code is using what data on the heap, minimizing // duplicate data on the heap, and cleaning up unused data on the heap // so we don't run out of space are all problems that ownership helps. // Once ownership is understood, you won't have to think about the stack // and the heap often, but knowing that managing heap data is why // ownership exists can help explain why it works the way that it does. // !! 
// // [Ownership Rules] // There exist 3 very important rules to ownership in Rust: // // 1) Each value in Rust has a variable that's called its *owner* // 2) There can only be one owner at a time (the highlander rule) // 3) When the owner goes out of scope, the value will be dropped // // + Variable Scope // See scope() // // + Memory and Allocation // See moves_and_mem // // + References // See refs() fn scope() { // First example of ownership, we'll look at the *scope* of some // variables. Scope is the range within a program for which an item // is valid. // s is a string literal. the value of the string is hardcoded into // the text of the program. The variable is valid from the point // at which it's declared until the end of the current *scope*. { // s is not valid here, it's not yet declared let s = "hello"; // s is valid from this point forwards // do some stuff with s } // this scope is now over, and s is no longer valid // There are two important points in time here: // 1) When s comes *into* scope, it is valid. // 2) s remains valid until it is *out of scope* // // // The relationship between scopes and when variables are valid // is similar to other programming langs. Let's build on top // of this introducing the String type. // // + String type // We're going to illustrate the rules of ownership using a data type // that's more complex than the ones we've seen before. All the data // types we've seen before are stored on the stack and popped off the // stack when their scope is over, but we want to look at data // that's on the heap and explore how Rust knows to clean that up. // // We'll concentrate on the parts of String that relate to ownership. // They also apply to other complex data types provided by the // stdlib and those that you create. // // We've seen string literals hardcoded into the program. They're // convenient, but they aren't suitable for every situation in which // you want to use text. For one reason, they're immutable. 
Also, not // every string value is known when we write our code. The other type // is a String, which is allocated on the heap. It's able to store an // amount of text that is unknown at compile time. It's created from // a literal with a `from` function: let s = String::from("hello"); // Again, double colon (::) is an op that allows us to namespace // this from function under the String type rather than using a name // like string_from. It can be mutated: let mut s = String::from("hello"); s.push_str(", world!"); // appends a literal to a String println!("{}", s); // Will print the full string. // Why can Strings be mutated but literals cannot? Difference is // how they deal with memory. } fn moves_and_mem() { // With string literals, we know the contents of the string at compile // time, so the text is literally hardcoded into the executable, // making them extremely fast and efficient. This property only comes // from its immutability. We can't put a blob of memory into the binary // for each piece of text whose size is unknown at compile time and // whose size might change while running the program. // // To support a mutable, growing piece of text, need to allocate an // amount of mem on the heap, unknown at compile time, to hold the // contents. This means: // // 1) The memory must be requested from the OS at runtime. // 2) Need a way of returning the mem to the OS when we're done with // the allocated string. // // First part is done by us: the String::from implementation requests // the memory it needs from the OS. This is pretty standard for most // langs. // // The second part is different. In langs with GCs, it will keep track // and clean up mem that isn't used anymore, and the programmer doesn't // need to think about it. Without a GC, it's the programmer's // responsibility to know when that memory is no longer being used // and call code to explicitly return it. // // This has historically been a *very* difficult problem to solve. 
// If you forget to, we'll waste memory and leak it. // If we do it too early, we'll have an invalid variable (use after free) // If we do it twice, that's a bug too. // // We need to pair exactly one allocation with one free. // // Rust takes its own unique path: the memory is automatically // returned once the variable that owns it goes out of scope. // When a variable goes out of scope, Rust calls a special function // for us. The function is called drop, and it's where the author // of String can put the code to return the memory. Rust calls // `drop` automatically at the closing }. // // NOTE: C++ calls this pattern of deallocation at the end of its // lifetime RAII. The drop function in Rust is similar to a dtor // // The pattern has a profound impact on the way that Rust code is // written. Might seem simple, but the behavior of code can be // unexpected in more complicated situations when we want to // have multiple variables use the data that's been allocated // on the heap. // // + Ways variables and data interact: Move // Multiple variables can interact with the same data in different // ways in rust: // let x = 5; // let y = x; // // So here, we bind the value of 5 to x, then we make a copy // of the value in x and bind it to y. // We now have to vars x and y and both equal 5. // This is exactly what's happening because integers are simple // values with a known, fixed size, and these two 5 vals are // pushed onto the stack. // // let a = String::from("hello); // let b = a; // // This looks similar and you'd probably assume it behaves the same // way; the second would make a copy of the val in a and bind it to b. // This is not what happens. // // Under the covers, a String is actually a type with a few values: // ptr to some memory holding the string, a length, and a capacity. // This group is stored on the stack. The length is how much memory // in bytes the contents of the String is curreently using. 
// The capacity is the total amount of memory, in bytes, the String // has received from the OS. Difference between len and cap matters, // but not the point of this. // // When a is assigned to b, the String data is copied, meaning we copy // the pointer, the len, and the cap on the stack. The heap data is // not copied, so b's pointer is going to refer to the same heap // data that a does. // // Earlier we said when a variable goes out of scope, Rust will // automatically call the drop function and clean up the heap mem. // But in this case, both pointers are pointing to the same heap // memory. Thiis is a problem. When a and b go out of scope, they // will both attempt to free the same memory. This is a *double free* // error and is one of the memory safety bugs we mentioned previously. // Freeing mem twice can lead to mem corruption, which can lead // to security vulnerabilities. // // To ensure mem safety, there's another detail to what happens in // this situation in Rust. Instead of trying to copy the allocated // memory, Rust considers a to no longer be valid and therefore, Rust // doesn't need to free anything when a goes out of scope. // // If you were to try to use a after copying it to b, an error // is thrown at compile time. // // The ideas of "shallow copy" and "deep copy" apply here. The // concept of copying the pointer, length, and capacity without // copying the data psounds like a shallow copy. But because rust // also invalidates the first variable, instead of calling this // a shallow copy, it's known as a *move*. We would read this by // saying that a was *moved* into b. // // This solves the problem, because with only b as valid, when it // goes out of scope, it alone will free the mem. // // There is an additional design choice implied by this: // ** Rust will never automatically create "deep" copies of data. ** // Therefore, any *automatic* copying can be assumed to be // inexpensive in terms of runtime performance. 
// // + Ways variables and data interact: Clones // // If you *do* wawnt to deeply copy the heap data of a String, not // just the stack data, a common method can be used called a *clone*. // // let a = String::from("hello"); // let b = a.clone(); // // When you see a clone call, you know some arbitrary code is being // executed and that code may be expensive. It's an indiator that // something different is going on. // // + Stack only data: clone // There's another wrinkle we haven't talked about yet. This code // is using integers: // let x = 5; // let y = x; // // println!("x = {}, y = {}", x, y); // // This seems to contradict what we said; we don't have to call // clone, but x is still valid and wasn't moved to y. // // The reason is types like integers that have a known size at // compile time are stored *entirely* on the stack, so copies // of the actual values are very quick to make. There's no reason // we would want to prevent x from being valid after we create the // variable y. // // In other words, there's no different between deep and shallow // copying here, so calling clone wouldn't do anything different // from the usual shallow copying and we can leave it out. // // Rust has a special annotation called the Copy trait that can // be placed on types like integres that are stored on the stack. // If a type has the Copy trait, an older variable is still usable // after assignment. Rust won't let us annotate a type with the Copy // trait if the type, or any of its parts, has implemented the Drop // trait. // // If the type needs something special to happen when the value goes // out of scope and we add the Copy annotation to that type, we'll get // a compile time error. // // What types are Copy? Docs can/should be read, but as a general rule, // any group of simple scalar values can be Copy, and nothing that // requires allocation or is some form of resource is Copy. // -> ints, bools, floats, tuples (only if they contain also Copys). 
// // + Ownership and Functions] // Semantics for passing a value to a function are similar to // assigning a value to a variable. Passing a variable to a func // will move or copy just like assignment. let s = String::from("derpyfoobar"); // s comes into scope takes_ownership(s); // s's value moves into the function... // ... and so is no longer valid here. //println!("{}", s); // COMPILE ERROR! let x = 5; makes_copy(x); println!("{} ", x); // This is fine, because it was a copy. // [Return values and scope] // Returning values can also transfer ownership. Here's an ex // with similar annotations to previous examples: { let baz = gives_ownership(); // gives ownership moves its return // value into baz let duder = String::from("duder"); // duder comes into scope let lucha = takes_and_gives_back(duder); // duder is moves into // takes_and_gives_back, which also moves its return value into lucha println!("lucha! {}", lucha); } // Here lucha goes out of scope and is dropped. duder goes out of // scope but was moved // The ownership of a variable follows the same pattern every time: // **assigning a value to another variable moves it**. When a variable // that includes data on the heap goes out of scope, the value will be // cleaned up by `drop` unless the data has been moved to be owned by // another variable. // // Taking ownership and then returning ownership with every fn is // tedious. What if we need to let a function use a value but not take // ownership? It's quite annoying that anything we pass in also needs // to be passed back if we want to use it again, in addition to any // data resulting from the body of the fn that we may want to return // as well. // // It's possible to return multiple values using a tuple. // But it's still obnoxious to constantly pass back a ton of stuff. // // Rust has a way to address this, and its called references!!! 
// //////////////////////////////////////////////////////////////////////////////// } fn refs() { // [References and Borrowing] // The issue with the returning tuple code we've seen elsewhere in // the ownership section is that we have to return the String to // the calling function so we can still use the String after the call. // Here we define calculate_length so that it uses a *reference* to // an object as a param instead of taking ownership of the value. let calc_len = |s: &String| -> usize { s.len() }; let duderington = String::from("duderington"); println!("the length of the string. -> {}", calc_len(&duderington)); // First, all the tuple code in the variable declaration is gone. // We pass the string into the function as &duderington, and in the // definition, we take &String rather than String. // // The ampersands are *references*, and they allow you to refer to // some value without taking ownership over it. // s inside of calc_len becomes a pointer to the String struct bound // to duderington, which itself contains a ptr to the actual string // data on the heap. // // A closer look at the function call: // // let ano = String::from("ano"); // let len = calc_len(&ano); // // The &ano syntax lets us create a ref that refers to the value // of ano, but does not own it. Because it does no own it, the val // it points to will not be dropped when the ref goes out of scope. // // Likewise, the sig of the fn uses & to indicate the type of the // param s is a ref. // // The scope in which the variable s is valid is the same as any // fn param scope, but we don't drop what the ref points to when it // goes out of scope because we don't have ownership. // Functions that have refs as params instead of vals mean we don't // need to return the vals in order to give back ownership, since // we never had ownership in the first place. // // What happens if we try to mutate something we borrowed? // Compiler errors. As vars are immutable by default, so are refs. 
// We are not allowed to modify a vanilla ref. // // + Mutable refs let change = |some_str: &mut String| { some_str.push_str(" fu."); }; let mut s = String::from("mutref"); change(&mut s); println!("mutref string after stuff: [{}]", s); // Mutable refs have a big caveat: you can only have one mutable ref // to a particular piece of data in a particular scope. This will fail: // // let mut s = String::from("fert"); // // let r1 = &mut s; // let r2 = &mut s; // // This restriction allows for mutation but in a very controlled // fashion. It's something that new Rusters struggle with, because most // langs let you mutate whenever you'd like. Benefit is that Rust // can prevent data races at compile time. // // A data race is a particular type of race condition in which these // three behaviors occur: // 1) Two or more pointers access the same data at the same time // 2) At least one of the pointeres is being used to write to the data // 3) No mechanism being used to sync the access to the data // // Data races cause undefined behavior and can be very difficult to // diagnose and solve when you're trying to figure out what's // happening at runtime. Rust won't even let you compile it. // // Rust will let you create a new scope allowing for multiple mut // refs, but just not *simultaneous* ones! // let mut s = String::from("fert"); { let r1 = &mut s; } //r1 goes out of scope here, so we can make a new ref with no prob let r2 = &mut s; // A similar rule exists for combining immutable refs. // This will error out. //let r1 = &s; //let r2 = &s; //let r3 = &mut s; // // Rust *also* does not allow for a mut ref while we have an // immutable one. Users of an immutable ref don't expect the vals // to change from under them. Multiple immutable refs are okay // because no one who is reading the data has the ability to modify // anybody else's data. 
// // + Dangling refs // In langs with pointers, it's easy to erroneously create dangling // pointers, or pointers that ref memory that may have been given // to someone else, by freeing some meory while preserving a pointer // to that memory. This is called a use after free. // // If we have a ref to some data, the compiler ensures that the data // will not go out of scope before the ref to the data does. // This will error out: // /* let dangle = || -> &String{ let s = String::from("hello"); &s } let ref_to_nothign = dangle(); */ // This will complain about something we haven't covered yet: // *lifetimes*. // The key here is the return type contains a borrowed value, but there // is no value for it to be borrowed from. // Because s is created inside dangle, when the code of dangle is // finished, s will be deallocated. But we tried to return a ref // to it. That means this ref would be pointing to an invaild String. // Rust won't let us do that. // The solution here is to return the String directly, thus transfering // ownership via a move to the caller of the fn. let no_dangle = || -> String { let s = String::from("no dangle"); s }; let ndstr = no_dangle(); println!("{}", ndstr); // Recapping the rules of refs: // 1) At any given time you can have *either* but not both of: // -> One mutable ref // -> Any number of immutable refs // 2) References must always be valid } fn takes_ownership(some_string: String) { // some_string comes into scope println!("{}", some_string); } // here some string goes out of scope and `drop` is called. The // backing memory is freed. fn makes_copy(some_integer: i32) { // some integer comes into scope. println!("{}", some_integer); } // Here, some_integer goes out of scope. Nothing special happens. 
fn gives_ownership() -> String { // Gives ownership will move its return // value into the function that calls it let derp = String::from("derp"); // derp comes into scope derp // derp is returned and moves out to the calling function } // takes_and_gives_back will take a String and return one fn takes_and_gives_back(foo: String) -> String { // foo comes into scope foo // foo is returned and moves out to the calling fn }
// stores values in the order it gets them and removes the values in
random_line_split
audition.js
// pages/audition/audition.js var qcloud = require('../../vendor/wafer2-client-sdk/index') var config = require('../../config.js') var util = require('../../utils/util.js') var moment = require('../../vendor/moment.min') var WxParse = require('../../vendor/wxParse/wxParse.js'); import service from '../../utils/service'; Page({ /** * 页面的初始数据 */ data: { currentStep: 1, disabled: true, // 按钮禁用 fixed: true, // 按钮悬浮 classNames: '', disabledClassName: 'disabled', opened: false, paper: {}, mainEnded: false, optEnded: false, isAdmin: false, preFinished: false, //预习过 isPreview: false, // 试听版 finished: false, // 正课是否完成 optFinished: false, //选修课是否完成 optRecordFinished: false, //选修课录音是否听过 localAudioState: { audios: {}, optAudios: {}, } }, onShow: function(options){ this.doRequest(this.data.options); wx.getStorage({ key: 'admin', success: (res) => { if(res.data) { this.setData({ isAdmin: true, disabledClassName: '' }); } }, }); wx.getStorage({ key: 'record_' + this.data.paper.id, success: (res) => { if(res.data) { this.setData({ localAudioState: res.data }); for(var idx in res.data.audios) { switch(idx) { case 1: this.setData({ firstFinished: true }); break; case 2: this.setData({ secondFinished: true }); break; case 3: this.setData({ audioCycleEnded: true }); break; } } for(var idx in res.data.optAudios) { this.setData({ optRecordFinished: true, optFinished: true }); } for(var idx in res.data.preAudios) { this.setData({ preAudioFinshed: true }); } } } }); }, /** * 生命周期函数--监听页面加载 */ onLoad: function (options) { var that = this this.setData({ options, isPreview: !!options.isPreview, //试听 }) if (options.mode == 'opt') { //选修 this.setData({ currentStep: 4 }); } }, doRequest: function(options) { var that = this getApp().ready(() => { console.log('ready') this.initPageData(_ => { let { mainEnded, optEnded } = this.data if (options.main == 'done' && mainEnded) { let step = 3, optFinished = false // if (optEnded) { // step = 4 // optFinished = true // } this.setData({ currentStep: 
step, audioCycleEnded: true, optFinished: optFinished }) } wx.getStorage({ key: 'semester_detail_' + this.data.paper.semesterId, success: (res) => { let current; if (current = res.data.statistical.find((item) => item.paperId == this.data.paper.id)) { this.setData({ disabledClassName: '', finished: true }); } }, }) wx.getStorage({ key: 'paper_' + this.data.paper.id, success: (res) => { if (res.data == 'finished') { this.setData({ preFinished: true }) } }, }) wx.getStorage({ key: 'optRecord_' + this.data.paper.id, success: (res) => { if (res.data) { this.setData({ optRecordFinished: true }) } }, }) }) }) wx.getSetting({ success: function (res) { if (!res.authSetting['scope.userInfo']) { wx.openSetting({ success: function (res) { if (res.authSetting['scope.userInfo']) { that.initPageData() } } }) } } }) }, initPageData: function (cb){ var that = this; wx.showLoading({ title: '加载中', }) let url = '', method = 'GET', data, header; if(that.data.options.paperId) { url = config.service.paperUrl + '/' + (that.data.options.paperId || 4); } else if (that.data.options.semesterId) { url = config.service.todayPaperUrl; method = 'POST'; data = { openId: getApp().globalData.userInfo.openId, semesterId: that.data.options.semesterId, readToday: that.data.options.date || util.getCurrentDate() } header = { 'Content-Type': 'application/x-www-form-urlencoded' } } qcloud.request({ url: url, data: data, header: header, method: method, login: true, success(result) { if (result.statusCode != 200) { let msg = '系统异常,请联系管理员'; if (result.data.code == 4042) { msg = '该课程已删除'; } else if (result.data.code == 4043) { msg = '本学期已结束'; } wx.showModal({ title: '提示', content: msg, showCancel: false, success: function (res) { if (res.confirm) { wx.navigateBack({ delta: 1 }) } } }) return } // 跳转到听写 if (result.data.type == 3) { wx.redirectTo({ url: '/pages/dictation/dictation?paperId=' + result.data.id }) return; } // 跳转到测试 if (result.data.type == 2) { wx.redirectTo({ url: '/pages/test/test?paperId=' + 
result.data.id }) return; } let content = JSON.parse(result.data.content); content.audios.forEach(audio => { audio.key = Math.random() * 100000 }) content.optAudios.forEach(audio => { audio.key = Math.random() * 100000 }) let mainEnded = wx.getStorageSync('paper_' + result.data.id) let optEnded = wx.getStorageSync('optPaper_' + result.data.id) that.setData({ paper: result.data, content: content, mainEnded: mainEnded, optEnded: optEnded }) WxParse.wxParse('original', 'html', content.original, that, 5); WxParse.wxParse('handout', 'html', content.handout, that, 5); WxParse.wxParse('thirdHandout', 'html', content.thirdHandout, that, 5); WxParse.wxParse('optHandout', 'html', content.optHandout, that, 5); if (that.data.currentStep == 1 && content.preAudio) { util.showToast("Tout écouter pour passer à l’étape suivante. ", 3000) } cb && cb() }, fail(error) { // util.showModel('请求失败', error) console.log('request fail', error) }, complete() { wx.hideLoading() } }) }, /** * 生命周期函数--监听页面初次渲染完成 */ onReady: function () { }, /** * 下一步 */ next: function() { const step = this.data.currentStep + 1; this.setData({ currentStep: step, fixed: true }) if (this.data.currentStep == 3) { setTimeout(() => { util.showToast("Lire au moins cinq fois pour passer l’étape suivante.", 2500) } ,1000) } this.stopAudio() }, toggle: function(e) { var key = e.target.dataset.target this.setData({ [key]: !this.data[key] }) }, // 本地保存音频状态 saveLocalState: function(idx, audioKey) { audioKey = audioKey || 'audios'; var key = 'record_' + this.data.paper.id; wx.getStorage({ key: key, success: function(res) { res.data[audioKey] = res.data[audioKey] || {}; res.data[audioKey][idx] = true; wx.setStorage({ key: key, data: res.data }) }, fail: function() { wx.setStorage({ key: key, data: { [audioKey]: { [idx]: true } } }); } }); }, /** * 音频播放结束 */ onPreAudioEnded: function() { var firstFinished = !!this.data.content.audios[0].finished this.saveLocalState(0, 'preAudios'); this.setData({ preAudioFinshed: true, 
firstFinished: firstFinished }) if (!firstFinished) { util.showToast("Tout écouter pour passer à l’étape suivante.", 3000) } }, onAudioEnded: function(e) { switch (this.data.currentStep) {
this.saveLocalState(0); var hasPreAudio = this.data.content.preAudio this.data.content.audios[0].finished = true; var preAudioFinshed = this.data.preAudioFinshed if ((!hasPreAudio || preAudioFinshed)) { this.setData({ firstFinished: true }) } if(hasPreAudio && !preAudioFinshed) { util.showToast("Tout écouter pour passer à l’étape suivante.", 3000) } break case 2: this.saveLocalState(1); this.setData({ secondFinished: true }) break case 4: this.saveLocalState(0, 'optAudios'); wx.setStorage({ key: 'optRecord_' + this.data.paper.id, data: true, }) this.setData({ optRecordFinished: true, optFinished: true }) break } if(this.data.currentStep == 3) { this.saveLocalState(2); return } }, onAudioCycleEnded: function(e) { this.setData({ audioCycleEnded: true }); this.doFinish(); }, onPreAudioReady: function (e) { this.preAudioCtx = e.detail.context }, onAudioReady: function(e){ this.audioCtx = e.detail.context }, onPreAudioPlay: function (e) { this.audioCtx && this.audioCtx.pause() }, onAudioPlay: function (e) { this.preAudioCtx && this.preAudioCtx.pause() }, toOpt: function(){ // this.data.content.audios = this.data.content.optAudios this.setData({ currentStep: 4, // isOptional: true, // content: { ...this.data.content } }) wx.pageScrollTo({ scrollTop: 0 }) this.stopAudio() }, toMiniPro: function() { // wx.navigateToMiniProgram({ // appId: 'wxde736ea090d9d526', // path: 'pages/user/home/home?courseId=1970&start_at=2017-12-12&end_time=2017-12-14', // extraData: { // foo: 'bar' // }, // envVersion: 'release', // success(res) { // // 打开成功 // } // }) this.stopAudio() }, handleFinish: function(e){ this.doFinish(); var t = e.target.dataset.type if (this.data.content.preAudio) { this.goResultPage(t) return } var key = '' if(t == 1) { key = 'paper_' + this.data.paper.id if (!wx.getStorageSync(key) && !this.data.isPreview) { this.goJdk() } else { this.goResultPage(t) } } if (t == 2) { key = 'optPaper_' + this.data.paper.id this.goResultPage(t) } wx.setStorage({ key: key, data: 
'finished', }) }, goResultPage: function(t){ wx.navigateTo({ url: '/pages/audition/result?type=' + (t || 1) }) this.stopAudio() }, doFinish: function() { var that = this const { serverTime, openId } = getApp().globalData.userInfo const readToday = util.getCurrentDate(this.data.paper.readToday) service.sendFinish.bind(this)( this.data.paper.semesterId, this.data.paper.id, util.getCurrentDate(this.data.paper.readToday), that.data.paper.wordsTotal, this.data.noLimited ) }, stopAudio: function() { if (wx.getBackgroundAudioManager) { wx.getBackgroundAudioManager().stop() } }, // 去鲸打卡 goJdk() { wx.getStorage({ key: 'currentSemester', success: (res) => { wx.navigateToMiniProgram({ appId: res.data.appId, path: res.data.appPath, envVersion: 'release', success(res) { // 打开成功 console.log(res) }, fail(res) { // 打开成功 console.log(res) } }) }, }) } })
case 1:
random_line_split
audition.js
// pages/audition/audition.js var qcloud = require('../../vendor/wafer2-client-sdk/index') var config = require('../../config.js') var util = require('../../utils/util.js') var moment = require('../../vendor/moment.min') var WxParse = require('../../vendor/wxParse/wxParse.js'); import service from '../../utils/service'; Page({ /** * 页面的初始数据 */ data: { currentStep: 1, disabled: true, // 按钮禁用 fixed: true, // 按钮悬浮 classNames: '', disabledClassName: 'disabled', opened: false, paper: {}, mainEnded: false, optEnded: false, isAdmin: false, preFinished: false, //预习过 isPreview: false, // 试听版 finished: false, // 正课是否完成 optFinished: false, //选修课是否完成 optRecordFinished: false, //选修课录音是否听过 localAudioState: { audios: {}, optAudios: {}, } }, onShow: function(options){ this.doRequest(this.data.options); wx.getStorage({ key: 'admin', success: (res) => { if(res.data) { this.setData({ isAdmin: true, disabledClassName: '' }); } }, }); wx.getStorage({ key: 'record_' + this.data.paper.id, success: (res) => { if(res.data) { this.setData({ localAudioState: res.data }); for(var idx in res.data.audios) { switch(idx) { case 1: this.setData({ firstFinished: true }); break; case 2: this.setData({ secondFinished: true }); break; case 3: this.setData({ audioCycleEnded: true }); break; } } for(var idx in res.data.optAudios) { this.setData({ optRecordFinished: true, optFinished: true }); } for(var idx in res.data.preAudios) { this.setData({ preAudioFinshed: true }); } } } }); }, /** * 生命周期函数--监听页面加载 */ onLoad: function (options) { var that = this this.setData({ options, isPreview: !!options.isPreview, //试听 }) if (options.mode == 'opt') { //选修 this.setData({ currentStep: 4 }); } }, doRequest: function(options) { var that = this getApp().ready(() => { console.log('ready') this.initPageData(_ => { let { mainEnded, optEnded } = this.data if (options.main == 'done' && mainEnded) { let step = 3, optFinished = false // if (optEnded) { // step = 4 // optFinished = true // } this.setData({ currentStep: 
step, audioCycleEnded: true, optFinished: optFinished }) } wx.getStorage({ key: 'semester_detail_' + this.data.paper.semesterId, success: (res) => { let current; if (current = res.data.statistical.find((item) => item.paperId == this.data.paper.id)) { this.setData({ disabledClassName: '', finished: true }); } }, }) wx.getStorage({ key: 'paper_' + this.data.paper.id, success: (res) => { if (res.data == 'finished') { this.setData({ preFinished: true }) } }, }) wx.getStorage({ key: 'optRecord_' + this.data.paper.id, success: (res) => { if (res.data) { this.setData({ optRecordFinished: true }) } }, }) }) }) wx.getSetting({ success: function (res) { if (!res.authSetting['scope.userInfo']) { wx.openSetting({ success: function (res) { if (res.authSetting['scope.userInfo']) { that.initPageData() } } }) } } }) }, initPageData: function (cb){ var that = this; wx.showLoading({ title: '加载中', }) let url = '', method = 'GET', data, header; if(that.data.options.paperId) { url = config.service.paperUrl + '/' + (that.data.options.paperId || 4); } else if (that.data.options.semesterId) { url = config.service.todayPaperUrl; method = 'POST'; data = { openId: getApp().globalData.userInfo.openId, semesterId: that.data.options.semesterId, readToday: that.data.options.date || util.getCurrentDate() } header = { 'Content-Type': 'application/x-www-form-urlencoded' } } qcloud.request({ url: url, data: data, header: header, method: method, login: true, success(result) { if (result.statusCode != 200) { let msg = '系统异常,请联系管理员'; if (result.data.code == 4042) { msg = '该课程已删除'; } else if (result.data.code == 4043) { msg = '本学期已结束'; } wx.showModal({ title: '提示', content: msg, showCancel: false, success: function (res) { if (res.confirm) { wx.navigateBack({ delta: 1 }) } } }) return } // 跳转到听写 if (result.data.type == 3) { wx.redirectTo({ url: '/pages/dictation/dictation?paperId=' + result.data.id }) return; } // 跳转到测试 if (result.data.type == 2) { wx.redirectTo({ url: '/pages/test/test?paperId=' + 
result.data.id }) return; } let content = JSON.parse(result.data.content); content.audios.forEach(audio => { audio.key = Math.random() * 100000 }) content.optAudios.forEach(audio => { audio.key = Math.random() * 100000 }) let mainEnded = wx.getStorageSync('paper_' + result.data.id) let optEnded = wx.getStorageSync('optPaper_' + result.data.id) that.setData({ paper: result.data, content: content, mainEnded: mainEnded, optEnded: optEnded }) WxParse.wxParse('original', 'html', content.original, that, 5); WxParse.wxParse('handout', 'html', content.handout, that, 5); WxParse.wxParse('thirdHandout', 'html', content.thirdHandout, that, 5); WxParse.wxParse('optHandout', 'html', content.optHandout, that, 5); if (that.data.currentStep == 1 && content.preAudio) { util.showToast("Tout écouter pour passer à l’étape suivante. ", 3000) } cb && cb() }, fail(error) { // util.showModel('请求失败', error) console.log('request fail', error) }, complete() { wx.hideLoading() } }) }, /** * 生命周期函数--监听页面初次渲
*/ onReady: function () { }, /** * 下一步 */ next: function() { const step = this.data.currentStep + 1; this.setData({ currentStep: step, fixed: true }) if (this.data.currentStep == 3) { setTimeout(() => { util.showToast("Lire au moins cinq fois pour passer l’étape suivante.", 2500) } ,1000) } this.stopAudio() }, toggle: function(e) { var key = e.target.dataset.target this.setData({ [key]: !this.data[key] }) }, // 本地保存音频状态 saveLocalState: function(idx, audioKey) { audioKey = audioKey || 'audios'; var key = 'record_' + this.data.paper.id; wx.getStorage({ key: key, success: function(res) { res.data[audioKey] = res.data[audioKey] || {}; res.data[audioKey][idx] = true; wx.setStorage({ key: key, data: res.data }) }, fail: function() { wx.setStorage({ key: key, data: { [audioKey]: { [idx]: true } } }); } }); }, /** * 音频播放结束 */ onPreAudioEnded: function() { var firstFinished = !!this.data.content.audios[0].finished this.saveLocalState(0, 'preAudios'); this.setData({ preAudioFinshed: true, firstFinished: firstFinished }) if (!firstFinished) { util.showToast("Tout écouter pour passer à l’étape suivante.", 3000) } }, onAudioEnded: function(e) { switch (this.data.currentStep) { case 1: this.saveLocalState(0); var hasPreAudio = this.data.content.preAudio this.data.content.audios[0].finished = true; var preAudioFinshed = this.data.preAudioFinshed if ((!hasPreAudio || preAudioFinshed)) { this.setData({ firstFinished: true }) } if(hasPreAudio && !preAudioFinshed) { util.showToast("Tout écouter pour passer à l’étape suivante.", 3000) } break case 2: this.saveLocalState(1); this.setData({ secondFinished: true }) break case 4: this.saveLocalState(0, 'optAudios'); wx.setStorage({ key: 'optRecord_' + this.data.paper.id, data: true, }) this.setData({ optRecordFinished: true, optFinished: true }) break } if(this.data.currentStep == 3) { this.saveLocalState(2); return } }, onAudioCycleEnded: function(e) { this.setData({ audioCycleEnded: true }); this.doFinish(); }, onPreAudioReady: function 
(e) { this.preAudioCtx = e.detail.context }, onAudioReady: function(e){ this.audioCtx = e.detail.context }, onPreAudioPlay: function (e) { this.audioCtx && this.audioCtx.pause() }, onAudioPlay: function (e) { this.preAudioCtx && this.preAudioCtx.pause() }, toOpt: function(){ // this.data.content.audios = this.data.content.optAudios this.setData({ currentStep: 4, // isOptional: true, // content: { ...this.data.content } }) wx.pageScrollTo({ scrollTop: 0 }) this.stopAudio() }, toMiniPro: function() { // wx.navigateToMiniProgram({ // appId: 'wxde736ea090d9d526', // path: 'pages/user/home/home?courseId=1970&start_at=2017-12-12&end_time=2017-12-14', // extraData: { // foo: 'bar' // }, // envVersion: 'release', // success(res) { // // 打开成功 // } // }) this.stopAudio() }, handleFinish: function(e){ this.doFinish(); var t = e.target.dataset.type if (this.data.content.preAudio) { this.goResultPage(t) return } var key = '' if(t == 1) { key = 'paper_' + this.data.paper.id if (!wx.getStorageSync(key) && !this.data.isPreview) { this.goJdk() } else { this.goResultPage(t) } } if (t == 2) { key = 'optPaper_' + this.data.paper.id this.goResultPage(t) } wx.setStorage({ key: key, data: 'finished', }) }, goResultPage: function(t){ wx.navigateTo({ url: '/pages/audition/result?type=' + (t || 1) }) this.stopAudio() }, doFinish: function() { var that = this const { serverTime, openId } = getApp().globalData.userInfo const readToday = util.getCurrentDate(this.data.paper.readToday) service.sendFinish.bind(this)( this.data.paper.semesterId, this.data.paper.id, util.getCurrentDate(this.data.paper.readToday), that.data.paper.wordsTotal, this.data.noLimited ) }, stopAudio: function() { if (wx.getBackgroundAudioManager) { wx.getBackgroundAudioManager().stop() } }, // 去鲸打卡 goJdk() { wx.getStorage({ key: 'currentSemester', success: (res) => { wx.navigateToMiniProgram({ appId: res.data.appId, path: res.data.appPath, envVersion: 'release', success(res) { // 打开成功 console.log(res) }, fail(res) { // 
打开成功 console.log(res) } }) }, }) } })
染完成
identifier_name
audition.js
// pages/audition/audition.js var qcloud = require('../../vendor/wafer2-client-sdk/index') var config = require('../../config.js') var util = require('../../utils/util.js') var moment = require('../../vendor/moment.min') var WxParse = require('../../vendor/wxParse/wxParse.js'); import service from '../../utils/service'; Page({ /** * 页面的初始数据 */ data: { currentStep: 1, disabled: true, // 按钮禁用 fixed: true, // 按钮悬浮 classNames: '', disabledClassName: 'disabled', opened: false, paper: {}, mainEnded: false, optEnded: false, isAdmin: false, preFinished: false, //预习过 isPreview: false, // 试听版 finished: false, // 正课是否完成 optFinished: false, //选修课是否完成 optRecordFinished: false, //选修课录音是否听过 localAudioState: { audios: {}, optAudios: {}, } }, onShow: function(options){ this.doRequest(this.data.options); wx.getStorage({ key: 'admin', success: (res) => { if(res.data) { this.setData({ isAdmin: true, disabledClassName: '' }); } }, }); wx.getStorage({ key: 'record_' + this.data.paper.id, success: (res) => { if(res.data) { this.setData({ localAudioState: res.data }); for(var idx in res.data.audios) { switch(idx) { case 1: this.setData({ firstFinished: true }); break; case 2: this.setData({ secondFinished: true }); break; case 3: this.setData({ audioCycleEnded: true }); break; } } for(var idx in res.data.optAudios) { this.setData({ optRecordFinished: true, optFinished: true }); } for(var idx in res.data.preAudios) { this.setData({ preAudioFinshed: true }); } } } }); }, /** * 生命周期函数--监听页面加载 */ onLoad: function (options) { var that = this this.setData({ options, isPreview: !!options.isPreview, //试听 }) if (options.mode == 'opt') { //选修 this.setData({ currentStep: 4 }); } }, doRequest: function(options) { var that = this getApp().ready(() => { console.log('ready') this.initPageData(_ => { let { mainEnded, optEnded } = this.data if (options.main == 'done' && mainEnded) { let step = 3, optFinished = false // if (optEnded) { // step = 4 // optFinished = true // } this.setData({ currentStep: 
step, audioCycleEnded: true, optFinished: optFinished }) } wx.getStorage({ key: 'semester_detail_' + this.data.paper.semesterId, success: (res) => { let current; if (current = res.data.statistical.find((item) => item.paperId == this.data.paper.id)) { this.setData({ disabledClassName: '', finished: true }); } }, }) wx.getStorage({ key: 'paper_' + this.data.paper.id, success: (res) => { if (res.data == 'finished') { this.setData({ preFinished: true }) } }, }) wx.getStorage({ key: 'optRecord_' + this.data.paper.id, success: (res) => { if (res.data) { this.setData({ optRecordFinished: true }) } }, }) }) }) wx.getSetting({ success: function (res) { if (!res.authSetting['scope.userInfo']) { wx.openSetting({ success: function (res) { if (res.authSetting['scope.userInfo']) { that.initPageData() } } }) } } }) }, initPageData: function (cb){ var that = this; wx.showLoading({ title: '加载中', }) let url = '', method = 'GET', data, header; if(that.data.options.paperId) { url = config.service.paperUrl + '/' + (that.data.options.paperId || 4); } else if (that.data.options.semesterId) { url = config.service.todayPaperUrl; method = 'POST'; data = { openId: getApp().globalData.userInfo.openId, semesterId: that.data.options.semesterId, readToday: that.data.options.date || util.getCurrentDate() } header = { 'Content-Type': 'application/x-www-form-urlencoded' } } qcloud.request({ url: url, data: data, header: header, method: method, login: true, success(result) { if (result.statusCode != 200) { let msg = '系统异常,请联系管理员'; if (result.data.code == 4042) { msg = '该课程已删除'; } else if (result.data.code == 4043) { msg = '本学期已结束'; } wx.showModal({ title: '提示', content: msg, showCancel: false, success: function (res) { if (res.confirm) { wx.navigateBack({ delta: 1 }) } } }) return } // 跳转到听写 if (result.data.type == 3) { wx.redirectTo({ url: '/pages/dictation/dictation?paperId=' + result.data.id }) return; } // 跳转到测试 if (result.data.type == 2) { wx.redirectTo({ url: '/pages/test/test?paperId=' + 
result.data.id }) return; } let content = JSON.parse(result.data.content); content.audios.forEach(audio => { audio.key = Math.random() * 100000 }) content.optAudios.forEach(audio => { audio.key = Math.random() * 100000 }) let mainEnded = wx.getStorageSync('paper_' + result.data.id) let optEnded = wx.getStorageSync('optPaper_' + result.data.id) that.setData({ paper: result.data, content: content, mainEnded: mainEnded, optEnded: optEnded }) WxParse.wxParse('original', 'html', content.original, that, 5); WxParse.wxParse('handout', 'html', content.handout, that, 5); WxParse.wxParse('thirdHandout', 'html', content.thirdHandout, that, 5); WxParse.wxParse('optHandout', 'html', content.optHandout, that, 5); if (that.data.currentStep == 1 && content.preAudio) { util.showToast("Tout écouter pour passer à l’étape suivante. ", 3000) } cb && cb() }, fail(error) { // util.showModel('请求失败', error) console.log('request fail', error) }, complete() { wx.hideLoading() } }) }, /** * 生命周期函数--监听页面初次渲染完成 */ onReady: function () { }, /** * 下一步 */ next: function() { const step = this.data.currentStep + 1; t
tep, fixed: true }) if (this.data.currentStep == 3) { setTimeout(() => { util.showToast("Lire au moins cinq fois pour passer l’étape suivante.", 2500) } ,1000) } this.stopAudio() }, toggle: function(e) { var key = e.target.dataset.target this.setData({ [key]: !this.data[key] }) }, // 本地保存音频状态 saveLocalState: function(idx, audioKey) { audioKey = audioKey || 'audios'; var key = 'record_' + this.data.paper.id; wx.getStorage({ key: key, success: function(res) { res.data[audioKey] = res.data[audioKey] || {}; res.data[audioKey][idx] = true; wx.setStorage({ key: key, data: res.data }) }, fail: function() { wx.setStorage({ key: key, data: { [audioKey]: { [idx]: true } } }); } }); }, /** * 音频播放结束 */ onPreAudioEnded: function() { var firstFinished = !!this.data.content.audios[0].finished this.saveLocalState(0, 'preAudios'); this.setData({ preAudioFinshed: true, firstFinished: firstFinished }) if (!firstFinished) { util.showToast("Tout écouter pour passer à l’étape suivante.", 3000) } }, onAudioEnded: function(e) { switch (this.data.currentStep) { case 1: this.saveLocalState(0); var hasPreAudio = this.data.content.preAudio this.data.content.audios[0].finished = true; var preAudioFinshed = this.data.preAudioFinshed if ((!hasPreAudio || preAudioFinshed)) { this.setData({ firstFinished: true }) } if(hasPreAudio && !preAudioFinshed) { util.showToast("Tout écouter pour passer à l’étape suivante.", 3000) } break case 2: this.saveLocalState(1); this.setData({ secondFinished: true }) break case 4: this.saveLocalState(0, 'optAudios'); wx.setStorage({ key: 'optRecord_' + this.data.paper.id, data: true, }) this.setData({ optRecordFinished: true, optFinished: true }) break } if(this.data.currentStep == 3) { this.saveLocalState(2); return } }, onAudioCycleEnded: function(e) { this.setData({ audioCycleEnded: true }); this.doFinish(); }, onPreAudioReady: function (e) { this.preAudioCtx = e.detail.context }, onAudioReady: function(e){ this.audioCtx = e.detail.context }, onPreAudioPlay: 
function (e) { this.audioCtx && this.audioCtx.pause() }, onAudioPlay: function (e) { this.preAudioCtx && this.preAudioCtx.pause() }, toOpt: function(){ // this.data.content.audios = this.data.content.optAudios this.setData({ currentStep: 4, // isOptional: true, // content: { ...this.data.content } }) wx.pageScrollTo({ scrollTop: 0 }) this.stopAudio() }, toMiniPro: function() { // wx.navigateToMiniProgram({ // appId: 'wxde736ea090d9d526', // path: 'pages/user/home/home?courseId=1970&start_at=2017-12-12&end_time=2017-12-14', // extraData: { // foo: 'bar' // }, // envVersion: 'release', // success(res) { // // 打开成功 // } // }) this.stopAudio() }, handleFinish: function(e){ this.doFinish(); var t = e.target.dataset.type if (this.data.content.preAudio) { this.goResultPage(t) return } var key = '' if(t == 1) { key = 'paper_' + this.data.paper.id if (!wx.getStorageSync(key) && !this.data.isPreview) { this.goJdk() } else { this.goResultPage(t) } } if (t == 2) { key = 'optPaper_' + this.data.paper.id this.goResultPage(t) } wx.setStorage({ key: key, data: 'finished', }) }, goResultPage: function(t){ wx.navigateTo({ url: '/pages/audition/result?type=' + (t || 1) }) this.stopAudio() }, doFinish: function() { var that = this const { serverTime, openId } = getApp().globalData.userInfo const readToday = util.getCurrentDate(this.data.paper.readToday) service.sendFinish.bind(this)( this.data.paper.semesterId, this.data.paper.id, util.getCurrentDate(this.data.paper.readToday), that.data.paper.wordsTotal, this.data.noLimited ) }, stopAudio: function() { if (wx.getBackgroundAudioManager) { wx.getBackgroundAudioManager().stop() } }, // 去鲸打卡 goJdk() { wx.getStorage({ key: 'currentSemester', success: (res) => { wx.navigateToMiniProgram({ appId: res.data.appId, path: res.data.appPath, envVersion: 'release', success(res) { // 打开成功 console.log(res) }, fail(res) { // 打开成功 console.log(res) } }) }, }) } })
his.setData({ currentStep: s
identifier_body
audition.js
// pages/audition/audition.js var qcloud = require('../../vendor/wafer2-client-sdk/index') var config = require('../../config.js') var util = require('../../utils/util.js') var moment = require('../../vendor/moment.min') var WxParse = require('../../vendor/wxParse/wxParse.js'); import service from '../../utils/service'; Page({ /** * 页面的初始数据 */ data: { currentStep: 1, disabled: true, // 按钮禁用 fixed: true, // 按钮悬浮 classNames: '', disabledClassName: 'disabled', opened: false, paper: {}, mainEnded: false, optEnded: false, isAdmin: false, preFinished: false, //预习过 isPreview: false, // 试听版 finished: false, // 正课是否完成 optFinished: false, //选修课是否完成 optRecordFinished: false, //选修课录音是否听过 localAudioState: { audios: {}, optAudios: {}, } }, onShow: function(options){ this.doRequest(this.data.options); wx.getStorage({ key: 'admin', success: (res) => { if(res.data) { this.setData({ isAdmin: true, disabledClassName: '' }); } }, }); wx.getStorage({ key: 'record_' + this.data.paper.id, success: (res) => { if(res.data) { this.setData({ localAudioState: res.data }); for(var idx in res.data.audios) { switch(idx) { case 1: this.setData({ firstFinished: true }); break; case 2: this.setData({ secondFinished: true }); break; case 3: this.setData({ audioCycleEnded: true }); break; } } for(var idx in res.data.optAudios) { this.setData({ optRecordFinished: true, optFinished: true }); } for(var idx in res.data.preAudios) { this.setData({ preAudioFinshed: true }); } } } }); }, /** * 生命周期函数--监听页面加载 */ onLoad: function (options) { var that = this this.setData({ options, isPreview: !!options.isPreview, //试听 }) if (options.mode == 'opt') { //选修 this.setData({ currentStep: 4 }); } }, doRequest: function(options) { var that = this getApp().ready(() => { console.log('ready') this.initPageData(_ => { let { mainEnded, optEnded } = this.data if (options.main == 'done' && mainEnded) { let step = 3, optFinished = false // if (optEnded) { // step = 4 // optFinished = true // } this.setData({ currentStep: 
step, audioCycleEnded: true, optFinished: optFinished }) } wx.getStorage({ key: 'semester_detail_' + this.data.paper.semesterId, success: (res) => { let current; if (current = res.data.statistical.find((item) => item.paperId == this.data.paper.id)) { this.setData({ disabledClassName: '', finished: true }); } }, }) wx.getStorage({ key: 'paper_' + this.data.paper.id, success: (res) => { if (res.data == 'finished') { this.setData({ preFinished: true }) } }, }) wx.getStorage({ key: 'optRecord_' + this.data.paper.id, success: (res) => { if (res.data) { this.setData({ optRecordFinished: true }) } }, }) }) }) wx.getSetting({ success: function (res) { if (!res.authSetting['scope.userInfo']) { wx.openSetting({ success: function (res) { if (res.authSetting['scope.userInfo']) { that.initPageData() } } }) } } }) }, initPageData: function (cb){ var that = this; wx.showLoading({ title: '加载中', }) let url = '', method = 'GET', data, header; if(that.data.options.paperId) { url = config.service.paperUrl + '/' + (that.data.options.paperId || 4); } else if (that.data.options.semesterId) { url = config.service.todayPaperUrl; method = 'POST'; data = { openId: getApp().globalData.userInfo.openId, semesterId: that.data.options.semesterId, readToday: that.data.options.date || util.getCurrentDate() } header = { 'Content-Type': 'application/x-www-form-urlencoded' } } qcloud.request({ url: url, data: data, header: header, method: method, login: true, success(result) { if (result.statusCode != 200) { let msg = '系统异常,请联系管理员'; if (result.data.code == 4042) { msg = '该课程已删除'; } else if (result.data.code == 4043) { msg = '本学期已结束'; } wx.showModal({ title: '提示', content: msg, showCancel: false, success: function (res) { if (res.confirm) { wx.navigateBack({ delta: 1 }) } } }) return } // 跳转到听写 if (result.data.type == 3) { wx.redirectTo({ url: '/pages/dictation/dictation?paperId=' + result.data.id }) return; } // 跳转到测试 if (result.data.type == 2) { wx.redirectTo({ url: '/pages/test/test?paperId=' + 
result.data.id }) return; } let content = JSON.parse(result.data.content); content.audios.forEach(audio => { audio.key = Math.random() * 100000 }) content.optAudios.forEach(audio => { audio.key = Math.random() * 100000 }) let mainEnded = wx.getStorageSync('paper_' + result.data.id) let optEnded = wx.getStorageSync('optPaper_' + result.data.id) that.setData({ paper: result.data, content: content, mainEnded: mainEnded, optEnded: optEnded }) WxParse.wxParse('original', 'html', content.original, that, 5); WxParse.wxParse('handout', 'html', content.handout, that, 5); WxParse.wxParse('thirdHandout', 'html', content.thirdHandout, that, 5); WxParse.wxParse('optHandout', 'html', content.optHandout, that, 5); if (that.data.currentStep == 1 && content.preAudio) { util.showToast("Tout écouter pour passer à l’étape suivante. ", 3000) } cb && cb() }, fail(error) { // util.showModel('请求失败', error) console.log('request fail', error) }, complete() { wx.hideLoading() } }) }, /** * 生命周期函数--监听页面初次渲染完成 */ onReady: function () { }, /** * 下一步 */ next: function() { const step = this.data.currentStep + 1; this.setData({ currentStep: step, fixed: true }) if (this.data.currentStep == 3) { setTimeout(() => { util.showToast("Lire au moins cinq fois pour passer l’étape suivante.", 2500) } ,1000) } this.stopAudio() }, toggle: function(e) { var key = e.target.dataset.target this.setData({ [key]: !this.data[key] }) }, // 本地保存音频状态 saveLocalState: function(idx, audioKey) { audioKey = audioKey || 'audios'; var key = 'record_' + this.data.paper.id; wx.getStorage({ key: key, success: function(res) { res.data[audioKey] = res.data[audioKey] || {}; res.data[audioKey][idx] = true; wx.setStorage({ key: key, data: res.data }) }, fail: function() { wx.setStorage({ key: key, data: { [audioKey]: { [idx]: true } } }); } }); }, /** * 音频播放结束 */ onPreAudioEnded: function() { var firstFinished = !!this.data.content.audios[0].finished this.saveLocalState(0, 'preAudios'); this.setData({ preAudioFinshed: true, 
firstFinished: firstFinished }) if (!firstFinished) { util.showToast("Tout écouter pour passer à l’étape suivante.", 3000) } }, onAudioEnded: function(e) { switch (this.data.currentStep) { case 1: this.saveLocalState(0); var hasPreAudio = this.data.content.preAudio this.data.content.audios[0].finished = true; var preAudioFinshed = this.data.preAudioFinshed if ((!hasPreAudio || preAudioFinshed)) { this.setData({ firstFinished: true }) } if(hasPreAudio && !preAudioFinshed) { util.showToast("Tout écouter pour passer à l’étape suivante.", 3000) } break case 2: this.saveLocalState(1);
case 4: this.saveLocalState(0, 'optAudios'); wx.setStorage({ key: 'optRecord_' + this.data.paper.id, data: true, }) this.setData({ optRecordFinished: true, optFinished: true }) break } if(this.data.currentStep == 3) { this.saveLocalState(2); return } }, onAudioCycleEnded: function(e) { this.setData({ audioCycleEnded: true }); this.doFinish(); }, onPreAudioReady: function (e) { this.preAudioCtx = e.detail.context }, onAudioReady: function(e){ this.audioCtx = e.detail.context }, onPreAudioPlay: function (e) { this.audioCtx && this.audioCtx.pause() }, onAudioPlay: function (e) { this.preAudioCtx && this.preAudioCtx.pause() }, toOpt: function(){ // this.data.content.audios = this.data.content.optAudios this.setData({ currentStep: 4, // isOptional: true, // content: { ...this.data.content } }) wx.pageScrollTo({ scrollTop: 0 }) this.stopAudio() }, toMiniPro: function() { // wx.navigateToMiniProgram({ // appId: 'wxde736ea090d9d526', // path: 'pages/user/home/home?courseId=1970&start_at=2017-12-12&end_time=2017-12-14', // extraData: { // foo: 'bar' // }, // envVersion: 'release', // success(res) { // // 打开成功 // } // }) this.stopAudio() }, handleFinish: function(e){ this.doFinish(); var t = e.target.dataset.type if (this.data.content.preAudio) { this.goResultPage(t) return } var key = '' if(t == 1) { key = 'paper_' + this.data.paper.id if (!wx.getStorageSync(key) && !this.data.isPreview) { this.goJdk() } else { this.goResultPage(t) } } if (t == 2) { key = 'optPaper_' + this.data.paper.id this.goResultPage(t) } wx.setStorage({ key: key, data: 'finished', }) }, goResultPage: function(t){ wx.navigateTo({ url: '/pages/audition/result?type=' + (t || 1) }) this.stopAudio() }, doFinish: function() { var that = this const { serverTime, openId } = getApp().globalData.userInfo const readToday = util.getCurrentDate(this.data.paper.readToday) service.sendFinish.bind(this)( this.data.paper.semesterId, this.data.paper.id, util.getCurrentDate(this.data.paper.readToday), 
that.data.paper.wordsTotal, this.data.noLimited ) }, stopAudio: function() { if (wx.getBackgroundAudioManager) { wx.getBackgroundAudioManager().stop() } }, // 去鲸打卡 goJdk() { wx.getStorage({ key: 'currentSemester', success: (res) => { wx.navigateToMiniProgram({ appId: res.data.appId, path: res.data.appPath, envVersion: 'release', success(res) { // 打开成功 console.log(res) }, fail(res) { // 打开成功 console.log(res) } }) }, }) } })
this.setData({ secondFinished: true }) break
conditional_block
Minitaur_Env.py
#!/usr/bin/env python3 import os # SUPPRESS PRINTING null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)] save = os.dup(1), os.dup(2) os.dup2(null_fds[0], 1) os.dup2(null_fds[1], 2) import numpy as np from pybullet_envs.minitaur.envs import minitaur_gym_env import math from policy.minitaur_policy import Policy import torch import warnings warnings.filterwarnings('ignore') # Set the initial position from pybullet_envs.minitaur.envs import minitaur minitaur.INIT_POSITION = [0, 0, 0.2] # ENABLE PRINTING os.dup2(save[0], 1) os.dup2(save[1], 2) os.close(null_fds[0]) os.close(null_fds[1]) class Environment: def __init__(self, max_angle, time_step=0.02, gui=False): self.gui = gui self.max_angle = max_angle if self.gui: self.minitaur_env = minitaur_gym_env.MinitaurGymEnv( urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION, render=True, motor_velocity_limit=np.inf, pd_control_enabled=True, hard_reset=False, on_rack=False, reflection=False) else: self.minitaur_env = minitaur_gym_env.MinitaurGymEnv( urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION, render=False, motor_velocity_limit=np.inf, pd_control_enabled=True, hard_reset=False, on_rack=False) self.minitaur_env.minitaur.time_step = time_step self.p = self.minitaur_env._pybullet_client # self.minitaur_env.minitaur.SetFootFriction(1.0) # self.minitaur_env.minitaur.SetFootRestitution(0.1) # self.prim_lib = np.load('prim_lib.npy') # textureId = self.p.loadTexture("heightmaps/table.png") # self.p.changeVisualShape(self.minitaur_env.ground_id, -1, textureUniqueId=textureId) self.terraintextureId = self.p.loadTexture("heightmaps/oak-wood.png") def generate_htfield(self, num_rows=12):
def is_fallen(self): """Decide whether the minitaur has fallen. If the up directions between the base and the world is larger (the dot product is smaller than 0.5), the minitaur is considered fallen. Returns: Boolean value that indicates whether the minitaur has fallen. """ orientation = self.minitaur_env.minitaur.GetBaseOrientation() rot_mat = self.minitaur_env._pybullet_client.getMatrixFromQuaternion(orientation) local_up = rot_mat[6:] return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.3) def generate_steps(self, numObs=25): p = self.p numObs *= 2 linkMasses = [None] * (numObs) colIdxs = [None] * (numObs) visIdxs = [None] * (numObs) posObs = [None] * (numObs) orientObs = [None] * (numObs) parentIdxs = [None] * (numObs) linkInertialFramePositions = [None] * (numObs) linkInertialFrameOrientations = [None] * (numObs) linkJointTypes = [None] * (numObs) linkJointAxis = [None] * (numObs) for obs in range(numObs): linkMasses[obs] = 0.0 parentIdxs[obs] = 0 linkInertialFramePositions[obs] = [0, 0, 0] linkInertialFrameOrientations[obs] = [0, 0, 0, 1] linkJointTypes[obs] = p.JOINT_FIXED linkJointAxis[obs] = np.array([0, 0, 1]) orientObs[obs] = p.getQuaternionFromEuler([0., np.pi/4, 0]) posObs, orientObs, colIdxs, visIdxs = self._generate_steps_sub(p, posObs, orientObs, colIdxs, numObs) obsUid = p.createMultiBody(baseCollisionShapeIndex=-1, baseVisualShapeIndex=-1, basePosition=[0, 0, 0], baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0], baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses, linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs, linkPositions=posObs, linkOrientations=orientObs, linkParentIndices=parentIdxs, linkInertialFramePositions=linkInertialFramePositions, linkInertialFrameOrientations=linkInertialFrameOrientations, linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis) for obs in range(numObs): p.changeVisualShape(obsUid, visIdxs[obs], textureUniqueId=self.terraintextureId) x_goal = self.goal 
y_goal = 0 posObs = np.array([None] * 3) posObs[0] = x_goal posObs[1] = y_goal posObs[2] = 0 # set z at ground level # colIdxs = p.createCollisionShape(p.GEOM_BOX, halfExtents=[0.1,5.0,0.1]) colIdxs = -1 visIdxs = p.createVisualShape(p.GEOM_BOX, halfExtents=[0.05,5.0,0.15], rgbaColor=[0.7, 0, 0, 1]) linkMasses = 0.0 parentIdxs = 0 linkInertialFramePositions = [0, 0, 0] linkInertialFrameOrientations = [0, 0, 0, 1] linkJointTypes = p.JOINT_FIXED linkJointAxis = np.array([0, 0, 1]) orientObs = p.getQuaternionFromEuler([0., 0., 0.]) p.createMultiBody(baseCollisionShapeIndex=colIdxs, baseVisualShapeIndex=visIdxs, basePosition=posObs)#, return obsUid def _generate_steps_sub(self, p, posObs, orientObs, colIdxs, numObs): visIdxs = [None]*numObs for obs in range(int(numObs/2)): # Cylindrical obstacles posObs_obs1 = [None] * 3 posObs_obs2 = [None] * 3 theta = np.random.rand(1)*(self.max_angle*math.pi/180)*(2/3) l1 = 0.5 l2 = l1 theta_rotate = theta h = l1 * np.sin(np.pi/4 - theta) /(2**0.5) d = (2**0.5) * l1 * np.cos(np.pi/4 + theta) halfExtents = [l1/2,5.0,l2/2] x_temp = 0.5 + obs*l1 y_temp = 0 posObs_obs1[0] = x_temp posObs_obs1[1] = y_temp posObs_obs1[2] = -h # set z at ground level posObs[2*obs] = posObs_obs1 colIdxs[2*obs] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents) orientObs[2*obs] = p.getQuaternionFromEuler([0., -theta_rotate, 0]) visIdxs[2*obs] = p.createVisualShape(p.GEOM_BOX, halfExtents=halfExtents,) # print(posObs_obs[0]) posObs_obs2[0] = x_temp + d # print(posObs_obs[0]) posObs_obs2[1] = y_temp posObs_obs2[2] = -h # set z at ground level posObs[2*obs+1] = posObs_obs2 colIdxs[2*obs+1] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents) orientObs[2*obs+1] = p.getQuaternionFromEuler([0., theta_rotate, 0]) visIdxs[2*obs+1] = p.createVisualShape(p.GEOM_BOX, halfExtents=halfExtents,) return posObs, orientObs, colIdxs, visIdxs # def _get_bounding_amplitude(self, 
prim): # return self.prim_lib[prim] def execute_policy(self, policy, goal, alpha, time_step=0.01, speed=40, comp_len=10, prim_horizon=50, image_size=50, device=torch.device('cuda'), record_vid=False, vid_num=0): if record_vid: import cv2 # videoObj = cv2.VideoWriter('video'+str(vid_num)+'.avi', cv2.VideoWriter_fourcc('M','J','P','G'), # 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT)) videoObj = cv2.VideoWriter('video'+str(vid_num)+'.mp4', cv2.VideoWriter_fourcc(*'mp4v'), 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT)) t_flag = 0 goal_cost = np.zeros(1) coll_cost = np.ones(1) cost = goal_cost + coll_cost total_time_steps = comp_len * prim_horizon # baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0], # baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses, # linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs, # linkPositions=posObs) # , linkOrientations=orientObs, linkParentIndices=parentIdxs, # linkInertialFramePositions=linkInertialFramePositions, # linkInertialFrameOrientations=linkInertialFrameOrientations, # linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis) for i in range(5): action = [0,0,0,0,0,0,0,0] self.minitaur_env.step(action) for i in range(comp_len): # Get current depth map cam_pos = list(self.minitaur_env.minitaur.GetBasePosition()) cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation()) _, depth = self._mount_cam(cam_pos, cam_orn) # Decide primitive from the policy depth = torch.Tensor(depth).view([1, 1, image_size, image_size]) assert depth.nelement()!=0,"Tensor is empty." 
motor_angles = torch.Tensor(self.minitaur_env.minitaur.GetMotorAngles()).view([1,8]).detach() motor_velocities = torch.Tensor(self.minitaur_env.minitaur.GetMotorVelocities()).view([1,8]).detach() base_pos = torch.Tensor(self.minitaur_env.minitaur.GetBasePosition()).view([1,3]).detach() base_orn = torch.Tensor(self.minitaur_env.minitaur.GetBaseRollPitchYaw()).view([1,3]).detach() control_params = policy(depth.to(device), motor_angles.to(device), motor_velocities.to(device), base_pos.to(device), base_orn.to(device))[0] amplitude1 = (control_params[0].item()*0.8)+0.2 # amplitude1 = torch.clamp(control_params[0], min=0.2, max=1.0).item() amplitude2 = (control_params[1].item()*0.8)+0.2 # amplitude2 = torch.clamp(control_params[0], min=0.2, max=1.0).item() steering_amplitude = torch.clamp(control_params[2], min=0.0, max=min(1-amplitude1, 1-amplitude2)).item() # phase1 = control_params[3].item() * math.pi # phase2 = control_params[4].item() * math.pi speed = control_params[3].item()*20 + 20 for step_counter in range(prim_horizon): t = step_counter * time_step + t_flag # if t>4.1: # import matplotlib.pyplot as plt # cam_pos = list(self.minitaur_env.minitaur.GetBasePosition()) # cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation()) # rgb, _ = self._mount_cam(cam_pos, cam_orn, w=500, h=500) # fig = plt.figure() # ax = plt.subplot(111) # ax.set_yticklabels([]) # ax.set_xticklabels([]) # plt.imshow(rgb, cmap='gray', interpolation='nearest') # plt.savefig('minitaur_rgb_view.png') # time.sleep(600) # amplitude1 = 0.5 # amplitude2 = 0.5 # steering_amplitude = 0.0 # speed = 50 if record_vid: rgb = self.minitaur_env.render() cv2.imshow('Vis_Vid_Rec', cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR)) cv2.waitKey(1) videoObj.write(cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR)) phase1 = math.pi phase2 = phase1 # Applying asymmetrical sine gaits to different legs can steer the minitaur. 
a1 = math.sin(t * speed) * (amplitude1 + steering_amplitude) a2 = math.sin(t * speed + phase1) * (amplitude1 - steering_amplitude) a3 = math.sin(t * speed) * amplitude2 a4 = math.sin(t * speed + phase2) * amplitude2 action = [a1, a2, a2, a1, a3, a4, a4, a3] self.minitaur_env.step(action) # Compute costs # rob_pos = np.array(cam_pos[0:2]) rob_pos = cam_pos[0] # goal_cost = np.linalg.norm(rob_pos-goal, ord=2)/10 goal_cost = np.abs(rob_pos-goal)/goal fall_cost = 1 - (step_counter + i*prim_horizon)/(total_time_steps-1) cost = alpha * fall_cost + (1-alpha) * goal_cost end_position = self.minitaur_env.minitaur.GetBasePosition() end_position = end_position[0] if self.is_fallen(): if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position if end_position>goal: goal_cost = 0. cost = alpha * fall_cost + (1-alpha) * goal_cost if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position end_position = self.minitaur_env.minitaur.GetBasePosition() end_position = end_position[0] # print(end_position) # print("Speed:", np.linalg.norm(start_position-end_position)/(prim_horizon*time_step)) t_flag += prim_horizon*time_step if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position def _mount_cam(self, base_p, base_o, w=50, h=50): ''' Mounts an RGB-D camera on a robot in pybullet Parameters ---------- w : Width h : Height base_p : Base position base_o : Base orientation as a quaternion Returns ------- rgb : RGB image depth : Depth map ''' p = self.p cam_pos = base_p # Rotation matrix rot_matrix = p.getMatrixFromQuaternion(base_o) rot_matrix = np.array(rot_matrix).reshape(3, 3) # Initial vectors init_camera_vector = (1, 0, -0.05) # x-axis init_up_vector = (0, 0, 1) # z-axis cam_translate = (0.1,0,0.1) # Shift camera from the base poitions w.r.t. 
to the link's local frame # Rotated vectors camera_vector = rot_matrix.dot(init_camera_vector) up_vector = rot_matrix.dot(init_up_vector) cam_pos = cam_pos + rot_matrix.dot(cam_translate) view_matrix = p.computeViewMatrix(cam_pos, cam_pos + 0.1 * camera_vector, up_vector) # Get Image projection_matrix = p.computeProjectionMatrixFOV(fov=90.0, aspect=1., nearVal=0.01, farVal=1000.) # heightfield does not work with the tiny renderer, have to use the openGL renderer _, _, rgb, depth, _ = p.getCameraImage(w, h, view_matrix, projection_matrix)#, flags=p.ER_NO_SEGMENTATION_MASK) # Reshape rgb image and drop the alpha layer (#4) rgb = np.array(rgb, dtype=np.uint8) rgb = np.reshape(rgb, (w, h, 4)) rgb = rgb[:, :, :3] # Reshape depth map depth = np.array(depth, dtype=np.float32) far=1000.0 near=0.01 depth = far*near/(far - (far - near)*depth) depth = np.reshape(depth, (w, h)) return rgb, depth if __name__ == '__main__': env = Environment([0,0.1], gui=True) env.terrain = env.generate_htfield() env.minitaur_env.reset() policy = Policy() policy = policy.to('cuda') cost, coll_cost, goal_cost = env.execute_policy(policy, goal=np.ones(2), alpha=0.5) print(cost, coll_cost, goal_cost)
'''Generate a heightfield. Resource: https://github.com/bulletphysics/bullet3/blob/master/examples/pybullet/examples/heightfield.py''' p = self.p numHeightfieldRows = num_rows numHeightfieldColumns = num_rows heightfieldData = [0]*numHeightfieldRows*numHeightfieldColumns for i in range(numHeightfieldRows*numHeightfieldColumns): # heightfieldData[i] = 0.1 if (i%2)==0: heightfieldData[i] = np.random.uniform(self.h_lim[0],self.h_lim[1]) else: heightfieldData[i] = 0 terrainShape = p.createCollisionShape(shapeType = p.GEOM_HEIGHTFIELD, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, # this flag ensures foot does not get stuck meshScale=[1,1,1], heightfieldTextureScaling=(numHeightfieldRows-1)/2, heightfieldData=heightfieldData, numHeightfieldRows=numHeightfieldRows, numHeightfieldColumns=numHeightfieldColumns) textureId = p.loadTexture("heightmaps/wm_height_out.png") terrain = p.createMultiBody(0, terrainShape) # p.changeVisualShape(terrain, -1, rgbaColor=[1,1,1,1]) p.changeVisualShape(terrain, -1, textureUniqueId = textureId) # Remove the previous terrain and establish the new one # Note: first time this function is called, the default terrain of minitaur_env # which is plane ground is removed. Subsequent calls remove the previous heightfield self.minitaur_env._pybullet_client.removeBody(self.minitaur_env.ground_id) self.minitaur_env.ground_id = terrain return terrain
identifier_body
Minitaur_Env.py
#!/usr/bin/env python3 import os # SUPPRESS PRINTING null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)] save = os.dup(1), os.dup(2) os.dup2(null_fds[0], 1) os.dup2(null_fds[1], 2) import numpy as np from pybullet_envs.minitaur.envs import minitaur_gym_env import math from policy.minitaur_policy import Policy import torch import warnings warnings.filterwarnings('ignore') # Set the initial position from pybullet_envs.minitaur.envs import minitaur minitaur.INIT_POSITION = [0, 0, 0.2] # ENABLE PRINTING os.dup2(save[0], 1) os.dup2(save[1], 2) os.close(null_fds[0]) os.close(null_fds[1]) class Environment: def __init__(self, max_angle, time_step=0.02, gui=False): self.gui = gui self.max_angle = max_angle if self.gui: self.minitaur_env = minitaur_gym_env.MinitaurGymEnv( urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION, render=True, motor_velocity_limit=np.inf, pd_control_enabled=True, hard_reset=False, on_rack=False, reflection=False) else: self.minitaur_env = minitaur_gym_env.MinitaurGymEnv( urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION, render=False, motor_velocity_limit=np.inf, pd_control_enabled=True, hard_reset=False, on_rack=False) self.minitaur_env.minitaur.time_step = time_step self.p = self.minitaur_env._pybullet_client # self.minitaur_env.minitaur.SetFootFriction(1.0) # self.minitaur_env.minitaur.SetFootRestitution(0.1) # self.prim_lib = np.load('prim_lib.npy') # textureId = self.p.loadTexture("heightmaps/table.png") # self.p.changeVisualShape(self.minitaur_env.ground_id, -1, textureUniqueId=textureId) self.terraintextureId = self.p.loadTexture("heightmaps/oak-wood.png") def generate_htfield(self, num_rows=12): '''Generate a heightfield. 
Resource: https://github.com/bulletphysics/bullet3/blob/master/examples/pybullet/examples/heightfield.py''' p = self.p numHeightfieldRows = num_rows numHeightfieldColumns = num_rows heightfieldData = [0]*numHeightfieldRows*numHeightfieldColumns for i in range(numHeightfieldRows*numHeightfieldColumns): # heightfieldData[i] = 0.1 if (i%2)==0: heightfieldData[i] = np.random.uniform(self.h_lim[0],self.h_lim[1]) else: heightfieldData[i] = 0 terrainShape = p.createCollisionShape(shapeType = p.GEOM_HEIGHTFIELD, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, # this flag ensures foot does not get stuck meshScale=[1,1,1], heightfieldTextureScaling=(numHeightfieldRows-1)/2, heightfieldData=heightfieldData, numHeightfieldRows=numHeightfieldRows, numHeightfieldColumns=numHeightfieldColumns) textureId = p.loadTexture("heightmaps/wm_height_out.png") terrain = p.createMultiBody(0, terrainShape) # p.changeVisualShape(terrain, -1, rgbaColor=[1,1,1,1]) p.changeVisualShape(terrain, -1, textureUniqueId = textureId) # Remove the previous terrain and establish the new one # Note: first time this function is called, the default terrain of minitaur_env # which is plane ground is removed. Subsequent calls remove the previous heightfield self.minitaur_env._pybullet_client.removeBody(self.minitaur_env.ground_id) self.minitaur_env.ground_id = terrain return terrain def is_fallen(self): """Decide whether the minitaur has fallen. If the up directions between the base and the world is larger (the dot product is smaller than 0.5), the minitaur is considered fallen. Returns: Boolean value that indicates whether the minitaur has fallen. 
""" orientation = self.minitaur_env.minitaur.GetBaseOrientation() rot_mat = self.minitaur_env._pybullet_client.getMatrixFromQuaternion(orientation) local_up = rot_mat[6:] return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.3) def generate_steps(self, numObs=25): p = self.p numObs *= 2 linkMasses = [None] * (numObs) colIdxs = [None] * (numObs) visIdxs = [None] * (numObs) posObs = [None] * (numObs) orientObs = [None] * (numObs) parentIdxs = [None] * (numObs) linkInertialFramePositions = [None] * (numObs) linkInertialFrameOrientations = [None] * (numObs) linkJointTypes = [None] * (numObs) linkJointAxis = [None] * (numObs) for obs in range(numObs): linkMasses[obs] = 0.0 parentIdxs[obs] = 0 linkInertialFramePositions[obs] = [0, 0, 0] linkInertialFrameOrientations[obs] = [0, 0, 0, 1] linkJointTypes[obs] = p.JOINT_FIXED linkJointAxis[obs] = np.array([0, 0, 1]) orientObs[obs] = p.getQuaternionFromEuler([0., np.pi/4, 0]) posObs, orientObs, colIdxs, visIdxs = self._generate_steps_sub(p, posObs, orientObs, colIdxs, numObs) obsUid = p.createMultiBody(baseCollisionShapeIndex=-1, baseVisualShapeIndex=-1, basePosition=[0, 0, 0], baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0], baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses, linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs, linkPositions=posObs, linkOrientations=orientObs, linkParentIndices=parentIdxs, linkInertialFramePositions=linkInertialFramePositions, linkInertialFrameOrientations=linkInertialFrameOrientations, linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis) for obs in range(numObs): p.changeVisualShape(obsUid, visIdxs[obs], textureUniqueId=self.terraintextureId) x_goal = self.goal y_goal = 0 posObs = np.array([None] * 3) posObs[0] = x_goal posObs[1] = y_goal posObs[2] = 0 # set z at ground level # colIdxs = p.createCollisionShape(p.GEOM_BOX, halfExtents=[0.1,5.0,0.1]) colIdxs = -1 visIdxs = p.createVisualShape(p.GEOM_BOX, 
halfExtents=[0.05,5.0,0.15], rgbaColor=[0.7, 0, 0, 1]) linkMasses = 0.0 parentIdxs = 0 linkInertialFramePositions = [0, 0, 0] linkInertialFrameOrientations = [0, 0, 0, 1] linkJointTypes = p.JOINT_FIXED linkJointAxis = np.array([0, 0, 1]) orientObs = p.getQuaternionFromEuler([0., 0., 0.]) p.createMultiBody(baseCollisionShapeIndex=colIdxs, baseVisualShapeIndex=visIdxs, basePosition=posObs)#, return obsUid def _generate_steps_sub(self, p, posObs, orientObs, colIdxs, numObs): visIdxs = [None]*numObs for obs in range(int(numObs/2)): # Cylindrical obstacles posObs_obs1 = [None] * 3 posObs_obs2 = [None] * 3 theta = np.random.rand(1)*(self.max_angle*math.pi/180)*(2/3) l1 = 0.5 l2 = l1 theta_rotate = theta h = l1 * np.sin(np.pi/4 - theta) /(2**0.5) d = (2**0.5) * l1 * np.cos(np.pi/4 + theta) halfExtents = [l1/2,5.0,l2/2] x_temp = 0.5 + obs*l1 y_temp = 0 posObs_obs1[0] = x_temp posObs_obs1[1] = y_temp posObs_obs1[2] = -h # set z at ground level posObs[2*obs] = posObs_obs1 colIdxs[2*obs] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents) orientObs[2*obs] = p.getQuaternionFromEuler([0., -theta_rotate, 0]) visIdxs[2*obs] = p.createVisualShape(p.GEOM_BOX, halfExtents=halfExtents,) # print(posObs_obs[0]) posObs_obs2[0] = x_temp + d # print(posObs_obs[0]) posObs_obs2[1] = y_temp posObs_obs2[2] = -h # set z at ground level posObs[2*obs+1] = posObs_obs2 colIdxs[2*obs+1] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents) orientObs[2*obs+1] = p.getQuaternionFromEuler([0., theta_rotate, 0]) visIdxs[2*obs+1] = p.createVisualShape(p.GEOM_BOX, halfExtents=halfExtents,) return posObs, orientObs, colIdxs, visIdxs # def _get_bounding_amplitude(self, prim): # return self.prim_lib[prim] def execute_policy(self, policy, goal, alpha, time_step=0.01, speed=40, comp_len=10, prim_horizon=50, image_size=50, device=torch.device('cuda'), record_vid=False, vid_num=0): if record_vid: import cv2 # videoObj = 
cv2.VideoWriter('video'+str(vid_num)+'.avi', cv2.VideoWriter_fourcc('M','J','P','G'), # 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT)) videoObj = cv2.VideoWriter('video'+str(vid_num)+'.mp4', cv2.VideoWriter_fourcc(*'mp4v'), 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT)) t_flag = 0 goal_cost = np.zeros(1) coll_cost = np.ones(1) cost = goal_cost + coll_cost total_time_steps = comp_len * prim_horizon # baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0], # baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses, # linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs, # linkPositions=posObs) # , linkOrientations=orientObs, linkParentIndices=parentIdxs, # linkInertialFramePositions=linkInertialFramePositions, # linkInertialFrameOrientations=linkInertialFrameOrientations, # linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis) for i in range(5): action = [0,0,0,0,0,0,0,0] self.minitaur_env.step(action) for i in range(comp_len): # Get current depth map cam_pos = list(self.minitaur_env.minitaur.GetBasePosition()) cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation()) _, depth = self._mount_cam(cam_pos, cam_orn) # Decide primitive from the policy depth = torch.Tensor(depth).view([1, 1, image_size, image_size]) assert depth.nelement()!=0,"Tensor is empty." 
motor_angles = torch.Tensor(self.minitaur_env.minitaur.GetMotorAngles()).view([1,8]).detach() motor_velocities = torch.Tensor(self.minitaur_env.minitaur.GetMotorVelocities()).view([1,8]).detach() base_pos = torch.Tensor(self.minitaur_env.minitaur.GetBasePosition()).view([1,3]).detach() base_orn = torch.Tensor(self.minitaur_env.minitaur.GetBaseRollPitchYaw()).view([1,3]).detach() control_params = policy(depth.to(device), motor_angles.to(device), motor_velocities.to(device), base_pos.to(device), base_orn.to(device))[0] amplitude1 = (control_params[0].item()*0.8)+0.2 # amplitude1 = torch.clamp(control_params[0], min=0.2, max=1.0).item() amplitude2 = (control_params[1].item()*0.8)+0.2 # amplitude2 = torch.clamp(control_params[0], min=0.2, max=1.0).item() steering_amplitude = torch.clamp(control_params[2], min=0.0, max=min(1-amplitude1, 1-amplitude2)).item() # phase1 = control_params[3].item() * math.pi # phase2 = control_params[4].item() * math.pi speed = control_params[3].item()*20 + 20 for step_counter in range(prim_horizon): t = step_counter * time_step + t_flag # if t>4.1: # import matplotlib.pyplot as plt # cam_pos = list(self.minitaur_env.minitaur.GetBasePosition()) # cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation()) # rgb, _ = self._mount_cam(cam_pos, cam_orn, w=500, h=500) # fig = plt.figure() # ax = plt.subplot(111) # ax.set_yticklabels([]) # ax.set_xticklabels([]) # plt.imshow(rgb, cmap='gray', interpolation='nearest') # plt.savefig('minitaur_rgb_view.png') # time.sleep(600) # amplitude1 = 0.5 # amplitude2 = 0.5 # steering_amplitude = 0.0 # speed = 50 if record_vid: rgb = self.minitaur_env.render() cv2.imshow('Vis_Vid_Rec', cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR)) cv2.waitKey(1) videoObj.write(cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR)) phase1 = math.pi phase2 = phase1 # Applying asymmetrical sine gaits to different legs can steer the minitaur. 
a1 = math.sin(t * speed) * (amplitude1 + steering_amplitude) a2 = math.sin(t * speed + phase1) * (amplitude1 - steering_amplitude) a3 = math.sin(t * speed) * amplitude2 a4 = math.sin(t * speed + phase2) * amplitude2 action = [a1, a2, a2, a1, a3, a4, a4, a3] self.minitaur_env.step(action) # Compute costs # rob_pos = np.array(cam_pos[0:2]) rob_pos = cam_pos[0] # goal_cost = np.linalg.norm(rob_pos-goal, ord=2)/10 goal_cost = np.abs(rob_pos-goal)/goal fall_cost = 1 - (step_counter + i*prim_horizon)/(total_time_steps-1) cost = alpha * fall_cost + (1-alpha) * goal_cost end_position = self.minitaur_env.minitaur.GetBasePosition() end_position = end_position[0] if self.is_fallen(): if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position if end_position>goal: goal_cost = 0. cost = alpha * fall_cost + (1-alpha) * goal_cost if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position end_position = self.minitaur_env.minitaur.GetBasePosition() end_position = end_position[0] # print(end_position) # print("Speed:", np.linalg.norm(start_position-end_position)/(prim_horizon*time_step)) t_flag += prim_horizon*time_step if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position def _mount_cam(self, base_p, base_o, w=50, h=50): ''' Mounts an RGB-D camera on a robot in pybullet
---------- w : Width h : Height base_p : Base position base_o : Base orientation as a quaternion Returns ------- rgb : RGB image depth : Depth map ''' p = self.p cam_pos = base_p # Rotation matrix rot_matrix = p.getMatrixFromQuaternion(base_o) rot_matrix = np.array(rot_matrix).reshape(3, 3) # Initial vectors init_camera_vector = (1, 0, -0.05) # x-axis init_up_vector = (0, 0, 1) # z-axis cam_translate = (0.1,0,0.1) # Shift camera from the base poitions w.r.t. to the link's local frame # Rotated vectors camera_vector = rot_matrix.dot(init_camera_vector) up_vector = rot_matrix.dot(init_up_vector) cam_pos = cam_pos + rot_matrix.dot(cam_translate) view_matrix = p.computeViewMatrix(cam_pos, cam_pos + 0.1 * camera_vector, up_vector) # Get Image projection_matrix = p.computeProjectionMatrixFOV(fov=90.0, aspect=1., nearVal=0.01, farVal=1000.) # heightfield does not work with the tiny renderer, have to use the openGL renderer _, _, rgb, depth, _ = p.getCameraImage(w, h, view_matrix, projection_matrix)#, flags=p.ER_NO_SEGMENTATION_MASK) # Reshape rgb image and drop the alpha layer (#4) rgb = np.array(rgb, dtype=np.uint8) rgb = np.reshape(rgb, (w, h, 4)) rgb = rgb[:, :, :3] # Reshape depth map depth = np.array(depth, dtype=np.float32) far=1000.0 near=0.01 depth = far*near/(far - (far - near)*depth) depth = np.reshape(depth, (w, h)) return rgb, depth if __name__ == '__main__': env = Environment([0,0.1], gui=True) env.terrain = env.generate_htfield() env.minitaur_env.reset() policy = Policy() policy = policy.to('cuda') cost, coll_cost, goal_cost = env.execute_policy(policy, goal=np.ones(2), alpha=0.5) print(cost, coll_cost, goal_cost)
Parameters
random_line_split
Minitaur_Env.py
#!/usr/bin/env python3 import os # SUPPRESS PRINTING null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)] save = os.dup(1), os.dup(2) os.dup2(null_fds[0], 1) os.dup2(null_fds[1], 2) import numpy as np from pybullet_envs.minitaur.envs import minitaur_gym_env import math from policy.minitaur_policy import Policy import torch import warnings warnings.filterwarnings('ignore') # Set the initial position from pybullet_envs.minitaur.envs import minitaur minitaur.INIT_POSITION = [0, 0, 0.2] # ENABLE PRINTING os.dup2(save[0], 1) os.dup2(save[1], 2) os.close(null_fds[0]) os.close(null_fds[1]) class Environment: def __init__(self, max_angle, time_step=0.02, gui=False): self.gui = gui self.max_angle = max_angle if self.gui: self.minitaur_env = minitaur_gym_env.MinitaurGymEnv( urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION, render=True, motor_velocity_limit=np.inf, pd_control_enabled=True, hard_reset=False, on_rack=False, reflection=False) else: self.minitaur_env = minitaur_gym_env.MinitaurGymEnv( urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION, render=False, motor_velocity_limit=np.inf, pd_control_enabled=True, hard_reset=False, on_rack=False) self.minitaur_env.minitaur.time_step = time_step self.p = self.minitaur_env._pybullet_client # self.minitaur_env.minitaur.SetFootFriction(1.0) # self.minitaur_env.minitaur.SetFootRestitution(0.1) # self.prim_lib = np.load('prim_lib.npy') # textureId = self.p.loadTexture("heightmaps/table.png") # self.p.changeVisualShape(self.minitaur_env.ground_id, -1, textureUniqueId=textureId) self.terraintextureId = self.p.loadTexture("heightmaps/oak-wood.png") def generate_htfield(self, num_rows=12): '''Generate a heightfield. 
Resource: https://github.com/bulletphysics/bullet3/blob/master/examples/pybullet/examples/heightfield.py''' p = self.p numHeightfieldRows = num_rows numHeightfieldColumns = num_rows heightfieldData = [0]*numHeightfieldRows*numHeightfieldColumns for i in range(numHeightfieldRows*numHeightfieldColumns): # heightfieldData[i] = 0.1 if (i%2)==0: heightfieldData[i] = np.random.uniform(self.h_lim[0],self.h_lim[1]) else: heightfieldData[i] = 0 terrainShape = p.createCollisionShape(shapeType = p.GEOM_HEIGHTFIELD, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, # this flag ensures foot does not get stuck meshScale=[1,1,1], heightfieldTextureScaling=(numHeightfieldRows-1)/2, heightfieldData=heightfieldData, numHeightfieldRows=numHeightfieldRows, numHeightfieldColumns=numHeightfieldColumns) textureId = p.loadTexture("heightmaps/wm_height_out.png") terrain = p.createMultiBody(0, terrainShape) # p.changeVisualShape(terrain, -1, rgbaColor=[1,1,1,1]) p.changeVisualShape(terrain, -1, textureUniqueId = textureId) # Remove the previous terrain and establish the new one # Note: first time this function is called, the default terrain of minitaur_env # which is plane ground is removed. Subsequent calls remove the previous heightfield self.minitaur_env._pybullet_client.removeBody(self.minitaur_env.ground_id) self.minitaur_env.ground_id = terrain return terrain def is_fallen(self): """Decide whether the minitaur has fallen. If the up directions between the base and the world is larger (the dot product is smaller than 0.5), the minitaur is considered fallen. Returns: Boolean value that indicates whether the minitaur has fallen. 
""" orientation = self.minitaur_env.minitaur.GetBaseOrientation() rot_mat = self.minitaur_env._pybullet_client.getMatrixFromQuaternion(orientation) local_up = rot_mat[6:] return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.3) def generate_steps(self, numObs=25): p = self.p numObs *= 2 linkMasses = [None] * (numObs) colIdxs = [None] * (numObs) visIdxs = [None] * (numObs) posObs = [None] * (numObs) orientObs = [None] * (numObs) parentIdxs = [None] * (numObs) linkInertialFramePositions = [None] * (numObs) linkInertialFrameOrientations = [None] * (numObs) linkJointTypes = [None] * (numObs) linkJointAxis = [None] * (numObs) for obs in range(numObs): linkMasses[obs] = 0.0 parentIdxs[obs] = 0 linkInertialFramePositions[obs] = [0, 0, 0] linkInertialFrameOrientations[obs] = [0, 0, 0, 1] linkJointTypes[obs] = p.JOINT_FIXED linkJointAxis[obs] = np.array([0, 0, 1]) orientObs[obs] = p.getQuaternionFromEuler([0., np.pi/4, 0]) posObs, orientObs, colIdxs, visIdxs = self._generate_steps_sub(p, posObs, orientObs, colIdxs, numObs) obsUid = p.createMultiBody(baseCollisionShapeIndex=-1, baseVisualShapeIndex=-1, basePosition=[0, 0, 0], baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0], baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses, linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs, linkPositions=posObs, linkOrientations=orientObs, linkParentIndices=parentIdxs, linkInertialFramePositions=linkInertialFramePositions, linkInertialFrameOrientations=linkInertialFrameOrientations, linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis) for obs in range(numObs): p.changeVisualShape(obsUid, visIdxs[obs], textureUniqueId=self.terraintextureId) x_goal = self.goal y_goal = 0 posObs = np.array([None] * 3) posObs[0] = x_goal posObs[1] = y_goal posObs[2] = 0 # set z at ground level # colIdxs = p.createCollisionShape(p.GEOM_BOX, halfExtents=[0.1,5.0,0.1]) colIdxs = -1 visIdxs = p.createVisualShape(p.GEOM_BOX, 
halfExtents=[0.05,5.0,0.15], rgbaColor=[0.7, 0, 0, 1]) linkMasses = 0.0 parentIdxs = 0 linkInertialFramePositions = [0, 0, 0] linkInertialFrameOrientations = [0, 0, 0, 1] linkJointTypes = p.JOINT_FIXED linkJointAxis = np.array([0, 0, 1]) orientObs = p.getQuaternionFromEuler([0., 0., 0.]) p.createMultiBody(baseCollisionShapeIndex=colIdxs, baseVisualShapeIndex=visIdxs, basePosition=posObs)#, return obsUid def _generate_steps_sub(self, p, posObs, orientObs, colIdxs, numObs): visIdxs = [None]*numObs for obs in range(int(numObs/2)): # Cylindrical obstacles posObs_obs1 = [None] * 3 posObs_obs2 = [None] * 3 theta = np.random.rand(1)*(self.max_angle*math.pi/180)*(2/3) l1 = 0.5 l2 = l1 theta_rotate = theta h = l1 * np.sin(np.pi/4 - theta) /(2**0.5) d = (2**0.5) * l1 * np.cos(np.pi/4 + theta) halfExtents = [l1/2,5.0,l2/2] x_temp = 0.5 + obs*l1 y_temp = 0 posObs_obs1[0] = x_temp posObs_obs1[1] = y_temp posObs_obs1[2] = -h # set z at ground level posObs[2*obs] = posObs_obs1 colIdxs[2*obs] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents) orientObs[2*obs] = p.getQuaternionFromEuler([0., -theta_rotate, 0]) visIdxs[2*obs] = p.createVisualShape(p.GEOM_BOX, halfExtents=halfExtents,) # print(posObs_obs[0]) posObs_obs2[0] = x_temp + d # print(posObs_obs[0]) posObs_obs2[1] = y_temp posObs_obs2[2] = -h # set z at ground level posObs[2*obs+1] = posObs_obs2 colIdxs[2*obs+1] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents) orientObs[2*obs+1] = p.getQuaternionFromEuler([0., theta_rotate, 0]) visIdxs[2*obs+1] = p.createVisualShape(p.GEOM_BOX, halfExtents=halfExtents,) return posObs, orientObs, colIdxs, visIdxs # def _get_bounding_amplitude(self, prim): # return self.prim_lib[prim] def
(self, policy, goal, alpha, time_step=0.01, speed=40, comp_len=10, prim_horizon=50, image_size=50, device=torch.device('cuda'), record_vid=False, vid_num=0): if record_vid: import cv2 # videoObj = cv2.VideoWriter('video'+str(vid_num)+'.avi', cv2.VideoWriter_fourcc('M','J','P','G'), # 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT)) videoObj = cv2.VideoWriter('video'+str(vid_num)+'.mp4', cv2.VideoWriter_fourcc(*'mp4v'), 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT)) t_flag = 0 goal_cost = np.zeros(1) coll_cost = np.ones(1) cost = goal_cost + coll_cost total_time_steps = comp_len * prim_horizon # baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0], # baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses, # linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs, # linkPositions=posObs) # , linkOrientations=orientObs, linkParentIndices=parentIdxs, # linkInertialFramePositions=linkInertialFramePositions, # linkInertialFrameOrientations=linkInertialFrameOrientations, # linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis) for i in range(5): action = [0,0,0,0,0,0,0,0] self.minitaur_env.step(action) for i in range(comp_len): # Get current depth map cam_pos = list(self.minitaur_env.minitaur.GetBasePosition()) cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation()) _, depth = self._mount_cam(cam_pos, cam_orn) # Decide primitive from the policy depth = torch.Tensor(depth).view([1, 1, image_size, image_size]) assert depth.nelement()!=0,"Tensor is empty." 
motor_angles = torch.Tensor(self.minitaur_env.minitaur.GetMotorAngles()).view([1,8]).detach() motor_velocities = torch.Tensor(self.minitaur_env.minitaur.GetMotorVelocities()).view([1,8]).detach() base_pos = torch.Tensor(self.minitaur_env.minitaur.GetBasePosition()).view([1,3]).detach() base_orn = torch.Tensor(self.minitaur_env.minitaur.GetBaseRollPitchYaw()).view([1,3]).detach() control_params = policy(depth.to(device), motor_angles.to(device), motor_velocities.to(device), base_pos.to(device), base_orn.to(device))[0] amplitude1 = (control_params[0].item()*0.8)+0.2 # amplitude1 = torch.clamp(control_params[0], min=0.2, max=1.0).item() amplitude2 = (control_params[1].item()*0.8)+0.2 # amplitude2 = torch.clamp(control_params[0], min=0.2, max=1.0).item() steering_amplitude = torch.clamp(control_params[2], min=0.0, max=min(1-amplitude1, 1-amplitude2)).item() # phase1 = control_params[3].item() * math.pi # phase2 = control_params[4].item() * math.pi speed = control_params[3].item()*20 + 20 for step_counter in range(prim_horizon): t = step_counter * time_step + t_flag # if t>4.1: # import matplotlib.pyplot as plt # cam_pos = list(self.minitaur_env.minitaur.GetBasePosition()) # cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation()) # rgb, _ = self._mount_cam(cam_pos, cam_orn, w=500, h=500) # fig = plt.figure() # ax = plt.subplot(111) # ax.set_yticklabels([]) # ax.set_xticklabels([]) # plt.imshow(rgb, cmap='gray', interpolation='nearest') # plt.savefig('minitaur_rgb_view.png') # time.sleep(600) # amplitude1 = 0.5 # amplitude2 = 0.5 # steering_amplitude = 0.0 # speed = 50 if record_vid: rgb = self.minitaur_env.render() cv2.imshow('Vis_Vid_Rec', cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR)) cv2.waitKey(1) videoObj.write(cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR)) phase1 = math.pi phase2 = phase1 # Applying asymmetrical sine gaits to different legs can steer the minitaur. 
a1 = math.sin(t * speed) * (amplitude1 + steering_amplitude) a2 = math.sin(t * speed + phase1) * (amplitude1 - steering_amplitude) a3 = math.sin(t * speed) * amplitude2 a4 = math.sin(t * speed + phase2) * amplitude2 action = [a1, a2, a2, a1, a3, a4, a4, a3] self.minitaur_env.step(action) # Compute costs # rob_pos = np.array(cam_pos[0:2]) rob_pos = cam_pos[0] # goal_cost = np.linalg.norm(rob_pos-goal, ord=2)/10 goal_cost = np.abs(rob_pos-goal)/goal fall_cost = 1 - (step_counter + i*prim_horizon)/(total_time_steps-1) cost = alpha * fall_cost + (1-alpha) * goal_cost end_position = self.minitaur_env.minitaur.GetBasePosition() end_position = end_position[0] if self.is_fallen(): if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position if end_position>goal: goal_cost = 0. cost = alpha * fall_cost + (1-alpha) * goal_cost if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position end_position = self.minitaur_env.minitaur.GetBasePosition() end_position = end_position[0] # print(end_position) # print("Speed:", np.linalg.norm(start_position-end_position)/(prim_horizon*time_step)) t_flag += prim_horizon*time_step if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position def _mount_cam(self, base_p, base_o, w=50, h=50): ''' Mounts an RGB-D camera on a robot in pybullet Parameters ---------- w : Width h : Height base_p : Base position base_o : Base orientation as a quaternion Returns ------- rgb : RGB image depth : Depth map ''' p = self.p cam_pos = base_p # Rotation matrix rot_matrix = p.getMatrixFromQuaternion(base_o) rot_matrix = np.array(rot_matrix).reshape(3, 3) # Initial vectors init_camera_vector = (1, 0, -0.05) # x-axis init_up_vector = (0, 0, 1) # z-axis cam_translate = (0.1,0,0.1) # Shift camera from the base poitions w.r.t. 
to the link's local frame # Rotated vectors camera_vector = rot_matrix.dot(init_camera_vector) up_vector = rot_matrix.dot(init_up_vector) cam_pos = cam_pos + rot_matrix.dot(cam_translate) view_matrix = p.computeViewMatrix(cam_pos, cam_pos + 0.1 * camera_vector, up_vector) # Get Image projection_matrix = p.computeProjectionMatrixFOV(fov=90.0, aspect=1., nearVal=0.01, farVal=1000.) # heightfield does not work with the tiny renderer, have to use the openGL renderer _, _, rgb, depth, _ = p.getCameraImage(w, h, view_matrix, projection_matrix)#, flags=p.ER_NO_SEGMENTATION_MASK) # Reshape rgb image and drop the alpha layer (#4) rgb = np.array(rgb, dtype=np.uint8) rgb = np.reshape(rgb, (w, h, 4)) rgb = rgb[:, :, :3] # Reshape depth map depth = np.array(depth, dtype=np.float32) far=1000.0 near=0.01 depth = far*near/(far - (far - near)*depth) depth = np.reshape(depth, (w, h)) return rgb, depth if __name__ == '__main__': env = Environment([0,0.1], gui=True) env.terrain = env.generate_htfield() env.minitaur_env.reset() policy = Policy() policy = policy.to('cuda') cost, coll_cost, goal_cost = env.execute_policy(policy, goal=np.ones(2), alpha=0.5) print(cost, coll_cost, goal_cost)
execute_policy
identifier_name
Minitaur_Env.py
#!/usr/bin/env python3 import os # SUPPRESS PRINTING null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)] save = os.dup(1), os.dup(2) os.dup2(null_fds[0], 1) os.dup2(null_fds[1], 2) import numpy as np from pybullet_envs.minitaur.envs import minitaur_gym_env import math from policy.minitaur_policy import Policy import torch import warnings warnings.filterwarnings('ignore') # Set the initial position from pybullet_envs.minitaur.envs import minitaur minitaur.INIT_POSITION = [0, 0, 0.2] # ENABLE PRINTING os.dup2(save[0], 1) os.dup2(save[1], 2) os.close(null_fds[0]) os.close(null_fds[1]) class Environment: def __init__(self, max_angle, time_step=0.02, gui=False): self.gui = gui self.max_angle = max_angle if self.gui: self.minitaur_env = minitaur_gym_env.MinitaurGymEnv( urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION, render=True, motor_velocity_limit=np.inf, pd_control_enabled=True, hard_reset=False, on_rack=False, reflection=False) else: self.minitaur_env = minitaur_gym_env.MinitaurGymEnv( urdf_version=minitaur_gym_env.DERPY_V0_URDF_VERSION, render=False, motor_velocity_limit=np.inf, pd_control_enabled=True, hard_reset=False, on_rack=False) self.minitaur_env.minitaur.time_step = time_step self.p = self.minitaur_env._pybullet_client # self.minitaur_env.minitaur.SetFootFriction(1.0) # self.minitaur_env.minitaur.SetFootRestitution(0.1) # self.prim_lib = np.load('prim_lib.npy') # textureId = self.p.loadTexture("heightmaps/table.png") # self.p.changeVisualShape(self.minitaur_env.ground_id, -1, textureUniqueId=textureId) self.terraintextureId = self.p.loadTexture("heightmaps/oak-wood.png") def generate_htfield(self, num_rows=12): '''Generate a heightfield. 
Resource: https://github.com/bulletphysics/bullet3/blob/master/examples/pybullet/examples/heightfield.py''' p = self.p numHeightfieldRows = num_rows numHeightfieldColumns = num_rows heightfieldData = [0]*numHeightfieldRows*numHeightfieldColumns for i in range(numHeightfieldRows*numHeightfieldColumns): # heightfieldData[i] = 0.1 if (i%2)==0: heightfieldData[i] = np.random.uniform(self.h_lim[0],self.h_lim[1]) else: heightfieldData[i] = 0 terrainShape = p.createCollisionShape(shapeType = p.GEOM_HEIGHTFIELD, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, # this flag ensures foot does not get stuck meshScale=[1,1,1], heightfieldTextureScaling=(numHeightfieldRows-1)/2, heightfieldData=heightfieldData, numHeightfieldRows=numHeightfieldRows, numHeightfieldColumns=numHeightfieldColumns) textureId = p.loadTexture("heightmaps/wm_height_out.png") terrain = p.createMultiBody(0, terrainShape) # p.changeVisualShape(terrain, -1, rgbaColor=[1,1,1,1]) p.changeVisualShape(terrain, -1, textureUniqueId = textureId) # Remove the previous terrain and establish the new one # Note: first time this function is called, the default terrain of minitaur_env # which is plane ground is removed. Subsequent calls remove the previous heightfield self.minitaur_env._pybullet_client.removeBody(self.minitaur_env.ground_id) self.minitaur_env.ground_id = terrain return terrain def is_fallen(self): """Decide whether the minitaur has fallen. If the up directions between the base and the world is larger (the dot product is smaller than 0.5), the minitaur is considered fallen. Returns: Boolean value that indicates whether the minitaur has fallen. 
""" orientation = self.minitaur_env.minitaur.GetBaseOrientation() rot_mat = self.minitaur_env._pybullet_client.getMatrixFromQuaternion(orientation) local_up = rot_mat[6:] return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.3) def generate_steps(self, numObs=25): p = self.p numObs *= 2 linkMasses = [None] * (numObs) colIdxs = [None] * (numObs) visIdxs = [None] * (numObs) posObs = [None] * (numObs) orientObs = [None] * (numObs) parentIdxs = [None] * (numObs) linkInertialFramePositions = [None] * (numObs) linkInertialFrameOrientations = [None] * (numObs) linkJointTypes = [None] * (numObs) linkJointAxis = [None] * (numObs) for obs in range(numObs): linkMasses[obs] = 0.0 parentIdxs[obs] = 0 linkInertialFramePositions[obs] = [0, 0, 0] linkInertialFrameOrientations[obs] = [0, 0, 0, 1] linkJointTypes[obs] = p.JOINT_FIXED linkJointAxis[obs] = np.array([0, 0, 1]) orientObs[obs] = p.getQuaternionFromEuler([0., np.pi/4, 0]) posObs, orientObs, colIdxs, visIdxs = self._generate_steps_sub(p, posObs, orientObs, colIdxs, numObs) obsUid = p.createMultiBody(baseCollisionShapeIndex=-1, baseVisualShapeIndex=-1, basePosition=[0, 0, 0], baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0], baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses, linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs, linkPositions=posObs, linkOrientations=orientObs, linkParentIndices=parentIdxs, linkInertialFramePositions=linkInertialFramePositions, linkInertialFrameOrientations=linkInertialFrameOrientations, linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis) for obs in range(numObs):
x_goal = self.goal y_goal = 0 posObs = np.array([None] * 3) posObs[0] = x_goal posObs[1] = y_goal posObs[2] = 0 # set z at ground level # colIdxs = p.createCollisionShape(p.GEOM_BOX, halfExtents=[0.1,5.0,0.1]) colIdxs = -1 visIdxs = p.createVisualShape(p.GEOM_BOX, halfExtents=[0.05,5.0,0.15], rgbaColor=[0.7, 0, 0, 1]) linkMasses = 0.0 parentIdxs = 0 linkInertialFramePositions = [0, 0, 0] linkInertialFrameOrientations = [0, 0, 0, 1] linkJointTypes = p.JOINT_FIXED linkJointAxis = np.array([0, 0, 1]) orientObs = p.getQuaternionFromEuler([0., 0., 0.]) p.createMultiBody(baseCollisionShapeIndex=colIdxs, baseVisualShapeIndex=visIdxs, basePosition=posObs)#, return obsUid def _generate_steps_sub(self, p, posObs, orientObs, colIdxs, numObs): visIdxs = [None]*numObs for obs in range(int(numObs/2)): # Cylindrical obstacles posObs_obs1 = [None] * 3 posObs_obs2 = [None] * 3 theta = np.random.rand(1)*(self.max_angle*math.pi/180)*(2/3) l1 = 0.5 l2 = l1 theta_rotate = theta h = l1 * np.sin(np.pi/4 - theta) /(2**0.5) d = (2**0.5) * l1 * np.cos(np.pi/4 + theta) halfExtents = [l1/2,5.0,l2/2] x_temp = 0.5 + obs*l1 y_temp = 0 posObs_obs1[0] = x_temp posObs_obs1[1] = y_temp posObs_obs1[2] = -h # set z at ground level posObs[2*obs] = posObs_obs1 colIdxs[2*obs] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents) orientObs[2*obs] = p.getQuaternionFromEuler([0., -theta_rotate, 0]) visIdxs[2*obs] = p.createVisualShape(p.GEOM_BOX, halfExtents=halfExtents,) # print(posObs_obs[0]) posObs_obs2[0] = x_temp + d # print(posObs_obs[0]) posObs_obs2[1] = y_temp posObs_obs2[2] = -h # set z at ground level posObs[2*obs+1] = posObs_obs2 colIdxs[2*obs+1] = p.createCollisionShape(p.GEOM_BOX, flags = p.GEOM_CONCAVE_INTERNAL_EDGE, halfExtents=halfExtents) orientObs[2*obs+1] = p.getQuaternionFromEuler([0., theta_rotate, 0]) visIdxs[2*obs+1] = p.createVisualShape(p.GEOM_BOX, halfExtents=halfExtents,) return posObs, orientObs, colIdxs, visIdxs # def 
_get_bounding_amplitude(self, prim): # return self.prim_lib[prim] def execute_policy(self, policy, goal, alpha, time_step=0.01, speed=40, comp_len=10, prim_horizon=50, image_size=50, device=torch.device('cuda'), record_vid=False, vid_num=0): if record_vid: import cv2 # videoObj = cv2.VideoWriter('video'+str(vid_num)+'.avi', cv2.VideoWriter_fourcc('M','J','P','G'), # 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT)) videoObj = cv2.VideoWriter('video'+str(vid_num)+'.mp4', cv2.VideoWriter_fourcc(*'mp4v'), 50, (minitaur_gym_env.RENDER_WIDTH, minitaur_gym_env.RENDER_HEIGHT)) t_flag = 0 goal_cost = np.zeros(1) coll_cost = np.ones(1) cost = goal_cost + coll_cost total_time_steps = comp_len * prim_horizon # baseOrientation=[0, 0, 0, 1], baseInertialFramePosition=[0, 0, 0], # baseInertialFrameOrientation=[0, 0, 0, 1], linkMasses=linkMasses, # linkCollisionShapeIndices=colIdxs, linkVisualShapeIndices=visIdxs, # linkPositions=posObs) # , linkOrientations=orientObs, linkParentIndices=parentIdxs, # linkInertialFramePositions=linkInertialFramePositions, # linkInertialFrameOrientations=linkInertialFrameOrientations, # linkJointTypes=linkJointTypes, linkJointAxis=linkJointAxis) for i in range(5): action = [0,0,0,0,0,0,0,0] self.minitaur_env.step(action) for i in range(comp_len): # Get current depth map cam_pos = list(self.minitaur_env.minitaur.GetBasePosition()) cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation()) _, depth = self._mount_cam(cam_pos, cam_orn) # Decide primitive from the policy depth = torch.Tensor(depth).view([1, 1, image_size, image_size]) assert depth.nelement()!=0,"Tensor is empty." 
motor_angles = torch.Tensor(self.minitaur_env.minitaur.GetMotorAngles()).view([1,8]).detach() motor_velocities = torch.Tensor(self.minitaur_env.minitaur.GetMotorVelocities()).view([1,8]).detach() base_pos = torch.Tensor(self.minitaur_env.minitaur.GetBasePosition()).view([1,3]).detach() base_orn = torch.Tensor(self.minitaur_env.minitaur.GetBaseRollPitchYaw()).view([1,3]).detach() control_params = policy(depth.to(device), motor_angles.to(device), motor_velocities.to(device), base_pos.to(device), base_orn.to(device))[0] amplitude1 = (control_params[0].item()*0.8)+0.2 # amplitude1 = torch.clamp(control_params[0], min=0.2, max=1.0).item() amplitude2 = (control_params[1].item()*0.8)+0.2 # amplitude2 = torch.clamp(control_params[0], min=0.2, max=1.0).item() steering_amplitude = torch.clamp(control_params[2], min=0.0, max=min(1-amplitude1, 1-amplitude2)).item() # phase1 = control_params[3].item() * math.pi # phase2 = control_params[4].item() * math.pi speed = control_params[3].item()*20 + 20 for step_counter in range(prim_horizon): t = step_counter * time_step + t_flag # if t>4.1: # import matplotlib.pyplot as plt # cam_pos = list(self.minitaur_env.minitaur.GetBasePosition()) # cam_orn = list(self.minitaur_env.minitaur.GetTrueBaseOrientation()) # rgb, _ = self._mount_cam(cam_pos, cam_orn, w=500, h=500) # fig = plt.figure() # ax = plt.subplot(111) # ax.set_yticklabels([]) # ax.set_xticklabels([]) # plt.imshow(rgb, cmap='gray', interpolation='nearest') # plt.savefig('minitaur_rgb_view.png') # time.sleep(600) # amplitude1 = 0.5 # amplitude2 = 0.5 # steering_amplitude = 0.0 # speed = 50 if record_vid: rgb = self.minitaur_env.render() cv2.imshow('Vis_Vid_Rec', cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR)) cv2.waitKey(1) videoObj.write(cv2.cvtColor(np.uint8(rgb), cv2.COLOR_RGB2BGR)) phase1 = math.pi phase2 = phase1 # Applying asymmetrical sine gaits to different legs can steer the minitaur. 
a1 = math.sin(t * speed) * (amplitude1 + steering_amplitude) a2 = math.sin(t * speed + phase1) * (amplitude1 - steering_amplitude) a3 = math.sin(t * speed) * amplitude2 a4 = math.sin(t * speed + phase2) * amplitude2 action = [a1, a2, a2, a1, a3, a4, a4, a3] self.minitaur_env.step(action) # Compute costs # rob_pos = np.array(cam_pos[0:2]) rob_pos = cam_pos[0] # goal_cost = np.linalg.norm(rob_pos-goal, ord=2)/10 goal_cost = np.abs(rob_pos-goal)/goal fall_cost = 1 - (step_counter + i*prim_horizon)/(total_time_steps-1) cost = alpha * fall_cost + (1-alpha) * goal_cost end_position = self.minitaur_env.minitaur.GetBasePosition() end_position = end_position[0] if self.is_fallen(): if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position if end_position>goal: goal_cost = 0. cost = alpha * fall_cost + (1-alpha) * goal_cost if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position end_position = self.minitaur_env.minitaur.GetBasePosition() end_position = end_position[0] # print(end_position) # print("Speed:", np.linalg.norm(start_position-end_position)/(prim_horizon*time_step)) t_flag += prim_horizon*time_step if record_vid: videoObj.release() return cost, fall_cost, goal_cost, end_position def _mount_cam(self, base_p, base_o, w=50, h=50): ''' Mounts an RGB-D camera on a robot in pybullet Parameters ---------- w : Width h : Height base_p : Base position base_o : Base orientation as a quaternion Returns ------- rgb : RGB image depth : Depth map ''' p = self.p cam_pos = base_p # Rotation matrix rot_matrix = p.getMatrixFromQuaternion(base_o) rot_matrix = np.array(rot_matrix).reshape(3, 3) # Initial vectors init_camera_vector = (1, 0, -0.05) # x-axis init_up_vector = (0, 0, 1) # z-axis cam_translate = (0.1,0,0.1) # Shift camera from the base poitions w.r.t. 
to the link's local frame # Rotated vectors camera_vector = rot_matrix.dot(init_camera_vector) up_vector = rot_matrix.dot(init_up_vector) cam_pos = cam_pos + rot_matrix.dot(cam_translate) view_matrix = p.computeViewMatrix(cam_pos, cam_pos + 0.1 * camera_vector, up_vector) # Get Image projection_matrix = p.computeProjectionMatrixFOV(fov=90.0, aspect=1., nearVal=0.01, farVal=1000.) # heightfield does not work with the tiny renderer, have to use the openGL renderer _, _, rgb, depth, _ = p.getCameraImage(w, h, view_matrix, projection_matrix)#, flags=p.ER_NO_SEGMENTATION_MASK) # Reshape rgb image and drop the alpha layer (#4) rgb = np.array(rgb, dtype=np.uint8) rgb = np.reshape(rgb, (w, h, 4)) rgb = rgb[:, :, :3] # Reshape depth map depth = np.array(depth, dtype=np.float32) far=1000.0 near=0.01 depth = far*near/(far - (far - near)*depth) depth = np.reshape(depth, (w, h)) return rgb, depth if __name__ == '__main__': env = Environment([0,0.1], gui=True) env.terrain = env.generate_htfield() env.minitaur_env.reset() policy = Policy() policy = policy.to('cuda') cost, coll_cost, goal_cost = env.execute_policy(policy, goal=np.ones(2), alpha=0.5) print(cost, coll_cost, goal_cost)
p.changeVisualShape(obsUid, visIdxs[obs], textureUniqueId=self.terraintextureId)
conditional_block
content-parse.ts
// @unimport-disable import type { mastodon } from 'masto' import type { Node } from 'ultrahtml' import { DOCUMENT_NODE, ELEMENT_NODE, TEXT_NODE, h, parse, render } from 'ultrahtml' import { findAndReplaceEmojisInText } from '@iconify/utils' import { decode } from 'tiny-decode' import { emojiRegEx, getEmojiAttributes } from '../config/emojis' export interface ContentParseOptions { emojis?: Record<string, mastodon.v1.CustomEmoji> hideEmojis?: boolean mentions?: mastodon.v1.StatusMention[] markdown?: boolean replaceUnicodeEmoji?: boolean astTransforms?: Transform[] convertMentionLink?: boolean collapseMentionLink?: boolean status?: mastodon.v1.Status inReplyToStatus?: mastodon.v1.Status } const sanitizerBasicClasses = filterClasses(/^(h-\S*|p-\S*|u-\S*|dt-\S*|e-\S*|mention|hashtag|ellipsis|invisible)$/u) const sanitizer = sanitize({ // Allow basic elements as seen in https://github.com/mastodon/mastodon/blob/17f79082b098e05b68d6f0d38fabb3ac121879a9/lib/sanitize_ext/sanitize_config.rb br: {}, p: {}, a: { href: filterHref(), class: sanitizerBasicClasses, rel: set('nofollow noopener noreferrer'), target: set('_blank'), }, span: { class: sanitizerBasicClasses, }, // Allow elements potentially created for Markdown code blocks above pre: {}, code: { class: filterClasses(/^language-\w+$/), }, // Other elements supported in glitch, as seen in // https://github.com/glitch-soc/mastodon/blob/13227e1dafd308dfe1a3effc3379b766274809b3/lib/sanitize_ext/sanitize_config.rb#L75 abbr: { title: keep, }, del: {}, blockquote: { cite: filterHref(), }, b: {}, strong: {}, u: {}, sub: {}, sup: {}, i: {}, em: {}, h1: {}, h2: {}, h3: {}, h4: {}, h5: {}, ul: {}, ol: { start: keep, reversed: keep, }, li: { value: keep, }, }) /** * Parse raw HTML form Mastodon server to AST, * with interop of custom emojis and inline Markdown syntax */ export function parseMastodonHTML( html: string, options: ContentParseOptions = {}, ) { const { markdown = true, replaceUnicodeEmoji = true, convertMentionLink = 
false, collapseMentionLink = false, hideEmojis = false, mentions, status, inReplyToStatus, } = options if (markdown) { // Handle code blocks html = html .replace(/>(```|~~~)(\w*)([\s\S]+?)\1/g, (_1, _2, lang: string, raw: string) => { const code = htmlToText(raw) .replace(/</g, '&lt;') .replace(/>/g, '&gt;') .replace(/`/g, '&#96;') const classes = lang ? ` class="language-${lang}"` : '' return `><pre><code${classes}>${code}</code></pre>` }) .replace(/`([^`\n]*)`/g, (_1, raw) => { return raw ? `<code>${htmlToText(raw).replace(/</g, '&lt;').replace(/>/g, '&gt;')}</code>` : '' }) } // Always sanitize the raw HTML data *after* it has been modified const transforms: Transform[] = [ sanitizer, ...options.astTransforms || [], ] if (hideEmojis) { transforms.push(removeUnicodeEmoji) transforms.push(removeCustomEmoji(options.emojis ?? {})) } else { if (replaceUnicodeEmoji) transforms.push(transformUnicodeEmoji) transforms.push(replaceCustomEmoji(options.emojis ?? {})) } if (markdown) transforms.push(transformMarkdown) if (mentions?.length) transforms.push(createTransformNamedMentions(mentions)) if (convertMentionLink) transforms.push(transformMentionLink) transforms.push(transformParagraphs) if (collapseMentionLink) transforms.push(transformCollapseMentions(status, inReplyToStatus)) return transformSync(parse(html), transforms) } /** * Converts raw HTML form Mastodon server to HTML for Tiptap editor */ export function convertMastodonHTML(html: string, customEmojis: Record<string, mastodon.v1.CustomEmoji> = {}) { const tree = parseMastodonHTML(html, { emojis: customEmojis, markdown: true, convertMentionLink: true, }) return render(tree) } export function htmlToText(html: string) { try { const tree = parse(html) return (tree.children as Node[]).map(n => treeToText(n)).join('').trim() } catch (err) { console.error(err) return '' } } export function recursiveTreeToText(input: Node): string { if (input && input.children && input.children.length > 0) return input.children.map((n: 
Node) => recursiveTreeToText(n)).join('') else return treeToText(input) } const emojiIdNeedsWrappingRE = /^(\d|\w|-|_)+$/ export function treeToText(input: Node): string { let pre = '' let body = '' let post = '' if (input.type === TEXT_NODE) return decode(input.value) if (input.name === 'br') return '\n' if (['p', 'pre'].includes(input.name)) pre = '\n' if (input.attributes?.['data-type'] === 'mention') { const acct = input.attributes['data-id'] if (acct) return acct.startsWith('@') ? acct : `@${acct}` } if (input.name === 'code') { if (input.parent?.name === 'pre') { const lang = input.attributes.class?.replace('language-', '') pre = `\`\`\`${lang || ''}\n` post = '\n```' } else { pre = '`' post = '`' } } else if (input.name === 'b' || input.name === 'strong') { pre = '**' post = '**' } else if (input.name === 'i' || input.name === 'em') { pre = '*' post = '*' } else if (input.name === 'del') { pre = '~~' post = '~~' } if ('children' in input) body = (input.children as Node[]).map(n => treeToText(n)).join('') if (input.name === 'img' || input.name === 'picture') { if (input.attributes.class?.includes('custom-emoji')) { const id = input.attributes['data-emoji-id'] ?? input.attributes.alt ?? input.attributes.title ?? 'unknown' return id.match(emojiIdNeedsWrappingRE) ? `:${id}:` : id } if (input.attributes.class?.includes('iconify-emoji')) return input.attributes.alt } return pre + body + post } // A tree transform function takes an ultrahtml Node object and returns // new content that will replace the given node in the tree. // Returning a null removes the node from the tree. // Strings get converted to text nodes. // The input node's children have been transformed before the node itself // gets transformed. type Transform = (node: Node, root: Node) => (Node | string)[] | Node | string | null // Helpers for transforming (filtering, modifying, ...) a parsed HTML tree // by running the given chain of transform functions one-by-one. 
function transformSync(doc: Node, transforms: Transform[]) { function
(node: Node, transform: Transform, root: Node) { if (Array.isArray(node.children)) { const children = [] as (Node | string)[] for (let i = 0; i < node.children.length; i++) { const result = visit(node.children[i], transform, root) if (Array.isArray(result)) children.push(...result) else if (result) children.push(result) } node.children = children.map((value) => { if (typeof value === 'string') return { type: TEXT_NODE, value, parent: node } value.parent = node return value }) } return transform(node, root) } for (const transform of transforms) doc = visit(doc, transform, doc) as Node return doc } // A tree transform for sanitizing elements & their attributes. type AttrSanitizers = Record<string, (value: string | undefined) => string | undefined> function sanitize(allowedElements: Record<string, AttrSanitizers>): Transform { return (node) => { if (node.type !== ELEMENT_NODE) return node if (!Object.prototype.hasOwnProperty.call(allowedElements, node.name)) return null const attrSanitizers = allowedElements[node.name] const attrs = {} as Record<string, string> for (const [name, func] of Object.entries(attrSanitizers)) { const value = func(node.attributes[name]) if (value !== undefined) attrs[name] = value } node.attributes = attrs return node } } function filterClasses(allowed: RegExp) { return (c: string | undefined) => { if (!c) return undefined return c.split(/\s/g).filter(cls => allowed.test(cls)).join(' ') } } function keep(value: string | undefined) { return value } function set(value: string) { return () => value } function filterHref() { const LINK_PROTOCOLS = new Set([ 'http:', 'https:', 'dat:', 'dweb:', 'ipfs:', 'ipns:', 'ssb:', 'gopher:', 'xmpp:', 'magnet:', 'gemini:', ]) return (href: string | undefined) => { if (href === undefined) return undefined // Allow relative links if (href.startsWith('/') || href.startsWith('.')) return href let url try { url = new URL(href) } catch (err) { if (err instanceof TypeError) return undefined throw err } if 
(LINK_PROTOCOLS.has(url.protocol)) return url.toString() return '#' } } function removeUnicodeEmoji(node: Node) { if (node.type !== TEXT_NODE) return node let start = 0 const matches = [] as (string | Node)[] findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => { matches.push(result.slice(start).trimEnd()) start = result.length + match.match.length return undefined }) if (matches.length === 0) return node matches.push(node.value.slice(start)) return matches.filter(Boolean) } function transformUnicodeEmoji(node: Node) { if (node.type !== TEXT_NODE) return node let start = 0 const matches = [] as (string | Node)[] findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => { const attrs = getEmojiAttributes(match) matches.push(result.slice(start)) matches.push(h('img', { src: attrs.src, alt: attrs.alt, class: attrs.class })) start = result.length + match.match.length return undefined }) if (matches.length === 0) return node matches.push(node.value.slice(start)) return matches.filter(Boolean) } function removeCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform { return (node) => { if (node.type !== TEXT_NODE) return node const split = node.value.split(/\s?:([\w-]+?):/g) if (split.length === 1) return node return split.map((name, i) => { if (i % 2 === 0) return name const emoji = customEmojis[name] as mastodon.v1.CustomEmoji if (!emoji) return `:${name}:` return '' }).filter(Boolean) } } function replaceCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform { return (node) => { if (node.type !== TEXT_NODE) return node const split = node.value.split(/:([\w-]+?):/g) if (split.length === 1) return node return split.map((name, i) => { if (i % 2 === 0) return name const emoji = customEmojis[name] as mastodon.v1.CustomEmoji if (!emoji) return `:${name}:` return h( 'picture', { 'alt': `:${name}:`, 'class': 'custom-emoji', 'data-emoji-id': name, }, [ h( 'source', { srcset: emoji.staticUrl, media: 
'(prefers-reduced-motion: reduce)', }, ), h( 'img', { src: emoji.url, alt: `:${name}:`, }, ), ], ) }).filter(Boolean) } } const _markdownReplacements: [RegExp, (c: (string | Node)[]) => Node][] = [ [/\*\*\*(.*?)\*\*\*/g, ([c]) => h('b', null, [h('em', null, c)])], [/\*\*(.*?)\*\*/g, c => h('b', null, c)], [/\*(.*?)\*/g, c => h('em', null, c)], [/~~(.*?)~~/g, c => h('del', null, c)], [/`([^`]+?)`/g, c => h('code', null, c)], // transform @username@twitter.com as links [/\B@([a-zA-Z0-9_]+)@twitter\.com\b/gi, c => h('a', { href: `https://twitter.com/${c}`, target: '_blank', rel: 'nofollow noopener noreferrer', class: 'mention external' }, `@${c}@twitter.com`)], ] function _markdownProcess(value: string) { const results = [] as (string | Node)[] let start = 0 while (true) { let found: { match: RegExpMatchArray; replacer: (c: (string | Node)[]) => Node } | undefined for (const [re, replacer] of _markdownReplacements) { re.lastIndex = start const match = re.exec(value) if (match) { if (!found || match.index < found.match.index!) found = { match, replacer } } } if (!found) break results.push(value.slice(start, found.match.index)) results.push(found.replacer(_markdownProcess(found.match[1]))) start = found.match.index! + found.match[0].length } results.push(value.slice(start)) return results.filter(Boolean) } function transformMarkdown(node: Node) { if (node.type !== TEXT_NODE) return node return _markdownProcess(node.value) } function transformParagraphs(node: Node): Node | Node[] { // For top level paragraphs, inject an empty <p> to preserve status paragraphs in our editor (except for the last one) if (node.parent?.type === DOCUMENT_NODE && node.name === 'p' && node.parent.children.at(-1) !== node) return [node, h('p')] return node } function isMention(node: Node) { const child = node.children?.length === 1 ? 
node.children[0] : null return Boolean(child?.name === 'a' && child.attributes.class?.includes('mention')) } function isSpacing(node: Node) { return node.type === TEXT_NODE && !node.value.trim() } // Extract the username from a known mention node function getMentionHandle(node: Node): string | undefined { return hrefToHandle(node.children?.[0].attributes.href) ?? node.children?.[0]?.children?.[0]?.attributes?.['data-id'] } function transformCollapseMentions(status?: mastodon.v1.Status, inReplyToStatus?: mastodon.v1.Status): Transform { let processed = false return (node: Node, root: Node): Node | Node[] => { if (processed || node.parent !== root || !node.children) return node const mentions: (Node | undefined)[] = [] const children = node.children as Node[] let trimContentStart: (() => void) | undefined for (const child of children) { // mention if (isMention(child)) { mentions.push(child) } // spaces in between else if (isSpacing(child)) { mentions.push(child) } // other content, stop collapsing else { if (child.type === TEXT_NODE) { trimContentStart = () => { child.value = child.value.trimStart() } } // remove <br> after mention if (child.name === 'br') mentions.push(undefined) break } } processed = true if (mentions.length === 0) return node let mentionsCount = 0 let contextualMentionsCount = 0 let removeNextSpacing = false const contextualMentions = mentions.filter((mention) => { if (!mention) return false if (removeNextSpacing && isSpacing(mention)) { removeNextSpacing = false return false } if (isMention(mention)) { mentionsCount++ if (inReplyToStatus) { const mentionHandle = getMentionHandle(mention) if (inReplyToStatus.account.acct === mentionHandle || inReplyToStatus.mentions.some(m => m.acct === mentionHandle)) { removeNextSpacing = true return false } } contextualMentionsCount++ } return true }) as Node[] // We have a special case for single mentions that are part of a reply. 
// We already have the replying to badge in this case or the status is connected to the previous one. // This is needed because the status doesn't included the in Reply to handle, only the account id. // But this covers the majority of cases. const showMentions = !(contextualMentionsCount === 0 || (mentionsCount === 1 && status?.inReplyToAccountId)) const grouped = contextualMentionsCount > 2 if (!showMentions || grouped) trimContentStart?.() const contextualChildren = children.slice(mentions.length) const mentionNodes = showMentions ? (grouped ? [h('mention-group', null, ...contextualMentions)] : contextualMentions) : [] return { ...node, children: [...mentionNodes, ...contextualChildren], } } } function hrefToHandle(href: string): string | undefined { const matchUser = href.match(UserLinkRE) if (matchUser) { const [, server, username] = matchUser return `${username}@${server.replace(/(.+\.)(.+\..+)/, '$2')}` } } function transformMentionLink(node: Node): string | Node | (string | Node)[] | null { if (node.name === 'a' && node.attributes.class?.includes('mention')) { const href = node.attributes.href if (href) { const handle = hrefToHandle(href) if (handle) { // convert to Tiptap mention node return h('span', { 'data-type': 'mention', 'data-id': handle }, handle) } } } return node } function createTransformNamedMentions(mentions: mastodon.v1.StatusMention[]) { return (node: Node): string | Node | (string | Node)[] | null => { if (node.name === 'a' && node.attributes.class?.includes('mention')) { const href = node.attributes.href const mention = href && mentions.find(m => m.url === href) if (mention) { node.attributes.href = `/${currentServer.value}/@${mention.acct}` node.children = [h('span', { 'data-type': 'mention', 'data-id': mention.acct }, `@${mention.username}`)] return node } } return node } }
visit
identifier_name
content-parse.ts
// @unimport-disable import type { mastodon } from 'masto' import type { Node } from 'ultrahtml' import { DOCUMENT_NODE, ELEMENT_NODE, TEXT_NODE, h, parse, render } from 'ultrahtml' import { findAndReplaceEmojisInText } from '@iconify/utils' import { decode } from 'tiny-decode' import { emojiRegEx, getEmojiAttributes } from '../config/emojis' export interface ContentParseOptions { emojis?: Record<string, mastodon.v1.CustomEmoji> hideEmojis?: boolean mentions?: mastodon.v1.StatusMention[] markdown?: boolean replaceUnicodeEmoji?: boolean astTransforms?: Transform[] convertMentionLink?: boolean collapseMentionLink?: boolean status?: mastodon.v1.Status inReplyToStatus?: mastodon.v1.Status } const sanitizerBasicClasses = filterClasses(/^(h-\S*|p-\S*|u-\S*|dt-\S*|e-\S*|mention|hashtag|ellipsis|invisible)$/u) const sanitizer = sanitize({ // Allow basic elements as seen in https://github.com/mastodon/mastodon/blob/17f79082b098e05b68d6f0d38fabb3ac121879a9/lib/sanitize_ext/sanitize_config.rb br: {}, p: {}, a: { href: filterHref(), class: sanitizerBasicClasses, rel: set('nofollow noopener noreferrer'), target: set('_blank'), }, span: { class: sanitizerBasicClasses, }, // Allow elements potentially created for Markdown code blocks above pre: {}, code: { class: filterClasses(/^language-\w+$/), }, // Other elements supported in glitch, as seen in // https://github.com/glitch-soc/mastodon/blob/13227e1dafd308dfe1a3effc3379b766274809b3/lib/sanitize_ext/sanitize_config.rb#L75 abbr: { title: keep, }, del: {}, blockquote: { cite: filterHref(), }, b: {}, strong: {}, u: {}, sub: {}, sup: {}, i: {}, em: {}, h1: {}, h2: {}, h3: {}, h4: {}, h5: {}, ul: {}, ol: { start: keep, reversed: keep, }, li: { value: keep, }, }) /** * Parse raw HTML form Mastodon server to AST, * with interop of custom emojis and inline Markdown syntax */ export function parseMastodonHTML( html: string, options: ContentParseOptions = {}, ) { const { markdown = true, replaceUnicodeEmoji = true, convertMentionLink = 
false, collapseMentionLink = false, hideEmojis = false, mentions, status, inReplyToStatus, } = options if (markdown) { // Handle code blocks html = html .replace(/>(```|~~~)(\w*)([\s\S]+?)\1/g, (_1, _2, lang: string, raw: string) => { const code = htmlToText(raw) .replace(/</g, '&lt;') .replace(/>/g, '&gt;') .replace(/`/g, '&#96;') const classes = lang ? ` class="language-${lang}"` : '' return `><pre><code${classes}>${code}</code></pre>` }) .replace(/`([^`\n]*)`/g, (_1, raw) => { return raw ? `<code>${htmlToText(raw).replace(/</g, '&lt;').replace(/>/g, '&gt;')}</code>` : '' }) } // Always sanitize the raw HTML data *after* it has been modified const transforms: Transform[] = [ sanitizer, ...options.astTransforms || [], ] if (hideEmojis) { transforms.push(removeUnicodeEmoji) transforms.push(removeCustomEmoji(options.emojis ?? {})) } else { if (replaceUnicodeEmoji) transforms.push(transformUnicodeEmoji) transforms.push(replaceCustomEmoji(options.emojis ?? {})) } if (markdown) transforms.push(transformMarkdown) if (mentions?.length) transforms.push(createTransformNamedMentions(mentions)) if (convertMentionLink) transforms.push(transformMentionLink) transforms.push(transformParagraphs) if (collapseMentionLink) transforms.push(transformCollapseMentions(status, inReplyToStatus)) return transformSync(parse(html), transforms) } /** * Converts raw HTML form Mastodon server to HTML for Tiptap editor */ export function convertMastodonHTML(html: string, customEmojis: Record<string, mastodon.v1.CustomEmoji> = {}) { const tree = parseMastodonHTML(html, { emojis: customEmojis, markdown: true, convertMentionLink: true, }) return render(tree) } export function htmlToText(html: string) { try { const tree = parse(html) return (tree.children as Node[]).map(n => treeToText(n)).join('').trim() } catch (err) { console.error(err) return '' } } export function recursiveTreeToText(input: Node): string { if (input && input.children && input.children.length > 0) return input.children.map((n: 
Node) => recursiveTreeToText(n)).join('') else return treeToText(input) } const emojiIdNeedsWrappingRE = /^(\d|\w|-|_)+$/ export function treeToText(input: Node): string { let pre = '' let body = '' let post = '' if (input.type === TEXT_NODE) return decode(input.value) if (input.name === 'br') return '\n' if (['p', 'pre'].includes(input.name)) pre = '\n' if (input.attributes?.['data-type'] === 'mention') { const acct = input.attributes['data-id'] if (acct) return acct.startsWith('@') ? acct : `@${acct}` } if (input.name === 'code') { if (input.parent?.name === 'pre') { const lang = input.attributes.class?.replace('language-', '') pre = `\`\`\`${lang || ''}\n` post = '\n```' } else { pre = '`' post = '`' } } else if (input.name === 'b' || input.name === 'strong') { pre = '**' post = '**' } else if (input.name === 'i' || input.name === 'em') { pre = '*' post = '*' } else if (input.name === 'del') { pre = '~~' post = '~~' } if ('children' in input) body = (input.children as Node[]).map(n => treeToText(n)).join('') if (input.name === 'img' || input.name === 'picture') { if (input.attributes.class?.includes('custom-emoji')) { const id = input.attributes['data-emoji-id'] ?? input.attributes.alt ?? input.attributes.title ?? 'unknown' return id.match(emojiIdNeedsWrappingRE) ? `:${id}:` : id } if (input.attributes.class?.includes('iconify-emoji')) return input.attributes.alt } return pre + body + post } // A tree transform function takes an ultrahtml Node object and returns // new content that will replace the given node in the tree. // Returning a null removes the node from the tree. // Strings get converted to text nodes. // The input node's children have been transformed before the node itself // gets transformed. type Transform = (node: Node, root: Node) => (Node | string)[] | Node | string | null // Helpers for transforming (filtering, modifying, ...) a parsed HTML tree // by running the given chain of transform functions one-by-one. 
function transformSync(doc: Node, transforms: Transform[]) { function visit(node: Node, transform: Transform, root: Node) { if (Array.isArray(node.children)) { const children = [] as (Node | string)[] for (let i = 0; i < node.children.length; i++) { const result = visit(node.children[i], transform, root) if (Array.isArray(result)) children.push(...result) else if (result) children.push(result) } node.children = children.map((value) => { if (typeof value === 'string') return { type: TEXT_NODE, value, parent: node } value.parent = node return value }) } return transform(node, root) } for (const transform of transforms) doc = visit(doc, transform, doc) as Node return doc } // A tree transform for sanitizing elements & their attributes. type AttrSanitizers = Record<string, (value: string | undefined) => string | undefined> function sanitize(allowedElements: Record<string, AttrSanitizers>): Transform { return (node) => { if (node.type !== ELEMENT_NODE) return node if (!Object.prototype.hasOwnProperty.call(allowedElements, node.name)) return null const attrSanitizers = allowedElements[node.name] const attrs = {} as Record<string, string> for (const [name, func] of Object.entries(attrSanitizers)) { const value = func(node.attributes[name]) if (value !== undefined) attrs[name] = value } node.attributes = attrs return node } } function filterClasses(allowed: RegExp) { return (c: string | undefined) => { if (!c) return undefined return c.split(/\s/g).filter(cls => allowed.test(cls)).join(' ') } } function keep(value: string | undefined) { return value } function set(value: string) { return () => value } function filterHref() { const LINK_PROTOCOLS = new Set([ 'http:', 'https:', 'dat:', 'dweb:', 'ipfs:', 'ipns:', 'ssb:', 'gopher:', 'xmpp:', 'magnet:', 'gemini:', ]) return (href: string | undefined) => { if (href === undefined) return undefined // Allow relative links if (href.startsWith('/') || href.startsWith('.')) return href let url try { url = new URL(href) } catch (err) 
{ if (err instanceof TypeError) return undefined throw err } if (LINK_PROTOCOLS.has(url.protocol)) return url.toString() return '#' } } function removeUnicodeEmoji(node: Node) { if (node.type !== TEXT_NODE) return node let start = 0 const matches = [] as (string | Node)[] findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => { matches.push(result.slice(start).trimEnd()) start = result.length + match.match.length return undefined }) if (matches.length === 0) return node matches.push(node.value.slice(start)) return matches.filter(Boolean) } function transformUnicodeEmoji(node: Node) { if (node.type !== TEXT_NODE) return node let start = 0 const matches = [] as (string | Node)[] findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => { const attrs = getEmojiAttributes(match) matches.push(result.slice(start)) matches.push(h('img', { src: attrs.src, alt: attrs.alt, class: attrs.class })) start = result.length + match.match.length return undefined }) if (matches.length === 0) return node matches.push(node.value.slice(start)) return matches.filter(Boolean) } function removeCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform
function replaceCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform { return (node) => { if (node.type !== TEXT_NODE) return node const split = node.value.split(/:([\w-]+?):/g) if (split.length === 1) return node return split.map((name, i) => { if (i % 2 === 0) return name const emoji = customEmojis[name] as mastodon.v1.CustomEmoji if (!emoji) return `:${name}:` return h( 'picture', { 'alt': `:${name}:`, 'class': 'custom-emoji', 'data-emoji-id': name, }, [ h( 'source', { srcset: emoji.staticUrl, media: '(prefers-reduced-motion: reduce)', }, ), h( 'img', { src: emoji.url, alt: `:${name}:`, }, ), ], ) }).filter(Boolean) } } const _markdownReplacements: [RegExp, (c: (string | Node)[]) => Node][] = [ [/\*\*\*(.*?)\*\*\*/g, ([c]) => h('b', null, [h('em', null, c)])], [/\*\*(.*?)\*\*/g, c => h('b', null, c)], [/\*(.*?)\*/g, c => h('em', null, c)], [/~~(.*?)~~/g, c => h('del', null, c)], [/`([^`]+?)`/g, c => h('code', null, c)], // transform @username@twitter.com as links [/\B@([a-zA-Z0-9_]+)@twitter\.com\b/gi, c => h('a', { href: `https://twitter.com/${c}`, target: '_blank', rel: 'nofollow noopener noreferrer', class: 'mention external' }, `@${c}@twitter.com`)], ] function _markdownProcess(value: string) { const results = [] as (string | Node)[] let start = 0 while (true) { let found: { match: RegExpMatchArray; replacer: (c: (string | Node)[]) => Node } | undefined for (const [re, replacer] of _markdownReplacements) { re.lastIndex = start const match = re.exec(value) if (match) { if (!found || match.index < found.match.index!) found = { match, replacer } } } if (!found) break results.push(value.slice(start, found.match.index)) results.push(found.replacer(_markdownProcess(found.match[1]))) start = found.match.index! 
+ found.match[0].length } results.push(value.slice(start)) return results.filter(Boolean) } function transformMarkdown(node: Node) { if (node.type !== TEXT_NODE) return node return _markdownProcess(node.value) } function transformParagraphs(node: Node): Node | Node[] { // For top level paragraphs, inject an empty <p> to preserve status paragraphs in our editor (except for the last one) if (node.parent?.type === DOCUMENT_NODE && node.name === 'p' && node.parent.children.at(-1) !== node) return [node, h('p')] return node } function isMention(node: Node) { const child = node.children?.length === 1 ? node.children[0] : null return Boolean(child?.name === 'a' && child.attributes.class?.includes('mention')) } function isSpacing(node: Node) { return node.type === TEXT_NODE && !node.value.trim() } // Extract the username from a known mention node function getMentionHandle(node: Node): string | undefined { return hrefToHandle(node.children?.[0].attributes.href) ?? node.children?.[0]?.children?.[0]?.attributes?.['data-id'] } function transformCollapseMentions(status?: mastodon.v1.Status, inReplyToStatus?: mastodon.v1.Status): Transform { let processed = false return (node: Node, root: Node): Node | Node[] => { if (processed || node.parent !== root || !node.children) return node const mentions: (Node | undefined)[] = [] const children = node.children as Node[] let trimContentStart: (() => void) | undefined for (const child of children) { // mention if (isMention(child)) { mentions.push(child) } // spaces in between else if (isSpacing(child)) { mentions.push(child) } // other content, stop collapsing else { if (child.type === TEXT_NODE) { trimContentStart = () => { child.value = child.value.trimStart() } } // remove <br> after mention if (child.name === 'br') mentions.push(undefined) break } } processed = true if (mentions.length === 0) return node let mentionsCount = 0 let contextualMentionsCount = 0 let removeNextSpacing = false const contextualMentions = 
mentions.filter((mention) => { if (!mention) return false if (removeNextSpacing && isSpacing(mention)) { removeNextSpacing = false return false } if (isMention(mention)) { mentionsCount++ if (inReplyToStatus) { const mentionHandle = getMentionHandle(mention) if (inReplyToStatus.account.acct === mentionHandle || inReplyToStatus.mentions.some(m => m.acct === mentionHandle)) { removeNextSpacing = true return false } } contextualMentionsCount++ } return true }) as Node[] // We have a special case for single mentions that are part of a reply. // We already have the replying to badge in this case or the status is connected to the previous one. // This is needed because the status doesn't included the in Reply to handle, only the account id. // But this covers the majority of cases. const showMentions = !(contextualMentionsCount === 0 || (mentionsCount === 1 && status?.inReplyToAccountId)) const grouped = contextualMentionsCount > 2 if (!showMentions || grouped) trimContentStart?.() const contextualChildren = children.slice(mentions.length) const mentionNodes = showMentions ? (grouped ? 
[h('mention-group', null, ...contextualMentions)] : contextualMentions) : [] return { ...node, children: [...mentionNodes, ...contextualChildren], } } } function hrefToHandle(href: string): string | undefined { const matchUser = href.match(UserLinkRE) if (matchUser) { const [, server, username] = matchUser return `${username}@${server.replace(/(.+\.)(.+\..+)/, '$2')}` } } function transformMentionLink(node: Node): string | Node | (string | Node)[] | null { if (node.name === 'a' && node.attributes.class?.includes('mention')) { const href = node.attributes.href if (href) { const handle = hrefToHandle(href) if (handle) { // convert to Tiptap mention node return h('span', { 'data-type': 'mention', 'data-id': handle }, handle) } } } return node } function createTransformNamedMentions(mentions: mastodon.v1.StatusMention[]) { return (node: Node): string | Node | (string | Node)[] | null => { if (node.name === 'a' && node.attributes.class?.includes('mention')) { const href = node.attributes.href const mention = href && mentions.find(m => m.url === href) if (mention) { node.attributes.href = `/${currentServer.value}/@${mention.acct}` node.children = [h('span', { 'data-type': 'mention', 'data-id': mention.acct }, `@${mention.username}`)] return node } } return node } }
{ return (node) => { if (node.type !== TEXT_NODE) return node const split = node.value.split(/\s?:([\w-]+?):/g) if (split.length === 1) return node return split.map((name, i) => { if (i % 2 === 0) return name const emoji = customEmojis[name] as mastodon.v1.CustomEmoji if (!emoji) return `:${name}:` return '' }).filter(Boolean) } }
identifier_body
content-parse.ts
// @unimport-disable import type { mastodon } from 'masto' import type { Node } from 'ultrahtml' import { DOCUMENT_NODE, ELEMENT_NODE, TEXT_NODE, h, parse, render } from 'ultrahtml' import { findAndReplaceEmojisInText } from '@iconify/utils' import { decode } from 'tiny-decode' import { emojiRegEx, getEmojiAttributes } from '../config/emojis' export interface ContentParseOptions { emojis?: Record<string, mastodon.v1.CustomEmoji> hideEmojis?: boolean mentions?: mastodon.v1.StatusMention[] markdown?: boolean replaceUnicodeEmoji?: boolean astTransforms?: Transform[] convertMentionLink?: boolean collapseMentionLink?: boolean status?: mastodon.v1.Status inReplyToStatus?: mastodon.v1.Status } const sanitizerBasicClasses = filterClasses(/^(h-\S*|p-\S*|u-\S*|dt-\S*|e-\S*|mention|hashtag|ellipsis|invisible)$/u) const sanitizer = sanitize({ // Allow basic elements as seen in https://github.com/mastodon/mastodon/blob/17f79082b098e05b68d6f0d38fabb3ac121879a9/lib/sanitize_ext/sanitize_config.rb br: {}, p: {}, a: { href: filterHref(), class: sanitizerBasicClasses, rel: set('nofollow noopener noreferrer'), target: set('_blank'), }, span: { class: sanitizerBasicClasses, }, // Allow elements potentially created for Markdown code blocks above pre: {}, code: { class: filterClasses(/^language-\w+$/), }, // Other elements supported in glitch, as seen in // https://github.com/glitch-soc/mastodon/blob/13227e1dafd308dfe1a3effc3379b766274809b3/lib/sanitize_ext/sanitize_config.rb#L75 abbr: { title: keep, }, del: {}, blockquote: { cite: filterHref(), }, b: {}, strong: {}, u: {}, sub: {}, sup: {}, i: {}, em: {}, h1: {}, h2: {}, h3: {}, h4: {}, h5: {}, ul: {}, ol: { start: keep, reversed: keep, }, li: { value: keep, }, }) /** * Parse raw HTML form Mastodon server to AST, * with interop of custom emojis and inline Markdown syntax */ export function parseMastodonHTML( html: string, options: ContentParseOptions = {}, ) { const { markdown = true, replaceUnicodeEmoji = true, convertMentionLink = 
false, collapseMentionLink = false, hideEmojis = false, mentions, status, inReplyToStatus, } = options if (markdown) { // Handle code blocks html = html .replace(/>(```|~~~)(\w*)([\s\S]+?)\1/g, (_1, _2, lang: string, raw: string) => { const code = htmlToText(raw) .replace(/</g, '&lt;') .replace(/>/g, '&gt;') .replace(/`/g, '&#96;') const classes = lang ? ` class="language-${lang}"` : '' return `><pre><code${classes}>${code}</code></pre>` }) .replace(/`([^`\n]*)`/g, (_1, raw) => { return raw ? `<code>${htmlToText(raw).replace(/</g, '&lt;').replace(/>/g, '&gt;')}</code>` : '' }) } // Always sanitize the raw HTML data *after* it has been modified const transforms: Transform[] = [ sanitizer, ...options.astTransforms || [], ] if (hideEmojis) { transforms.push(removeUnicodeEmoji) transforms.push(removeCustomEmoji(options.emojis ?? {})) } else { if (replaceUnicodeEmoji) transforms.push(transformUnicodeEmoji) transforms.push(replaceCustomEmoji(options.emojis ?? {})) } if (markdown) transforms.push(transformMarkdown) if (mentions?.length) transforms.push(createTransformNamedMentions(mentions)) if (convertMentionLink) transforms.push(transformMentionLink) transforms.push(transformParagraphs) if (collapseMentionLink) transforms.push(transformCollapseMentions(status, inReplyToStatus)) return transformSync(parse(html), transforms) } /** * Converts raw HTML form Mastodon server to HTML for Tiptap editor */ export function convertMastodonHTML(html: string, customEmojis: Record<string, mastodon.v1.CustomEmoji> = {}) { const tree = parseMastodonHTML(html, { emojis: customEmojis, markdown: true, convertMentionLink: true, }) return render(tree) } export function htmlToText(html: string) { try { const tree = parse(html) return (tree.children as Node[]).map(n => treeToText(n)).join('').trim() } catch (err) { console.error(err) return '' } } export function recursiveTreeToText(input: Node): string { if (input && input.children && input.children.length > 0) return input.children.map((n: 
Node) => recursiveTreeToText(n)).join('') else return treeToText(input) } const emojiIdNeedsWrappingRE = /^(\d|\w|-|_)+$/ export function treeToText(input: Node): string { let pre = '' let body = '' let post = '' if (input.type === TEXT_NODE) return decode(input.value) if (input.name === 'br') return '\n' if (['p', 'pre'].includes(input.name)) pre = '\n' if (input.attributes?.['data-type'] === 'mention') { const acct = input.attributes['data-id'] if (acct) return acct.startsWith('@') ? acct : `@${acct}` } if (input.name === 'code') { if (input.parent?.name === 'pre') { const lang = input.attributes.class?.replace('language-', '') pre = `\`\`\`${lang || ''}\n` post = '\n```' } else { pre = '`' post = '`' } } else if (input.name === 'b' || input.name === 'strong') { pre = '**' post = '**' } else if (input.name === 'i' || input.name === 'em') { pre = '*' post = '*' } else if (input.name === 'del') { pre = '~~' post = '~~' } if ('children' in input) body = (input.children as Node[]).map(n => treeToText(n)).join('') if (input.name === 'img' || input.name === 'picture') { if (input.attributes.class?.includes('custom-emoji')) { const id = input.attributes['data-emoji-id'] ?? input.attributes.alt ?? input.attributes.title ?? 'unknown' return id.match(emojiIdNeedsWrappingRE) ? `:${id}:` : id } if (input.attributes.class?.includes('iconify-emoji')) return input.attributes.alt } return pre + body + post } // A tree transform function takes an ultrahtml Node object and returns // new content that will replace the given node in the tree. // Returning a null removes the node from the tree. // Strings get converted to text nodes. // The input node's children have been transformed before the node itself // gets transformed. type Transform = (node: Node, root: Node) => (Node | string)[] | Node | string | null // Helpers for transforming (filtering, modifying, ...) a parsed HTML tree // by running the given chain of transform functions one-by-one. 
function transformSync(doc: Node, transforms: Transform[]) { function visit(node: Node, transform: Transform, root: Node) { if (Array.isArray(node.children)) { const children = [] as (Node | string)[] for (let i = 0; i < node.children.length; i++) { const result = visit(node.children[i], transform, root) if (Array.isArray(result)) children.push(...result) else if (result) children.push(result) } node.children = children.map((value) => { if (typeof value === 'string') return { type: TEXT_NODE, value, parent: node } value.parent = node return value }) } return transform(node, root) } for (const transform of transforms) doc = visit(doc, transform, doc) as Node return doc } // A tree transform for sanitizing elements & their attributes. type AttrSanitizers = Record<string, (value: string | undefined) => string | undefined> function sanitize(allowedElements: Record<string, AttrSanitizers>): Transform { return (node) => { if (node.type !== ELEMENT_NODE) return node if (!Object.prototype.hasOwnProperty.call(allowedElements, node.name)) return null const attrSanitizers = allowedElements[node.name] const attrs = {} as Record<string, string> for (const [name, func] of Object.entries(attrSanitizers)) { const value = func(node.attributes[name]) if (value !== undefined) attrs[name] = value } node.attributes = attrs return node } } function filterClasses(allowed: RegExp) { return (c: string | undefined) => { if (!c) return undefined return c.split(/\s/g).filter(cls => allowed.test(cls)).join(' ') } } function keep(value: string | undefined) { return value } function set(value: string) { return () => value } function filterHref() { const LINK_PROTOCOLS = new Set([ 'http:', 'https:', 'dat:', 'dweb:', 'ipfs:', 'ipns:', 'ssb:', 'gopher:', 'xmpp:', 'magnet:', 'gemini:', ]) return (href: string | undefined) => { if (href === undefined) return undefined // Allow relative links if (href.startsWith('/') || href.startsWith('.')) return href let url try { url = new URL(href) } catch (err) 
{ if (err instanceof TypeError) return undefined throw err } if (LINK_PROTOCOLS.has(url.protocol)) return url.toString() return '#' } }
let start = 0 const matches = [] as (string | Node)[] findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => { matches.push(result.slice(start).trimEnd()) start = result.length + match.match.length return undefined }) if (matches.length === 0) return node matches.push(node.value.slice(start)) return matches.filter(Boolean) } function transformUnicodeEmoji(node: Node) { if (node.type !== TEXT_NODE) return node let start = 0 const matches = [] as (string | Node)[] findAndReplaceEmojisInText(emojiRegEx, node.value, (match, result) => { const attrs = getEmojiAttributes(match) matches.push(result.slice(start)) matches.push(h('img', { src: attrs.src, alt: attrs.alt, class: attrs.class })) start = result.length + match.match.length return undefined }) if (matches.length === 0) return node matches.push(node.value.slice(start)) return matches.filter(Boolean) } function removeCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform { return (node) => { if (node.type !== TEXT_NODE) return node const split = node.value.split(/\s?:([\w-]+?):/g) if (split.length === 1) return node return split.map((name, i) => { if (i % 2 === 0) return name const emoji = customEmojis[name] as mastodon.v1.CustomEmoji if (!emoji) return `:${name}:` return '' }).filter(Boolean) } } function replaceCustomEmoji(customEmojis: Record<string, mastodon.v1.CustomEmoji>): Transform { return (node) => { if (node.type !== TEXT_NODE) return node const split = node.value.split(/:([\w-]+?):/g) if (split.length === 1) return node return split.map((name, i) => { if (i % 2 === 0) return name const emoji = customEmojis[name] as mastodon.v1.CustomEmoji if (!emoji) return `:${name}:` return h( 'picture', { 'alt': `:${name}:`, 'class': 'custom-emoji', 'data-emoji-id': name, }, [ h( 'source', { srcset: emoji.staticUrl, media: '(prefers-reduced-motion: reduce)', }, ), h( 'img', { src: emoji.url, alt: `:${name}:`, }, ), ], ) }).filter(Boolean) } } const _markdownReplacements: [RegExp, 
(c: (string | Node)[]) => Node][] = [ [/\*\*\*(.*?)\*\*\*/g, ([c]) => h('b', null, [h('em', null, c)])], [/\*\*(.*?)\*\*/g, c => h('b', null, c)], [/\*(.*?)\*/g, c => h('em', null, c)], [/~~(.*?)~~/g, c => h('del', null, c)], [/`([^`]+?)`/g, c => h('code', null, c)], // transform @username@twitter.com as links [/\B@([a-zA-Z0-9_]+)@twitter\.com\b/gi, c => h('a', { href: `https://twitter.com/${c}`, target: '_blank', rel: 'nofollow noopener noreferrer', class: 'mention external' }, `@${c}@twitter.com`)], ] function _markdownProcess(value: string) { const results = [] as (string | Node)[] let start = 0 while (true) { let found: { match: RegExpMatchArray; replacer: (c: (string | Node)[]) => Node } | undefined for (const [re, replacer] of _markdownReplacements) { re.lastIndex = start const match = re.exec(value) if (match) { if (!found || match.index < found.match.index!) found = { match, replacer } } } if (!found) break results.push(value.slice(start, found.match.index)) results.push(found.replacer(_markdownProcess(found.match[1]))) start = found.match.index! + found.match[0].length } results.push(value.slice(start)) return results.filter(Boolean) } function transformMarkdown(node: Node) { if (node.type !== TEXT_NODE) return node return _markdownProcess(node.value) } function transformParagraphs(node: Node): Node | Node[] { // For top level paragraphs, inject an empty <p> to preserve status paragraphs in our editor (except for the last one) if (node.parent?.type === DOCUMENT_NODE && node.name === 'p' && node.parent.children.at(-1) !== node) return [node, h('p')] return node } function isMention(node: Node) { const child = node.children?.length === 1 ? 
node.children[0] : null return Boolean(child?.name === 'a' && child.attributes.class?.includes('mention')) } function isSpacing(node: Node) { return node.type === TEXT_NODE && !node.value.trim() } // Extract the username from a known mention node function getMentionHandle(node: Node): string | undefined { return hrefToHandle(node.children?.[0].attributes.href) ?? node.children?.[0]?.children?.[0]?.attributes?.['data-id'] } function transformCollapseMentions(status?: mastodon.v1.Status, inReplyToStatus?: mastodon.v1.Status): Transform { let processed = false return (node: Node, root: Node): Node | Node[] => { if (processed || node.parent !== root || !node.children) return node const mentions: (Node | undefined)[] = [] const children = node.children as Node[] let trimContentStart: (() => void) | undefined for (const child of children) { // mention if (isMention(child)) { mentions.push(child) } // spaces in between else if (isSpacing(child)) { mentions.push(child) } // other content, stop collapsing else { if (child.type === TEXT_NODE) { trimContentStart = () => { child.value = child.value.trimStart() } } // remove <br> after mention if (child.name === 'br') mentions.push(undefined) break } } processed = true if (mentions.length === 0) return node let mentionsCount = 0 let contextualMentionsCount = 0 let removeNextSpacing = false const contextualMentions = mentions.filter((mention) => { if (!mention) return false if (removeNextSpacing && isSpacing(mention)) { removeNextSpacing = false return false } if (isMention(mention)) { mentionsCount++ if (inReplyToStatus) { const mentionHandle = getMentionHandle(mention) if (inReplyToStatus.account.acct === mentionHandle || inReplyToStatus.mentions.some(m => m.acct === mentionHandle)) { removeNextSpacing = true return false } } contextualMentionsCount++ } return true }) as Node[] // We have a special case for single mentions that are part of a reply. 
// We already have the replying to badge in this case or the status is connected to the previous one. // This is needed because the status doesn't included the in Reply to handle, only the account id. // But this covers the majority of cases. const showMentions = !(contextualMentionsCount === 0 || (mentionsCount === 1 && status?.inReplyToAccountId)) const grouped = contextualMentionsCount > 2 if (!showMentions || grouped) trimContentStart?.() const contextualChildren = children.slice(mentions.length) const mentionNodes = showMentions ? (grouped ? [h('mention-group', null, ...contextualMentions)] : contextualMentions) : [] return { ...node, children: [...mentionNodes, ...contextualChildren], } } } function hrefToHandle(href: string): string | undefined { const matchUser = href.match(UserLinkRE) if (matchUser) { const [, server, username] = matchUser return `${username}@${server.replace(/(.+\.)(.+\..+)/, '$2')}` } } function transformMentionLink(node: Node): string | Node | (string | Node)[] | null { if (node.name === 'a' && node.attributes.class?.includes('mention')) { const href = node.attributes.href if (href) { const handle = hrefToHandle(href) if (handle) { // convert to Tiptap mention node return h('span', { 'data-type': 'mention', 'data-id': handle }, handle) } } } return node } function createTransformNamedMentions(mentions: mastodon.v1.StatusMention[]) { return (node: Node): string | Node | (string | Node)[] | null => { if (node.name === 'a' && node.attributes.class?.includes('mention')) { const href = node.attributes.href const mention = href && mentions.find(m => m.url === href) if (mention) { node.attributes.href = `/${currentServer.value}/@${mention.acct}` node.children = [h('span', { 'data-type': 'mention', 'data-id': mention.acct }, `@${mention.username}`)] return node } } return node } }
function removeUnicodeEmoji(node: Node) { if (node.type !== TEXT_NODE) return node
random_line_split
3rd_person.rs
//! Example 03. 3rd person walk simulator. //! //! Difficulty: Advanced. //! //! This example based on async example, because it requires to load decent amount of //! resources which might be slow on some machines. //! //! In this example we'll create simple 3rd person game with character that can idle, //! walk, or jump. //! //! Also this example demonstrates the power of animation blending machines. Animation //! blending machines are used in all modern games to create complex animations from set //! of simple ones. //! //! TODO: Improve explanations. Some places can be explained better. //! //! Known bugs: Sometimes character will jump, but jumping animations is not playing. //! //! Possible improvements: //! - Smart camera - camera which will not penetrate walls. //! - Separate animation machines for upper and lower body - upper machine might be //! for combat, lower - for locomotion. //! - Tons of them, this is simple example after all. pub mod shared; use crate::shared::{create_ui, fix_shadows_distance, Game, GameScene}; use std::time::Instant; use winit::keyboard::KeyCode; use fyrox::{ core::{ algebra::Vector2, log::{Log, MessageKind}, }, engine::GraphicsContext, event::{Event, WindowEvent}, event_loop::ControlFlow, gui::{ message::MessageDirection, progress_bar::ProgressBarMessage, text::TextMessage, widget::WidgetMessage, }, renderer::QualitySettings, utils::translate_event, }; fn
() { let (mut game, event_loop) = Game::new("Example 03 - 3rd person"); // Create simple user interface that will show some useful info. let interface = create_ui( &mut game.engine.user_interface.build_ctx(), Vector2::new(100.0, 100.0), ); let mut previous = Instant::now(); let fixed_timestep = 1.0 / 60.0; let mut lag = 0.0; // Finally run our event loop which will respond to OS and window events and update // engine state accordingly. event_loop.run(move |event, window_target, control_flow| { match event { Event::MainEventsCleared => { // This is main game loop - it has fixed time step which means that game // code will run at fixed speed even if renderer can't give you desired // 60 fps. let elapsed = previous.elapsed(); previous = Instant::now(); lag += elapsed.as_secs_f32(); while lag >= fixed_timestep { // ************************ // Put your game logic here. // ************************ // Check each frame if our scene is created - here we just trying to lock context // without blocking, it is important for main thread to be functional while other // thread still loading data. if let Ok(mut load_context) = game.load_context.as_ref().unwrap().try_lock() { if let Some(load_result) = load_context.scene_data.take() { // Add scene to engine - engine will take ownership over scene and will return // you a handle to scene which can be used later on to borrow it and do some // actions you need. game.game_scene = Some(GameScene { scene: game.engine.scenes.add(load_result.scene), player: load_result.player, }); // Once scene is loaded, we should hide progress bar and text. game.engine .user_interface .send_message(WidgetMessage::visibility( interface.progress_bar, MessageDirection::ToWidget, false, )); game.engine .user_interface .send_message(WidgetMessage::visibility( interface.progress_text, MessageDirection::ToWidget, false, )); } // Report progress in UI. 
game.engine .user_interface .send_message(ProgressBarMessage::progress( interface.progress_bar, MessageDirection::ToWidget, load_context.progress, )); game.engine.user_interface.send_message(TextMessage::text( interface.progress_text, MessageDirection::ToWidget, format!( "Loading scene: {}%\n{}", load_context.progress * 100.0, load_context.message ), )); } // Update scene only if it is loaded. if let Some(game_scene) = game.game_scene.as_mut() { // Use stored scene handle to borrow a mutable reference of scene in // engine. let scene = &mut game.engine.scenes[game_scene.scene]; game_scene.player.update(scene, fixed_timestep); } if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { let debug_text = format!( "Example 03 - 3rd Person\n\ [W][S][A][D] - walk, [SPACE] - jump.\n\ Use [1][2][3][4] to select graphics quality.\n\ {}", ctx.renderer.get_statistics() ); game.engine.user_interface.send_message(TextMessage::text( interface.debug_text, MessageDirection::ToWidget, debug_text, )); } // It is very important to "pump" messages from UI. Even if don't need to // respond to such message, you should call this method, otherwise UI // might behave very weird. while let Some(_ui_event) = game.engine.user_interface.poll_message() { // ************************ // Put your data model synchronization code here. It should // take message and update data in your game according to // changes in UI. // ************************ } game.engine .update(fixed_timestep, control_flow, &mut lag, Default::default()); lag -= fixed_timestep; } // Rendering must be explicitly requested and handled after RedrawRequested event is received. 
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { ctx.window.request_redraw(); } } Event::Resumed => { game.engine .initialize_graphics_context(window_target) .unwrap(); } Event::Suspended => { game.engine.destroy_graphics_context().unwrap(); } Event::RedrawRequested(_) => { // Run renderer at max speed - it is not tied to game code. game.engine.render().unwrap(); } Event::LoopDestroyed => { println!("{:?}", fyrox::core::profiler::print()); } Event::WindowEvent { event, .. } => { match &event { WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit, WindowEvent::Resized(size) => { // It is very important to handle Resized event from window, because // renderer knows nothing about window size - it must be notified // directly when window size has changed. if let Err(e) = game.engine.set_frame_size((*size).into()) { Log::writeln( MessageKind::Error, format!("Unable to set frame size: {:?}", e), ); } // Root UI node should be resized too, otherwise progress bar will stay // in wrong position after resize. if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { let size = size.to_logical(ctx.window.scale_factor()); game.engine .user_interface .send_message(WidgetMessage::width( interface.root, MessageDirection::ToWidget, size.width, )); game.engine .user_interface .send_message(WidgetMessage::height( interface.root, MessageDirection::ToWidget, size.height, )); } } WindowEvent::KeyboardInput { event: input, .. 
} => { // Handle key input events via `WindowEvent`, not via `DeviceEvent` (#32) if let Some(game_scene) = game.game_scene.as_mut() { game_scene.player.handle_key_event(input, fixed_timestep); } let settings = match input.physical_key { KeyCode::Digit1 => Some(QualitySettings::ultra()), KeyCode::Digit2 => Some(QualitySettings::high()), KeyCode::Digit3 => Some(QualitySettings::medium()), KeyCode::Digit4 => Some(QualitySettings::low()), _ => None, }; if let Some(settings) = settings { if let GraphicsContext::Initialized(ref mut ctx) = game.engine.graphics_context { ctx.renderer .set_quality_settings(&fix_shadows_distance(settings)) .unwrap(); } } } _ => (), } // It is very important to "feed" user interface (UI) with events coming // from main window, otherwise UI won't respond to mouse, keyboard, or any // other event. if let Some(os_event) = translate_event(&event) { game.engine.user_interface.process_os_event(&os_event); } } Event::DeviceEvent { event, .. } => { if let Some(game_scene) = game.game_scene.as_mut() { game_scene .player .handle_device_event(&event, fixed_timestep); } } _ => *control_flow = ControlFlow::Poll, } }); }
main
identifier_name
3rd_person.rs
//! Example 03. 3rd person walk simulator. //! //! Difficulty: Advanced. //! //! This example based on async example, because it requires to load decent amount of //! resources which might be slow on some machines. //! //! In this example we'll create simple 3rd person game with character that can idle, //! walk, or jump. //! //! Also this example demonstrates the power of animation blending machines. Animation //! blending machines are used in all modern games to create complex animations from set //! of simple ones. //! //! TODO: Improve explanations. Some places can be explained better. //! //! Known bugs: Sometimes character will jump, but jumping animations is not playing. //! //! Possible improvements: //! - Smart camera - camera which will not penetrate walls. //! - Separate animation machines for upper and lower body - upper machine might be //! for combat, lower - for locomotion. //! - Tons of them, this is simple example after all. pub mod shared; use crate::shared::{create_ui, fix_shadows_distance, Game, GameScene}; use std::time::Instant; use winit::keyboard::KeyCode; use fyrox::{ core::{ algebra::Vector2, log::{Log, MessageKind}, }, engine::GraphicsContext, event::{Event, WindowEvent}, event_loop::ControlFlow, gui::{ message::MessageDirection, progress_bar::ProgressBarMessage, text::TextMessage, widget::WidgetMessage, }, renderer::QualitySettings, utils::translate_event, }; fn main() { let (mut game, event_loop) = Game::new("Example 03 - 3rd person"); // Create simple user interface that will show some useful info. let interface = create_ui( &mut game.engine.user_interface.build_ctx(), Vector2::new(100.0, 100.0), ); let mut previous = Instant::now(); let fixed_timestep = 1.0 / 60.0; let mut lag = 0.0; // Finally run our event loop which will respond to OS and window events and update // engine state accordingly. 
event_loop.run(move |event, window_target, control_flow| { match event { Event::MainEventsCleared => { // This is main game loop - it has fixed time step which means that game // code will run at fixed speed even if renderer can't give you desired // 60 fps. let elapsed = previous.elapsed(); previous = Instant::now(); lag += elapsed.as_secs_f32(); while lag >= fixed_timestep { // ************************ // Put your game logic here. // ************************ // Check each frame if our scene is created - here we just trying to lock context // without blocking, it is important for main thread to be functional while other // thread still loading data. if let Ok(mut load_context) = game.load_context.as_ref().unwrap().try_lock() { if let Some(load_result) = load_context.scene_data.take() { // Add scene to engine - engine will take ownership over scene and will return // you a handle to scene which can be used later on to borrow it and do some // actions you need. game.game_scene = Some(GameScene { scene: game.engine.scenes.add(load_result.scene), player: load_result.player, }); // Once scene is loaded, we should hide progress bar and text. game.engine .user_interface .send_message(WidgetMessage::visibility( interface.progress_bar, MessageDirection::ToWidget, false, )); game.engine .user_interface .send_message(WidgetMessage::visibility( interface.progress_text, MessageDirection::ToWidget, false, )); } // Report progress in UI. game.engine .user_interface .send_message(ProgressBarMessage::progress( interface.progress_bar, MessageDirection::ToWidget, load_context.progress, )); game.engine.user_interface.send_message(TextMessage::text( interface.progress_text, MessageDirection::ToWidget, format!( "Loading scene: {}%\n{}", load_context.progress * 100.0, load_context.message ), )); } // Update scene only if it is loaded. if let Some(game_scene) = game.game_scene.as_mut() { // Use stored scene handle to borrow a mutable reference of scene in // engine. 
let scene = &mut game.engine.scenes[game_scene.scene]; game_scene.player.update(scene, fixed_timestep); } if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { let debug_text = format!( "Example 03 - 3rd Person\n\ [W][S][A][D] - walk, [SPACE] - jump.\n\ Use [1][2][3][4] to select graphics quality.\n\ {}", ctx.renderer.get_statistics() ); game.engine.user_interface.send_message(TextMessage::text( interface.debug_text, MessageDirection::ToWidget, debug_text, )); } // It is very important to "pump" messages from UI. Even if don't need to // respond to such message, you should call this method, otherwise UI // might behave very weird. while let Some(_ui_event) = game.engine.user_interface.poll_message() { // ************************ // Put your data model synchronization code here. It should // take message and update data in your game according to // changes in UI. // ************************ } game.engine .update(fixed_timestep, control_flow, &mut lag, Default::default()); lag -= fixed_timestep; } // Rendering must be explicitly requested and handled after RedrawRequested event is received. if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { ctx.window.request_redraw(); } } Event::Resumed => { game.engine .initialize_graphics_context(window_target) .unwrap(); } Event::Suspended => { game.engine.destroy_graphics_context().unwrap(); } Event::RedrawRequested(_) => { // Run renderer at max speed - it is not tied to game code. game.engine.render().unwrap(); } Event::LoopDestroyed => { println!("{:?}", fyrox::core::profiler::print()); } Event::WindowEvent { event, .. } => { match &event { WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit, WindowEvent::Resized(size) => { // It is very important to handle Resized event from window, because // renderer knows nothing about window size - it must be notified // directly when window size has changed. 
if let Err(e) = game.engine.set_frame_size((*size).into()) { Log::writeln( MessageKind::Error, format!("Unable to set frame size: {:?}", e), ); } // Root UI node should be resized too, otherwise progress bar will stay // in wrong position after resize. if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { let size = size.to_logical(ctx.window.scale_factor()); game.engine .user_interface .send_message(WidgetMessage::width( interface.root, MessageDirection::ToWidget, size.width, )); game.engine .user_interface .send_message(WidgetMessage::height( interface.root, MessageDirection::ToWidget, size.height, )); } } WindowEvent::KeyboardInput { event: input, .. } => { // Handle key input events via `WindowEvent`, not via `DeviceEvent` (#32) if let Some(game_scene) = game.game_scene.as_mut()
let settings = match input.physical_key { KeyCode::Digit1 => Some(QualitySettings::ultra()), KeyCode::Digit2 => Some(QualitySettings::high()), KeyCode::Digit3 => Some(QualitySettings::medium()), KeyCode::Digit4 => Some(QualitySettings::low()), _ => None, }; if let Some(settings) = settings { if let GraphicsContext::Initialized(ref mut ctx) = game.engine.graphics_context { ctx.renderer .set_quality_settings(&fix_shadows_distance(settings)) .unwrap(); } } } _ => (), } // It is very important to "feed" user interface (UI) with events coming // from main window, otherwise UI won't respond to mouse, keyboard, or any // other event. if let Some(os_event) = translate_event(&event) { game.engine.user_interface.process_os_event(&os_event); } } Event::DeviceEvent { event, .. } => { if let Some(game_scene) = game.game_scene.as_mut() { game_scene .player .handle_device_event(&event, fixed_timestep); } } _ => *control_flow = ControlFlow::Poll, } }); }
{ game_scene.player.handle_key_event(input, fixed_timestep); }
conditional_block
3rd_person.rs
//! Example 03. 3rd person walk simulator. //! //! Difficulty: Advanced. //! //! This example based on async example, because it requires to load decent amount of //! resources which might be slow on some machines. //! //! In this example we'll create simple 3rd person game with character that can idle, //! walk, or jump. //! //! Also this example demonstrates the power of animation blending machines. Animation //! blending machines are used in all modern games to create complex animations from set //! of simple ones. //! //! TODO: Improve explanations. Some places can be explained better. //! //! Known bugs: Sometimes character will jump, but jumping animations is not playing. //! //! Possible improvements: //! - Smart camera - camera which will not penetrate walls. //! - Separate animation machines for upper and lower body - upper machine might be //! for combat, lower - for locomotion. //! - Tons of them, this is simple example after all. pub mod shared; use crate::shared::{create_ui, fix_shadows_distance, Game, GameScene}; use std::time::Instant; use winit::keyboard::KeyCode; use fyrox::{ core::{ algebra::Vector2, log::{Log, MessageKind}, }, engine::GraphicsContext, event::{Event, WindowEvent}, event_loop::ControlFlow, gui::{ message::MessageDirection, progress_bar::ProgressBarMessage, text::TextMessage, widget::WidgetMessage, }, renderer::QualitySettings, utils::translate_event, }; fn main()
{ let (mut game, event_loop) = Game::new("Example 03 - 3rd person"); // Create simple user interface that will show some useful info. let interface = create_ui( &mut game.engine.user_interface.build_ctx(), Vector2::new(100.0, 100.0), ); let mut previous = Instant::now(); let fixed_timestep = 1.0 / 60.0; let mut lag = 0.0; // Finally run our event loop which will respond to OS and window events and update // engine state accordingly. event_loop.run(move |event, window_target, control_flow| { match event { Event::MainEventsCleared => { // This is main game loop - it has fixed time step which means that game // code will run at fixed speed even if renderer can't give you desired // 60 fps. let elapsed = previous.elapsed(); previous = Instant::now(); lag += elapsed.as_secs_f32(); while lag >= fixed_timestep { // ************************ // Put your game logic here. // ************************ // Check each frame if our scene is created - here we just trying to lock context // without blocking, it is important for main thread to be functional while other // thread still loading data. if let Ok(mut load_context) = game.load_context.as_ref().unwrap().try_lock() { if let Some(load_result) = load_context.scene_data.take() { // Add scene to engine - engine will take ownership over scene and will return // you a handle to scene which can be used later on to borrow it and do some // actions you need. game.game_scene = Some(GameScene { scene: game.engine.scenes.add(load_result.scene), player: load_result.player, }); // Once scene is loaded, we should hide progress bar and text. game.engine .user_interface .send_message(WidgetMessage::visibility( interface.progress_bar, MessageDirection::ToWidget, false, )); game.engine .user_interface .send_message(WidgetMessage::visibility( interface.progress_text, MessageDirection::ToWidget, false, )); } // Report progress in UI. 
game.engine .user_interface .send_message(ProgressBarMessage::progress( interface.progress_bar, MessageDirection::ToWidget, load_context.progress, )); game.engine.user_interface.send_message(TextMessage::text( interface.progress_text, MessageDirection::ToWidget, format!( "Loading scene: {}%\n{}", load_context.progress * 100.0, load_context.message ), )); } // Update scene only if it is loaded. if let Some(game_scene) = game.game_scene.as_mut() { // Use stored scene handle to borrow a mutable reference of scene in // engine. let scene = &mut game.engine.scenes[game_scene.scene]; game_scene.player.update(scene, fixed_timestep); } if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { let debug_text = format!( "Example 03 - 3rd Person\n\ [W][S][A][D] - walk, [SPACE] - jump.\n\ Use [1][2][3][4] to select graphics quality.\n\ {}", ctx.renderer.get_statistics() ); game.engine.user_interface.send_message(TextMessage::text( interface.debug_text, MessageDirection::ToWidget, debug_text, )); } // It is very important to "pump" messages from UI. Even if don't need to // respond to such message, you should call this method, otherwise UI // might behave very weird. while let Some(_ui_event) = game.engine.user_interface.poll_message() { // ************************ // Put your data model synchronization code here. It should // take message and update data in your game according to // changes in UI. // ************************ } game.engine .update(fixed_timestep, control_flow, &mut lag, Default::default()); lag -= fixed_timestep; } // Rendering must be explicitly requested and handled after RedrawRequested event is received. 
if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { ctx.window.request_redraw(); } } Event::Resumed => { game.engine .initialize_graphics_context(window_target) .unwrap(); } Event::Suspended => { game.engine.destroy_graphics_context().unwrap(); } Event::RedrawRequested(_) => { // Run renderer at max speed - it is not tied to game code. game.engine.render().unwrap(); } Event::LoopDestroyed => { println!("{:?}", fyrox::core::profiler::print()); } Event::WindowEvent { event, .. } => { match &event { WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit, WindowEvent::Resized(size) => { // It is very important to handle Resized event from window, because // renderer knows nothing about window size - it must be notified // directly when window size has changed. if let Err(e) = game.engine.set_frame_size((*size).into()) { Log::writeln( MessageKind::Error, format!("Unable to set frame size: {:?}", e), ); } // Root UI node should be resized too, otherwise progress bar will stay // in wrong position after resize. if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { let size = size.to_logical(ctx.window.scale_factor()); game.engine .user_interface .send_message(WidgetMessage::width( interface.root, MessageDirection::ToWidget, size.width, )); game.engine .user_interface .send_message(WidgetMessage::height( interface.root, MessageDirection::ToWidget, size.height, )); } } WindowEvent::KeyboardInput { event: input, .. 
} => { // Handle key input events via `WindowEvent`, not via `DeviceEvent` (#32) if let Some(game_scene) = game.game_scene.as_mut() { game_scene.player.handle_key_event(input, fixed_timestep); } let settings = match input.physical_key { KeyCode::Digit1 => Some(QualitySettings::ultra()), KeyCode::Digit2 => Some(QualitySettings::high()), KeyCode::Digit3 => Some(QualitySettings::medium()), KeyCode::Digit4 => Some(QualitySettings::low()), _ => None, }; if let Some(settings) = settings { if let GraphicsContext::Initialized(ref mut ctx) = game.engine.graphics_context { ctx.renderer .set_quality_settings(&fix_shadows_distance(settings)) .unwrap(); } } } _ => (), } // It is very important to "feed" user interface (UI) with events coming // from main window, otherwise UI won't respond to mouse, keyboard, or any // other event. if let Some(os_event) = translate_event(&event) { game.engine.user_interface.process_os_event(&os_event); } } Event::DeviceEvent { event, .. } => { if let Some(game_scene) = game.game_scene.as_mut() { game_scene .player .handle_device_event(&event, fixed_timestep); } } _ => *control_flow = ControlFlow::Poll, } }); }
identifier_body
3rd_person.rs
//! Example 03. 3rd person walk simulator. //! //! Difficulty: Advanced. //! //! This example based on async example, because it requires to load decent amount of //! resources which might be slow on some machines. //! //! In this example we'll create simple 3rd person game with character that can idle, //! walk, or jump. //!
//! blending machines are used in all modern games to create complex animations from set //! of simple ones. //! //! TODO: Improve explanations. Some places can be explained better. //! //! Known bugs: Sometimes character will jump, but jumping animations is not playing. //! //! Possible improvements: //! - Smart camera - camera which will not penetrate walls. //! - Separate animation machines for upper and lower body - upper machine might be //! for combat, lower - for locomotion. //! - Tons of them, this is simple example after all. pub mod shared; use crate::shared::{create_ui, fix_shadows_distance, Game, GameScene}; use std::time::Instant; use winit::keyboard::KeyCode; use fyrox::{ core::{ algebra::Vector2, log::{Log, MessageKind}, }, engine::GraphicsContext, event::{Event, WindowEvent}, event_loop::ControlFlow, gui::{ message::MessageDirection, progress_bar::ProgressBarMessage, text::TextMessage, widget::WidgetMessage, }, renderer::QualitySettings, utils::translate_event, }; fn main() { let (mut game, event_loop) = Game::new("Example 03 - 3rd person"); // Create simple user interface that will show some useful info. let interface = create_ui( &mut game.engine.user_interface.build_ctx(), Vector2::new(100.0, 100.0), ); let mut previous = Instant::now(); let fixed_timestep = 1.0 / 60.0; let mut lag = 0.0; // Finally run our event loop which will respond to OS and window events and update // engine state accordingly. event_loop.run(move |event, window_target, control_flow| { match event { Event::MainEventsCleared => { // This is main game loop - it has fixed time step which means that game // code will run at fixed speed even if renderer can't give you desired // 60 fps. let elapsed = previous.elapsed(); previous = Instant::now(); lag += elapsed.as_secs_f32(); while lag >= fixed_timestep { // ************************ // Put your game logic here. 
// ************************ // Check each frame if our scene is created - here we just trying to lock context // without blocking, it is important for main thread to be functional while other // thread still loading data. if let Ok(mut load_context) = game.load_context.as_ref().unwrap().try_lock() { if let Some(load_result) = load_context.scene_data.take() { // Add scene to engine - engine will take ownership over scene and will return // you a handle to scene which can be used later on to borrow it and do some // actions you need. game.game_scene = Some(GameScene { scene: game.engine.scenes.add(load_result.scene), player: load_result.player, }); // Once scene is loaded, we should hide progress bar and text. game.engine .user_interface .send_message(WidgetMessage::visibility( interface.progress_bar, MessageDirection::ToWidget, false, )); game.engine .user_interface .send_message(WidgetMessage::visibility( interface.progress_text, MessageDirection::ToWidget, false, )); } // Report progress in UI. game.engine .user_interface .send_message(ProgressBarMessage::progress( interface.progress_bar, MessageDirection::ToWidget, load_context.progress, )); game.engine.user_interface.send_message(TextMessage::text( interface.progress_text, MessageDirection::ToWidget, format!( "Loading scene: {}%\n{}", load_context.progress * 100.0, load_context.message ), )); } // Update scene only if it is loaded. if let Some(game_scene) = game.game_scene.as_mut() { // Use stored scene handle to borrow a mutable reference of scene in // engine. 
let scene = &mut game.engine.scenes[game_scene.scene]; game_scene.player.update(scene, fixed_timestep); } if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { let debug_text = format!( "Example 03 - 3rd Person\n\ [W][S][A][D] - walk, [SPACE] - jump.\n\ Use [1][2][3][4] to select graphics quality.\n\ {}", ctx.renderer.get_statistics() ); game.engine.user_interface.send_message(TextMessage::text( interface.debug_text, MessageDirection::ToWidget, debug_text, )); } // It is very important to "pump" messages from UI. Even if don't need to // respond to such message, you should call this method, otherwise UI // might behave very weird. while let Some(_ui_event) = game.engine.user_interface.poll_message() { // ************************ // Put your data model synchronization code here. It should // take message and update data in your game according to // changes in UI. // ************************ } game.engine .update(fixed_timestep, control_flow, &mut lag, Default::default()); lag -= fixed_timestep; } // Rendering must be explicitly requested and handled after RedrawRequested event is received. if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { ctx.window.request_redraw(); } } Event::Resumed => { game.engine .initialize_graphics_context(window_target) .unwrap(); } Event::Suspended => { game.engine.destroy_graphics_context().unwrap(); } Event::RedrawRequested(_) => { // Run renderer at max speed - it is not tied to game code. game.engine.render().unwrap(); } Event::LoopDestroyed => { println!("{:?}", fyrox::core::profiler::print()); } Event::WindowEvent { event, .. } => { match &event { WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit, WindowEvent::Resized(size) => { // It is very important to handle Resized event from window, because // renderer knows nothing about window size - it must be notified // directly when window size has changed. 
if let Err(e) = game.engine.set_frame_size((*size).into()) { Log::writeln( MessageKind::Error, format!("Unable to set frame size: {:?}", e), ); } // Root UI node should be resized too, otherwise progress bar will stay // in wrong position after resize. if let GraphicsContext::Initialized(ref ctx) = game.engine.graphics_context { let size = size.to_logical(ctx.window.scale_factor()); game.engine .user_interface .send_message(WidgetMessage::width( interface.root, MessageDirection::ToWidget, size.width, )); game.engine .user_interface .send_message(WidgetMessage::height( interface.root, MessageDirection::ToWidget, size.height, )); } } WindowEvent::KeyboardInput { event: input, .. } => { // Handle key input events via `WindowEvent`, not via `DeviceEvent` (#32) if let Some(game_scene) = game.game_scene.as_mut() { game_scene.player.handle_key_event(input, fixed_timestep); } let settings = match input.physical_key { KeyCode::Digit1 => Some(QualitySettings::ultra()), KeyCode::Digit2 => Some(QualitySettings::high()), KeyCode::Digit3 => Some(QualitySettings::medium()), KeyCode::Digit4 => Some(QualitySettings::low()), _ => None, }; if let Some(settings) = settings { if let GraphicsContext::Initialized(ref mut ctx) = game.engine.graphics_context { ctx.renderer .set_quality_settings(&fix_shadows_distance(settings)) .unwrap(); } } } _ => (), } // It is very important to "feed" user interface (UI) with events coming // from main window, otherwise UI won't respond to mouse, keyboard, or any // other event. if let Some(os_event) = translate_event(&event) { game.engine.user_interface.process_os_event(&os_event); } } Event::DeviceEvent { event, .. } => { if let Some(game_scene) = game.game_scene.as_mut() { game_scene .player .handle_device_event(&event, fixed_timestep); } } _ => *control_flow = ControlFlow::Poll, } }); }
//! Also this example demonstrates the power of animation blending machines. Animation
random_line_split
cargo-deploy.rs
//! # `cargo deploy` //! Run a binary on a constellation cluster //! //! ## Usage //! ```text //! cargo deploy [options] <host> [--] [<args>]... //! ``` //! //! ## Options //! ```text //! -h --help Show this screen. //! -V --version Show version. //! --format=<fmt> Output format [possible values: human, json] [defa ult: human] //! ``` //! //! Note: --format can also be given as an env var, such as `CONSTELLATION_FORMAT=json` #![warn( missing_copy_implementations, missing_debug_implementations, missing_docs, trivial_numeric_casts, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results, clippy::pedantic )] // from https://github.com/rust-unofficial/patterns/blob/master/anti_patterns/deny-warnings.md use clap::{crate_version, App, AppSettings, Arg, ArgMatches, SubCommand}; use std::{ convert::TryInto, env, ffi::{OsStr, OsString}, iter, net::SocketAddr, process }; use constellation_internal::Format; fn main() { let args = cli().get_matches(); let args = args.subcommand_matches("deploy").unwrap(); let host: SocketAddr = args.value_of("host").unwrap().parse().unwrap(); let forward_args: Vec<&OsStr> = args.values_of_os("args").unwrap_or_default().collect(); let output = cargo(args) .stderr(process::Stdio::inherit()) .output() .expect("Failed to invoke cargo"); if !output.status.success() { process::exit(output.status.code().unwrap_or(101)); } let mut bin = Vec::new(); for message in serde_json::Deserializer::from_slice(&output.stdout).into_iter() { if let cargo_metadata::Message::CompilerArtifact(artifact) = message.unwrap_or_else(|_| panic!("Failed to parse output of cargo")) { if artifact.target.kind == vec![String::from("bin")] || artifact.target.kind == vec![String::from("example")] { bin.push(( artifact.target.name, artifact.filenames.into_iter().next().unwrap(), )); // We're assuming the first filename is the binary – .dSYM etc seem to always be second? 
} } } if bin.len() > 1 { let names = bin .into_iter() .map(|(target_name, _)| target_name) .collect::<Vec<_>>(); println!( "`cargo deploy` could not determine which binary to run. \ Use the `--bin` option to specify a binary.\n\ available binaries: {}", names.join(", ") ); // , or the `default-run` manifest key // TODO: work out best way to get this / behave same as cargo run process::exit(1); } else if bin.is_empty() { println!("a bin target must be available for `cargo deploy`"); process::exit(1); } let path = bin.into_iter().next().unwrap().1; let args: Vec<OsString> = iter::once(OsString::from(&path)) .chain(forward_args.into_iter().map(ToOwned::to_owned)) .collect(); let vars: Vec<(OsString, OsString)> = env::vars_os().collect(); let format = Format::Human; constellation::deploy(host, &path, format, args, vars); } fn cli<'a, 'b>() -> App<'a, 'b> { // https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/bin/cargo/cli.rs#L205-L277 // https://github.com/rust-lang/cargo/blob/982622252a64d7c526c04a244f1a81523dc9ae54/src/bin/cargo/commands/run.rs App::new("cargo") .bin_name("cargo") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::SubcommandRequired, ]) .arg( Arg::opt( "verbose", "Use verbose output (-vv very verbose/build.rs output)", ) .short("v") .multiple(true) .global(true), ) .arg( Arg::opt("color", "Coloring: auto, always, never") .value_name("WHEN") .global(true), ) .arg(Arg::opt("frozen", "Require Cargo.lock and cache are up to date").global(true)) .arg(Arg::opt("locked", "Require Cargo.lock is up to date").global(true)) .arg(Arg::opt("offline", "Run without accessing the network").global(true)) .arg( Arg::multi_opt("config", "KEY=VALUE", "Override a configuration value") .global(true) .hidden(true), ) .arg( Arg::with_name("unstable-features") .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details") .short("Z") .value_name("FLAG") .multiple(true) 
.number_of_values(1) .global(true), ) .subcommand( SubCommand::with_name("deploy") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::DontCollapseArgsInUsage, AppSettings::TrailingVarArg, ]) .version(crate_version!()) .about("Run a binary or example of the local package on a constellation cluster") // .arg(Arg::opt("quiet", "No output printed to stdout").short("q")) .arg( Arg::with_name("host") .help("Constellation cluster node to connect to (e.g. 10.0.0.1:8888)") .required(true) .validator(|host| { host.parse::<SocketAddr>() .map(drop) .map_err(|err| err.to_string()) }), ) .arg(Arg::with_name("args").multiple(true)) .args(&Arg::targets_bin_example( "Name of the bin target to run", "Name of the example target to run", )) .arg(Arg::package("Package with the target to run")) .arg(Arg::jobs()) .arg(Arg::release( "Build artifacts in release mode, with optimizations", )) .arg(Arg::profile("Build artifacts with the specified profile")) .args(&Arg::features()) .arg(Arg::target_triple("Build for the target triple")) .arg(Arg::target_dir()) .arg(Arg::manifest_path()) // .arg(Arg::message_format()) .after_help( "\ If neither `--bin` nor `--example` are given, then if the package only has one bin target it will be run. Otherwise `--bin` specifies the bin target to run, and `--example` specifies the example target to run. At most one of `--bin` or `--example` can be provided. All the arguments following the two dashes (`--`) are passed to the binary to run. If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. 
", ), ) } fn cargo(args: &ArgMatches) -> process::Command { let verbose: u64 = args.occurrences_of("verbose"); let color: Option<&str> = args.value_of("color"); let frozen: bool = args.is_present("frozen"); let locked: bool = args.is_present("locked"); let offline: bool = args.is_present("offline"); let config: Vec<&str> = args.values_of("config").unwrap_or_default().collect(); let unstable_features: Vec<&OsStr> = args .values_of_os("unstable-features") .unwrap_or_default() .collect(); let bin: Vec<&str> = args.values_of("bin").unwrap_or_default().collect(); let example: Vec<&str> = args.values_of("example").unwrap_or_default().collect(); let package: Vec<&str> = args.values_of("package").unwrap_or_default().collect(); let jobs: Option<&str> = args.value_of("jobs"); let release: bool = args.is_present("release"); let profile: Option<&str> = args.value_of("profile"); let features: Vec<&str> = args.values_of("features").unwrap_or_default().collect(); let all_features: bool = args.is_present("all-features"); let no_default_features: bool = args.is_present("no-default-features"); let target: Option<&str> = args.value_of("target"); let target_dir: Option<&str> = args.value_of("target-dir"); let manifest_path: Option<&str> = args.value_of("manifest-path"); // let mut args: Vec<String> = Vec::new(); let mut cargo = process::Command::new("cargo"); let _ = cargo.arg("build"); let _ = cargo.arg("--message-format=json"); if verbose > 0 { let _ = cargo.arg(format!("-{}", "v".repeat(verbose.try_into().unwrap()))); } if let Some(color) = color { let _ = cargo.arg(format!("--color={}", color)); } if frozen { let _ = cargo.arg("--frozen"); } if locked { let _ = cargo.arg("--locked"); } if offline { let _ = cargo.arg("--offline"); } for config in config { let _ = cargo.arg(format!("--config={}", config)); } for unstable_features in unstable_features { let mut arg = OsString::from("-Z"); arg.push(unstable_features); let _ = cargo.arg(arg); } for bin in bin { let _ = 
cargo.arg(format!("--bin={}", bin)); } for example in example { let _ = cargo.arg(format!("--example={}", example)); } for package in package { let _ = cargo.arg(format!("--package={}", package)); } if let Some(jobs) = jobs { let _ = cargo.arg(format!("--jobs={}", jobs)); } if release {
f let Some(profile) = profile { let _ = cargo.arg(format!("--profile={}", profile)); } for features in features { let _ = cargo.arg(format!("--features={}", features)); } if all_features { let _ = cargo.arg("--all-features"); } if no_default_features { let _ = cargo.arg("--no-default-features"); } if let Some(target) = target { let _ = cargo.arg(format!("--target={}", target)); } if let Some(target_dir) = target_dir { let _ = cargo.arg(format!("--target-dir={}", target_dir)); } if let Some(manifest_path) = manifest_path { let _ = cargo.arg(format!("--manifest-path={}", manifest_path)); } cargo } // https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/cargo/util/command_prelude.rs trait ArgExt: Sized { fn opt(name: &'static str, help: &'static str) -> Self; fn optional_multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self; fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self; fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2]; fn package(package: &'static str) -> Self; fn jobs() -> Self; fn release(release: &'static str) -> Self; fn profile(profile: &'static str) -> Self; fn features() -> [Self; 3]; fn target_triple(target: &'static str) -> Self; fn target_dir() -> Self; fn manifest_path() -> Self; } impl<'a, 'b> ArgExt for Arg<'a, 'b> { fn opt(name: &'static str, help: &'static str) -> Self { Arg::with_name(name).long(name).help(help) } fn optional_multi_opt( name: &'static str, value_name: &'static str, help: &'static str, ) -> Self { Self::opt(name, help) .value_name(value_name) .multiple(true) .min_values(0) .number_of_values(1) } fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self { // Note that all `.multiple(true)` arguments in Cargo should specify // `.number_of_values(1)` as well, so that `--foo val1 val2` is // *not* parsed as `foo` with values ["val1", "val2"]. 
// `number_of_values` should become the default in clap 3. Self::opt(name, help) .value_name(value_name) .multiple(true) .number_of_values(1) } fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2] { [ Self::optional_multi_opt("bin", "NAME", bin), Self::optional_multi_opt("example", "NAME", example), ] } fn package(package: &'static str) -> Self { Self::opt("package", package).short("p").value_name("SPEC") } fn jobs() -> Self { Self::opt("jobs", "Number of parallel jobs, defaults to # of CPUs") .short("j") .value_name("N") } fn release(release: &'static str) -> Self { Self::opt("release", release) } fn profile(profile: &'static str) -> Self { Self::opt("profile", profile).value_name("PROFILE-NAME") } fn features() -> [Self; 3] { [ Self::multi_opt( "features", "FEATURES", "Space-separated list of features to activate", ), Self::opt("all-features", "Activate all available features"), Self::opt( "no-default-features", "Do not activate the `default` feature", ), ] } fn target_triple(target: &'static str) -> Self { Self::opt("target", target).value_name("TRIPLE") } fn target_dir() -> Self { Self::opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY") } fn manifest_path() -> Self { Self::opt("manifest-path", "Path to Cargo.toml").value_name("PATH") } }
let _ = cargo.arg("--release"); } i
conditional_block
cargo-deploy.rs
//! # `cargo deploy` //! Run a binary on a constellation cluster //! //! ## Usage //! ```text //! cargo deploy [options] <host> [--] [<args>]... //! ``` //! //! ## Options //! ```text //! -h --help Show this screen. //! -V --version Show version. //! --format=<fmt> Output format [possible values: human, json] [defa ult: human] //! ``` //! //! Note: --format can also be given as an env var, such as `CONSTELLATION_FORMAT=json` #![warn( missing_copy_implementations, missing_debug_implementations, missing_docs, trivial_numeric_casts, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results, clippy::pedantic )] // from https://github.com/rust-unofficial/patterns/blob/master/anti_patterns/deny-warnings.md use clap::{crate_version, App, AppSettings, Arg, ArgMatches, SubCommand}; use std::{ convert::TryInto, env, ffi::{OsStr, OsString}, iter, net::SocketAddr, process }; use constellation_internal::Format; fn main() { let args = cli().get_matches(); let args = args.subcommand_matches("deploy").unwrap(); let host: SocketAddr = args.value_of("host").unwrap().parse().unwrap(); let forward_args: Vec<&OsStr> = args.values_of_os("args").unwrap_or_default().collect(); let output = cargo(args) .stderr(process::Stdio::inherit()) .output() .expect("Failed to invoke cargo"); if !output.status.success() { process::exit(output.status.code().unwrap_or(101)); } let mut bin = Vec::new(); for message in serde_json::Deserializer::from_slice(&output.stdout).into_iter() { if let cargo_metadata::Message::CompilerArtifact(artifact) = message.unwrap_or_else(|_| panic!("Failed to parse output of cargo")) { if artifact.target.kind == vec![String::from("bin")] || artifact.target.kind == vec![String::from("example")] { bin.push(( artifact.target.name, artifact.filenames.into_iter().next().unwrap(), )); // We're assuming the first filename is the binary – .dSYM etc seem to always be second? 
} } } if bin.len() > 1 { let names = bin .into_iter() .map(|(target_name, _)| target_name) .collect::<Vec<_>>(); println!( "`cargo deploy` could not determine which binary to run. \ Use the `--bin` option to specify a binary.\n\ available binaries: {}", names.join(", ") ); // , or the `default-run` manifest key // TODO: work out best way to get this / behave same as cargo run process::exit(1); } else if bin.is_empty() { println!("a bin target must be available for `cargo deploy`"); process::exit(1); } let path = bin.into_iter().next().unwrap().1; let args: Vec<OsString> = iter::once(OsString::from(&path)) .chain(forward_args.into_iter().map(ToOwned::to_owned)) .collect(); let vars: Vec<(OsString, OsString)> = env::vars_os().collect(); let format = Format::Human; constellation::deploy(host, &path, format, args, vars); } fn cli<'a, 'b>() -> App<'a, 'b> { // https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/bin/cargo/cli.rs#L205-L277 // https://github.com/rust-lang/cargo/blob/982622252a64d7c526c04a244f1a81523dc9ae54/src/bin/cargo/commands/run.rs App::new("cargo") .bin_name("cargo") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::SubcommandRequired, ]) .arg( Arg::opt( "verbose", "Use verbose output (-vv very verbose/build.rs output)", ) .short("v") .multiple(true) .global(true), ) .arg( Arg::opt("color", "Coloring: auto, always, never") .value_name("WHEN") .global(true), ) .arg(Arg::opt("frozen", "Require Cargo.lock and cache are up to date").global(true)) .arg(Arg::opt("locked", "Require Cargo.lock is up to date").global(true)) .arg(Arg::opt("offline", "Run without accessing the network").global(true)) .arg( Arg::multi_opt("config", "KEY=VALUE", "Override a configuration value") .global(true) .hidden(true), ) .arg( Arg::with_name("unstable-features") .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details") .short("Z") .value_name("FLAG") .multiple(true) 
.number_of_values(1) .global(true), ) .subcommand( SubCommand::with_name("deploy") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::DontCollapseArgsInUsage, AppSettings::TrailingVarArg, ]) .version(crate_version!()) .about("Run a binary or example of the local package on a constellation cluster") // .arg(Arg::opt("quiet", "No output printed to stdout").short("q")) .arg( Arg::with_name("host") .help("Constellation cluster node to connect to (e.g. 10.0.0.1:8888)") .required(true) .validator(|host| { host.parse::<SocketAddr>() .map(drop) .map_err(|err| err.to_string()) }), ) .arg(Arg::with_name("args").multiple(true)) .args(&Arg::targets_bin_example( "Name of the bin target to run", "Name of the example target to run", )) .arg(Arg::package("Package with the target to run")) .arg(Arg::jobs()) .arg(Arg::release( "Build artifacts in release mode, with optimizations", )) .arg(Arg::profile("Build artifacts with the specified profile")) .args(&Arg::features()) .arg(Arg::target_triple("Build for the target triple")) .arg(Arg::target_dir()) .arg(Arg::manifest_path()) // .arg(Arg::message_format()) .after_help( "\ If neither `--bin` nor `--example` are given, then if the package only has one bin target it will be run. Otherwise `--bin` specifies the bin target to run, and `--example` specifies the example target to run. At most one of `--bin` or `--example` can be provided. All the arguments following the two dashes (`--`) are passed to the binary to run. If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. 
", ), ) } fn cargo(args: &ArgMatches) -> process::Command { let verbose: u64 = args.occurrences_of("verbose"); let color: Option<&str> = args.value_of("color"); let frozen: bool = args.is_present("frozen"); let locked: bool = args.is_present("locked"); let offline: bool = args.is_present("offline"); let config: Vec<&str> = args.values_of("config").unwrap_or_default().collect(); let unstable_features: Vec<&OsStr> = args .values_of_os("unstable-features") .unwrap_or_default() .collect(); let bin: Vec<&str> = args.values_of("bin").unwrap_or_default().collect(); let example: Vec<&str> = args.values_of("example").unwrap_or_default().collect(); let package: Vec<&str> = args.values_of("package").unwrap_or_default().collect(); let jobs: Option<&str> = args.value_of("jobs"); let release: bool = args.is_present("release"); let profile: Option<&str> = args.value_of("profile"); let features: Vec<&str> = args.values_of("features").unwrap_or_default().collect(); let all_features: bool = args.is_present("all-features"); let no_default_features: bool = args.is_present("no-default-features"); let target: Option<&str> = args.value_of("target"); let target_dir: Option<&str> = args.value_of("target-dir"); let manifest_path: Option<&str> = args.value_of("manifest-path"); // let mut args: Vec<String> = Vec::new(); let mut cargo = process::Command::new("cargo"); let _ = cargo.arg("build"); let _ = cargo.arg("--message-format=json"); if verbose > 0 { let _ = cargo.arg(format!("-{}", "v".repeat(verbose.try_into().unwrap()))); } if let Some(color) = color { let _ = cargo.arg(format!("--color={}", color)); } if frozen { let _ = cargo.arg("--frozen"); } if locked { let _ = cargo.arg("--locked"); } if offline { let _ = cargo.arg("--offline"); } for config in config { let _ = cargo.arg(format!("--config={}", config)); } for unstable_features in unstable_features { let mut arg = OsString::from("-Z"); arg.push(unstable_features); let _ = cargo.arg(arg); } for bin in bin { let _ = 
cargo.arg(format!("--bin={}", bin)); } for example in example { let _ = cargo.arg(format!("--example={}", example)); } for package in package { let _ = cargo.arg(format!("--package={}", package)); } if let Some(jobs) = jobs { let _ = cargo.arg(format!("--jobs={}", jobs)); } if release { let _ = cargo.arg("--release"); } if let Some(profile) = profile { let _ = cargo.arg(format!("--profile={}", profile)); } for features in features { let _ = cargo.arg(format!("--features={}", features)); } if all_features { let _ = cargo.arg("--all-features"); } if no_default_features { let _ = cargo.arg("--no-default-features"); } if let Some(target) = target { let _ = cargo.arg(format!("--target={}", target)); } if let Some(target_dir) = target_dir { let _ = cargo.arg(format!("--target-dir={}", target_dir)); } if let Some(manifest_path) = manifest_path { let _ = cargo.arg(format!("--manifest-path={}", manifest_path)); } cargo } // https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/cargo/util/command_prelude.rs trait ArgExt: Sized { fn opt(name: &'static str, help: &'static str) -> Self; fn optional_multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self; fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self; fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2]; fn package(package: &'static str) -> Self; fn jobs() -> Self; fn release(release: &'static str) -> Self; fn profile(profile: &'static str) -> Self; fn features() -> [Self; 3]; fn target_triple(target: &'static str) -> Self; fn target_dir() -> Self; fn manifest_path() -> Self; } impl<'a, 'b> ArgExt for Arg<'a, 'b> { fn opt(name: &'static str, help: &'static str) -> Self { Arg::with_name(name).long(name).help(help) } fn optional_multi_opt( name: &'static str, value_name: &'static str, help: &'static str, ) -> Self {
n multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self { // Note that all `.multiple(true)` arguments in Cargo should specify // `.number_of_values(1)` as well, so that `--foo val1 val2` is // *not* parsed as `foo` with values ["val1", "val2"]. // `number_of_values` should become the default in clap 3. Self::opt(name, help) .value_name(value_name) .multiple(true) .number_of_values(1) } fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2] { [ Self::optional_multi_opt("bin", "NAME", bin), Self::optional_multi_opt("example", "NAME", example), ] } fn package(package: &'static str) -> Self { Self::opt("package", package).short("p").value_name("SPEC") } fn jobs() -> Self { Self::opt("jobs", "Number of parallel jobs, defaults to # of CPUs") .short("j") .value_name("N") } fn release(release: &'static str) -> Self { Self::opt("release", release) } fn profile(profile: &'static str) -> Self { Self::opt("profile", profile).value_name("PROFILE-NAME") } fn features() -> [Self; 3] { [ Self::multi_opt( "features", "FEATURES", "Space-separated list of features to activate", ), Self::opt("all-features", "Activate all available features"), Self::opt( "no-default-features", "Do not activate the `default` feature", ), ] } fn target_triple(target: &'static str) -> Self { Self::opt("target", target).value_name("TRIPLE") } fn target_dir() -> Self { Self::opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY") } fn manifest_path() -> Self { Self::opt("manifest-path", "Path to Cargo.toml").value_name("PATH") } }
Self::opt(name, help) .value_name(value_name) .multiple(true) .min_values(0) .number_of_values(1) } f
identifier_body
cargo-deploy.rs
//! # `cargo deploy` //! Run a binary on a constellation cluster //! //! ## Usage //! ```text //! cargo deploy [options] <host> [--] [<args>]... //! ``` //! //! ## Options //! ```text //! -h --help Show this screen. //! -V --version Show version. //! --format=<fmt> Output format [possible values: human, json] [defa ult: human] //! ``` //! //! Note: --format can also be given as an env var, such as `CONSTELLATION_FORMAT=json` #![warn( missing_copy_implementations, missing_debug_implementations, missing_docs, trivial_numeric_casts, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results, clippy::pedantic )] // from https://github.com/rust-unofficial/patterns/blob/master/anti_patterns/deny-warnings.md use clap::{crate_version, App, AppSettings, Arg, ArgMatches, SubCommand}; use std::{ convert::TryInto, env, ffi::{OsStr, OsString}, iter, net::SocketAddr, process }; use constellation_internal::Format; fn main() { let args = cli().get_matches(); let args = args.subcommand_matches("deploy").unwrap(); let host: SocketAddr = args.value_of("host").unwrap().parse().unwrap(); let forward_args: Vec<&OsStr> = args.values_of_os("args").unwrap_or_default().collect(); let output = cargo(args) .stderr(process::Stdio::inherit()) .output() .expect("Failed to invoke cargo"); if !output.status.success() { process::exit(output.status.code().unwrap_or(101)); } let mut bin = Vec::new(); for message in serde_json::Deserializer::from_slice(&output.stdout).into_iter() { if let cargo_metadata::Message::CompilerArtifact(artifact) = message.unwrap_or_else(|_| panic!("Failed to parse output of cargo")) { if artifact.target.kind == vec![String::from("bin")] || artifact.target.kind == vec![String::from("example")] { bin.push(( artifact.target.name, artifact.filenames.into_iter().next().unwrap(), )); // We're assuming the first filename is the binary – .dSYM etc seem to always be second? 
} } } if bin.len() > 1 { let names = bin .into_iter() .map(|(target_name, _)| target_name) .collect::<Vec<_>>(); println!( "`cargo deploy` could not determine which binary to run. \ Use the `--bin` option to specify a binary.\n\ available binaries: {}", names.join(", ") ); // , or the `default-run` manifest key // TODO: work out best way to get this / behave same as cargo run process::exit(1); } else if bin.is_empty() { println!("a bin target must be available for `cargo deploy`"); process::exit(1); } let path = bin.into_iter().next().unwrap().1; let args: Vec<OsString> = iter::once(OsString::from(&path)) .chain(forward_args.into_iter().map(ToOwned::to_owned)) .collect(); let vars: Vec<(OsString, OsString)> = env::vars_os().collect(); let format = Format::Human; constellation::deploy(host, &path, format, args, vars); } fn cli<'a, 'b>() -> App<'a, 'b> { // https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/bin/cargo/cli.rs#L205-L277 // https://github.com/rust-lang/cargo/blob/982622252a64d7c526c04a244f1a81523dc9ae54/src/bin/cargo/commands/run.rs App::new("cargo") .bin_name("cargo") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::SubcommandRequired, ]) .arg( Arg::opt( "verbose", "Use verbose output (-vv very verbose/build.rs output)", ) .short("v") .multiple(true) .global(true), ) .arg( Arg::opt("color", "Coloring: auto, always, never") .value_name("WHEN") .global(true), ) .arg(Arg::opt("frozen", "Require Cargo.lock and cache are up to date").global(true)) .arg(Arg::opt("locked", "Require Cargo.lock is up to date").global(true)) .arg(Arg::opt("offline", "Run without accessing the network").global(true)) .arg( Arg::multi_opt("config", "KEY=VALUE", "Override a configuration value") .global(true) .hidden(true), ) .arg( Arg::with_name("unstable-features") .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details") .short("Z") .value_name("FLAG") .multiple(true) 
.number_of_values(1) .global(true), ) .subcommand( SubCommand::with_name("deploy") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::DontCollapseArgsInUsage, AppSettings::TrailingVarArg, ]) .version(crate_version!()) .about("Run a binary or example of the local package on a constellation cluster") // .arg(Arg::opt("quiet", "No output printed to stdout").short("q")) .arg( Arg::with_name("host") .help("Constellation cluster node to connect to (e.g. 10.0.0.1:8888)") .required(true) .validator(|host| { host.parse::<SocketAddr>() .map(drop) .map_err(|err| err.to_string()) }), ) .arg(Arg::with_name("args").multiple(true)) .args(&Arg::targets_bin_example( "Name of the bin target to run", "Name of the example target to run", )) .arg(Arg::package("Package with the target to run")) .arg(Arg::jobs()) .arg(Arg::release( "Build artifacts in release mode, with optimizations", )) .arg(Arg::profile("Build artifacts with the specified profile")) .args(&Arg::features()) .arg(Arg::target_triple("Build for the target triple")) .arg(Arg::target_dir()) .arg(Arg::manifest_path()) // .arg(Arg::message_format()) .after_help( "\ If neither `--bin` nor `--example` are given, then if the package only has one bin target it will be run. Otherwise `--bin` specifies the bin target to run,
`--` go to the binary, the ones before go to Cargo. ", ), ) } fn cargo(args: &ArgMatches) -> process::Command { let verbose: u64 = args.occurrences_of("verbose"); let color: Option<&str> = args.value_of("color"); let frozen: bool = args.is_present("frozen"); let locked: bool = args.is_present("locked"); let offline: bool = args.is_present("offline"); let config: Vec<&str> = args.values_of("config").unwrap_or_default().collect(); let unstable_features: Vec<&OsStr> = args .values_of_os("unstable-features") .unwrap_or_default() .collect(); let bin: Vec<&str> = args.values_of("bin").unwrap_or_default().collect(); let example: Vec<&str> = args.values_of("example").unwrap_or_default().collect(); let package: Vec<&str> = args.values_of("package").unwrap_or_default().collect(); let jobs: Option<&str> = args.value_of("jobs"); let release: bool = args.is_present("release"); let profile: Option<&str> = args.value_of("profile"); let features: Vec<&str> = args.values_of("features").unwrap_or_default().collect(); let all_features: bool = args.is_present("all-features"); let no_default_features: bool = args.is_present("no-default-features"); let target: Option<&str> = args.value_of("target"); let target_dir: Option<&str> = args.value_of("target-dir"); let manifest_path: Option<&str> = args.value_of("manifest-path"); // let mut args: Vec<String> = Vec::new(); let mut cargo = process::Command::new("cargo"); let _ = cargo.arg("build"); let _ = cargo.arg("--message-format=json"); if verbose > 0 { let _ = cargo.arg(format!("-{}", "v".repeat(verbose.try_into().unwrap()))); } if let Some(color) = color { let _ = cargo.arg(format!("--color={}", color)); } if frozen { let _ = cargo.arg("--frozen"); } if locked { let _ = cargo.arg("--locked"); } if offline { let _ = cargo.arg("--offline"); } for config in config { let _ = cargo.arg(format!("--config={}", config)); } for unstable_features in unstable_features { let mut arg = OsString::from("-Z"); arg.push(unstable_features); let _ = 
cargo.arg(arg); } for bin in bin { let _ = cargo.arg(format!("--bin={}", bin)); } for example in example { let _ = cargo.arg(format!("--example={}", example)); } for package in package { let _ = cargo.arg(format!("--package={}", package)); } if let Some(jobs) = jobs { let _ = cargo.arg(format!("--jobs={}", jobs)); } if release { let _ = cargo.arg("--release"); } if let Some(profile) = profile { let _ = cargo.arg(format!("--profile={}", profile)); } for features in features { let _ = cargo.arg(format!("--features={}", features)); } if all_features { let _ = cargo.arg("--all-features"); } if no_default_features { let _ = cargo.arg("--no-default-features"); } if let Some(target) = target { let _ = cargo.arg(format!("--target={}", target)); } if let Some(target_dir) = target_dir { let _ = cargo.arg(format!("--target-dir={}", target_dir)); } if let Some(manifest_path) = manifest_path { let _ = cargo.arg(format!("--manifest-path={}", manifest_path)); } cargo } // https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/cargo/util/command_prelude.rs trait ArgExt: Sized { fn opt(name: &'static str, help: &'static str) -> Self; fn optional_multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self; fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self; fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2]; fn package(package: &'static str) -> Self; fn jobs() -> Self; fn release(release: &'static str) -> Self; fn profile(profile: &'static str) -> Self; fn features() -> [Self; 3]; fn target_triple(target: &'static str) -> Self; fn target_dir() -> Self; fn manifest_path() -> Self; } impl<'a, 'b> ArgExt for Arg<'a, 'b> { fn opt(name: &'static str, help: &'static str) -> Self { Arg::with_name(name).long(name).help(help) } fn optional_multi_opt( name: &'static str, value_name: &'static str, help: &'static str, ) -> Self { Self::opt(name, help) .value_name(value_name) 
.multiple(true) .min_values(0) .number_of_values(1) } fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self { // Note that all `.multiple(true)` arguments in Cargo should specify // `.number_of_values(1)` as well, so that `--foo val1 val2` is // *not* parsed as `foo` with values ["val1", "val2"]. // `number_of_values` should become the default in clap 3. Self::opt(name, help) .value_name(value_name) .multiple(true) .number_of_values(1) } fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2] { [ Self::optional_multi_opt("bin", "NAME", bin), Self::optional_multi_opt("example", "NAME", example), ] } fn package(package: &'static str) -> Self { Self::opt("package", package).short("p").value_name("SPEC") } fn jobs() -> Self { Self::opt("jobs", "Number of parallel jobs, defaults to # of CPUs") .short("j") .value_name("N") } fn release(release: &'static str) -> Self { Self::opt("release", release) } fn profile(profile: &'static str) -> Self { Self::opt("profile", profile).value_name("PROFILE-NAME") } fn features() -> [Self; 3] { [ Self::multi_opt( "features", "FEATURES", "Space-separated list of features to activate", ), Self::opt("all-features", "Activate all available features"), Self::opt( "no-default-features", "Do not activate the `default` feature", ), ] } fn target_triple(target: &'static str) -> Self { Self::opt("target", target).value_name("TRIPLE") } fn target_dir() -> Self { Self::opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY") } fn manifest_path() -> Self { Self::opt("manifest-path", "Path to Cargo.toml").value_name("PATH") } }
and `--example` specifies the example target to run. At most one of `--bin` or `--example` can be provided. All the arguments following the two dashes (`--`) are passed to the binary to run. If you're passing arguments to both Cargo and the binary, the ones after
random_line_split
cargo-deploy.rs
//! # `cargo deploy` //! Run a binary on a constellation cluster //! //! ## Usage //! ```text //! cargo deploy [options] <host> [--] [<args>]... //! ``` //! //! ## Options //! ```text //! -h --help Show this screen. //! -V --version Show version. //! --format=<fmt> Output format [possible values: human, json] [defa ult: human] //! ``` //! //! Note: --format can also be given as an env var, such as `CONSTELLATION_FORMAT=json` #![warn( missing_copy_implementations, missing_debug_implementations, missing_docs, trivial_numeric_casts, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results, clippy::pedantic )] // from https://github.com/rust-unofficial/patterns/blob/master/anti_patterns/deny-warnings.md use clap::{crate_version, App, AppSettings, Arg, ArgMatches, SubCommand}; use std::{ convert::TryInto, env, ffi::{OsStr, OsString}, iter, net::SocketAddr, process }; use constellation_internal::Format; fn main() { let args = cli().get_matches(); let args = args.subcommand_matches("deploy").unwrap(); let host: SocketAddr = args.value_of("host").unwrap().parse().unwrap(); let forward_args: Vec<&OsStr> = args.values_of_os("args").unwrap_or_default().collect(); let output = cargo(args) .stderr(process::Stdio::inherit()) .output() .expect("Failed to invoke cargo"); if !output.status.success() { process::exit(output.status.code().unwrap_or(101)); } let mut bin = Vec::new(); for message in serde_json::Deserializer::from_slice(&output.stdout).into_iter() { if let cargo_metadata::Message::CompilerArtifact(artifact) = message.unwrap_or_else(|_| panic!("Failed to parse output of cargo")) { if artifact.target.kind == vec![String::from("bin")] || artifact.target.kind == vec![String::from("example")] { bin.push(( artifact.target.name, artifact.filenames.into_iter().next().unwrap(), )); // We're assuming the first filename is the binary – .dSYM etc seem to always be second? 
} } } if bin.len() > 1 { let names = bin .into_iter() .map(|(target_name, _)| target_name) .collect::<Vec<_>>(); println!( "`cargo deploy` could not determine which binary to run. \ Use the `--bin` option to specify a binary.\n\ available binaries: {}", names.join(", ") ); // , or the `default-run` manifest key // TODO: work out best way to get this / behave same as cargo run process::exit(1); } else if bin.is_empty() { println!("a bin target must be available for `cargo deploy`"); process::exit(1); } let path = bin.into_iter().next().unwrap().1; let args: Vec<OsString> = iter::once(OsString::from(&path)) .chain(forward_args.into_iter().map(ToOwned::to_owned)) .collect(); let vars: Vec<(OsString, OsString)> = env::vars_os().collect(); let format = Format::Human; constellation::deploy(host, &path, format, args, vars); } fn cli<'a, 'b>() -> App<'a, 'b> { // https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/bin/cargo/cli.rs#L205-L277 // https://github.com/rust-lang/cargo/blob/982622252a64d7c526c04a244f1a81523dc9ae54/src/bin/cargo/commands/run.rs App::new("cargo") .bin_name("cargo") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::SubcommandRequired, ]) .arg( Arg::opt( "verbose", "Use verbose output (-vv very verbose/build.rs output)", ) .short("v") .multiple(true) .global(true), ) .arg( Arg::opt("color", "Coloring: auto, always, never") .value_name("WHEN") .global(true), ) .arg(Arg::opt("frozen", "Require Cargo.lock and cache are up to date").global(true)) .arg(Arg::opt("locked", "Require Cargo.lock is up to date").global(true)) .arg(Arg::opt("offline", "Run without accessing the network").global(true)) .arg( Arg::multi_opt("config", "KEY=VALUE", "Override a configuration value") .global(true) .hidden(true), ) .arg( Arg::with_name("unstable-features") .help("Unstable (nightly-only) flags to Cargo, see 'cargo -Z help' for details") .short("Z") .value_name("FLAG") .multiple(true) 
.number_of_values(1) .global(true), ) .subcommand( SubCommand::with_name("deploy") .settings(&[ AppSettings::UnifiedHelpMessage, AppSettings::DeriveDisplayOrder, AppSettings::DontCollapseArgsInUsage, AppSettings::TrailingVarArg, ]) .version(crate_version!()) .about("Run a binary or example of the local package on a constellation cluster") // .arg(Arg::opt("quiet", "No output printed to stdout").short("q")) .arg( Arg::with_name("host") .help("Constellation cluster node to connect to (e.g. 10.0.0.1:8888)") .required(true) .validator(|host| { host.parse::<SocketAddr>() .map(drop) .map_err(|err| err.to_string()) }), ) .arg(Arg::with_name("args").multiple(true)) .args(&Arg::targets_bin_example( "Name of the bin target to run", "Name of the example target to run", )) .arg(Arg::package("Package with the target to run")) .arg(Arg::jobs()) .arg(Arg::release( "Build artifacts in release mode, with optimizations", )) .arg(Arg::profile("Build artifacts with the specified profile")) .args(&Arg::features()) .arg(Arg::target_triple("Build for the target triple")) .arg(Arg::target_dir()) .arg(Arg::manifest_path()) // .arg(Arg::message_format()) .after_help( "\ If neither `--bin` nor `--example` are given, then if the package only has one bin target it will be run. Otherwise `--bin` specifies the bin target to run, and `--example` specifies the example target to run. At most one of `--bin` or `--example` can be provided. All the arguments following the two dashes (`--`) are passed to the binary to run. If you're passing arguments to both Cargo and the binary, the ones after `--` go to the binary, the ones before go to Cargo. 
", ), ) } fn cargo(args: &ArgMatches) -> process::Command { let verbose: u64 = args.occurrences_of("verbose"); let color: Option<&str> = args.value_of("color"); let frozen: bool = args.is_present("frozen"); let locked: bool = args.is_present("locked"); let offline: bool = args.is_present("offline"); let config: Vec<&str> = args.values_of("config").unwrap_or_default().collect(); let unstable_features: Vec<&OsStr> = args .values_of_os("unstable-features") .unwrap_or_default() .collect(); let bin: Vec<&str> = args.values_of("bin").unwrap_or_default().collect(); let example: Vec<&str> = args.values_of("example").unwrap_or_default().collect(); let package: Vec<&str> = args.values_of("package").unwrap_or_default().collect(); let jobs: Option<&str> = args.value_of("jobs"); let release: bool = args.is_present("release"); let profile: Option<&str> = args.value_of("profile"); let features: Vec<&str> = args.values_of("features").unwrap_or_default().collect(); let all_features: bool = args.is_present("all-features"); let no_default_features: bool = args.is_present("no-default-features"); let target: Option<&str> = args.value_of("target"); let target_dir: Option<&str> = args.value_of("target-dir"); let manifest_path: Option<&str> = args.value_of("manifest-path"); // let mut args: Vec<String> = Vec::new(); let mut cargo = process::Command::new("cargo"); let _ = cargo.arg("build"); let _ = cargo.arg("--message-format=json"); if verbose > 0 { let _ = cargo.arg(format!("-{}", "v".repeat(verbose.try_into().unwrap()))); } if let Some(color) = color { let _ = cargo.arg(format!("--color={}", color)); } if frozen { let _ = cargo.arg("--frozen"); } if locked { let _ = cargo.arg("--locked"); } if offline { let _ = cargo.arg("--offline"); } for config in config { let _ = cargo.arg(format!("--config={}", config)); } for unstable_features in unstable_features { let mut arg = OsString::from("-Z"); arg.push(unstable_features); let _ = cargo.arg(arg); } for bin in bin { let _ = 
cargo.arg(format!("--bin={}", bin)); } for example in example { let _ = cargo.arg(format!("--example={}", example)); } for package in package { let _ = cargo.arg(format!("--package={}", package)); } if let Some(jobs) = jobs { let _ = cargo.arg(format!("--jobs={}", jobs)); } if release { let _ = cargo.arg("--release"); } if let Some(profile) = profile { let _ = cargo.arg(format!("--profile={}", profile)); } for features in features { let _ = cargo.arg(format!("--features={}", features)); } if all_features { let _ = cargo.arg("--all-features"); } if no_default_features { let _ = cargo.arg("--no-default-features"); } if let Some(target) = target { let _ = cargo.arg(format!("--target={}", target)); } if let Some(target_dir) = target_dir { let _ = cargo.arg(format!("--target-dir={}", target_dir)); } if let Some(manifest_path) = manifest_path { let _ = cargo.arg(format!("--manifest-path={}", manifest_path)); } cargo } // https://github.com/rust-lang/cargo/blob/7059559d71de3fffe8c8cb81e32f323454aa96c5/src/cargo/util/command_prelude.rs trait ArgExt: Sized { fn opt(name: &'static str, help: &'static str) -> Self; fn optional_multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self; fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self; fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2]; fn package(package: &'static str) -> Self; fn jobs() -> Self; fn release(release: &'static str) -> Self; fn profile(profile: &'static str) -> Self; fn features() -> [Self; 3]; fn target_triple(target: &'static str) -> Self; fn target_dir() -> Self; fn manifest_path() -> Self; } impl<'a, 'b> ArgExt for Arg<'a, 'b> { fn opt(name: &'static str, help: &'static str) -> Self { Arg::with_name(name).long(name).help(help) } fn optional_multi_opt( name: &'static str, value_name: &'static str, help: &'static str, ) -> Self { Self::opt(name, help) .value_name(value_name) .multiple(true) .min_values(0) 
.number_of_values(1) } fn multi_opt(name: &'static str, value_name: &'static str, help: &'static str) -> Self { // Note that all `.multiple(true)` arguments in Cargo should specify // `.number_of_values(1)` as well, so that `--foo val1 val2` is // *not* parsed as `foo` with values ["val1", "val2"]. // `number_of_values` should become the default in clap 3. Self::opt(name, help) .value_name(value_name) .multiple(true) .number_of_values(1) } fn targets_bin_example(bin: &'static str, example: &'static str) -> [Self; 2] { [ Self::optional_multi_opt("bin", "NAME", bin), Self::optional_multi_opt("example", "NAME", example), ] } fn package(package: &'static str) -> Self { Self::opt("package", package).short("p").value_name("SPEC") } fn jobs() -> Self { Self::opt("jobs", "Number of parallel jobs, defaults to # of CPUs") .short("j") .value_name("N") } fn release(release: &'static str) -> Self { Self::opt("release", release) } fn profile(profile: &'static str) -> Self { Self::opt("profile", profile).value_name("PROFILE-NAME") } fn features() -> [Self; 3] { [ Self::multi_opt( "features", "FEATURES", "Space-separated list of features to activate", ), Self::opt("all-features", "Activate all available features"), Self::opt( "no-default-features", "Do not activate the `default` feature", ), ] } fn tar
rget: &'static str) -> Self { Self::opt("target", target).value_name("TRIPLE") } fn target_dir() -> Self { Self::opt("target-dir", "Directory for all generated artifacts").value_name("DIRECTORY") } fn manifest_path() -> Self { Self::opt("manifest-path", "Path to Cargo.toml").value_name("PATH") } }
get_triple(ta
identifier_name
generator.rs
use std::ffi::{OsStr, OsString}; use std::fs::{File, OpenOptions}; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::Arc; use std::time::Instant; use std::{env, fs, io, thread}; use opencv_binding_generator::{Generator, IteratorExt}; use crate::docs::transfer_bindings_to_docs; use super::{files_with_extension, files_with_predicate, Library, Result, MODULES, OUT_DIR, SRC_CPP_DIR, SRC_DIR}; pub struct BindingGenerator { build_script_path: OsString, } impl BindingGenerator { pub fn new(build_script_path: OsString) -> Self { Self { build_script_path } } pub fn generate_wrapper(&self, opencv_header_dir: &Path, opencv: &Library) -> Result<()> { let target_docs_dir = env::var_os("OCVRS_DOCS_GENERATE_DIR").map(PathBuf::from); let target_module_dir = OUT_DIR.join("opencv"); let manual_dir = SRC_DIR.join("manual"); eprintln!("=== Generating code in: {}", OUT_DIR.display()); eprintln!("=== Placing generated bindings into: {}", target_module_dir.display()); if let Some(target_docs_dir) = target_docs_dir.as_ref() { eprintln!( "=== Placing static generated docs bindings into: {}", target_docs_dir.display() ); } eprintln!("=== Using OpenCV headers from: {}", opencv_header_dir.display()); let non_dll_files = files_with_predicate(&OUT_DIR, |p| { p.extension().map_or(true, |ext| !ext.eq_ignore_ascii_case("dll")) })?; for path in non_dll_files { let _ = fs::remove_file(path); } let modules = MODULES.get().expect("MODULES not initialized"); self.run(modules, opencv_header_dir, opencv)?; collect_generated_bindings(modules, &target_module_dir, &manual_dir)?; if let Some(target_docs_dir) = target_docs_dir { if !target_docs_dir.exists() { fs::create_dir(&target_docs_dir)?; } transfer_bindings_to_docs(&OUT_DIR, &target_docs_dir); } Ok(()) } fn run(&self, modules: &'static [String], opencv_header_dir: &Path, opencv: &Library) -> Result<()> { let additional_include_dirs = opencv .include_paths 
.iter() .map(|path| path.as_path()) .filter(|&include_path| include_path != opencv_header_dir) .collect::<Vec<_>>(); let gen = Generator::new(opencv_header_dir, &additional_include_dirs, &SRC_CPP_DIR); eprintln!("=== Clang: {}", gen.clang_version()); eprintln!("=== Clang command line args: {:#?}", gen.build_clang_command_line_args()); let additional_include_dirs = Arc::new( additional_include_dirs .into_iter() .map(|p| p.to_str().expect("Can't convert additional include dir to UTF-8 string")) .join(","), ); let opencv_header_dir = Arc::new(opencv_header_dir.to_owned()); let job_server = build_job_server()?; let mut join_handles = Vec::with_capacity(modules.len()); let start = Instant::now(); // todo use thread::scope when MSRV is 1.63 eprintln!("=== Generating {} modules", modules.len()); modules.iter().for_each(|module| { let token = job_server.acquire().expect("Can't acquire token from job server"); let join_handle = thread::spawn({ let additional_include_dirs = Arc::clone(&additional_include_dirs); let opencv_header_dir = Arc::clone(&opencv_header_dir); let build_script_path = self.build_script_path.clone(); move || { let module_start = Instant::now(); let mut bin_generator = Command::new(build_script_path); bin_generator .arg(&*opencv_header_dir) .arg(&*SRC_CPP_DIR) .arg(&*OUT_DIR) .arg(module) .arg(&*additional_include_dirs); eprintln!("=== Running: {bin_generator:?}"); let res = bin_generator .status() .unwrap_or_else(|e| panic!("Can't run bindings generator for module: {module}, error: {e}")); if !res.success() { panic!("Failed to run the bindings generator for module: {module}"); } eprintln!("=== Generated: {module} in {:?}", module_start.elapsed()); drop(token); // needed to move the token to the thread } }); join_handles.push(join_handle); }); for join_handle in join_handles { join_handle.join().expect("Generator process panicked"); } eprintln!("=== Total binding generation time: {:?}", start.elapsed()); Ok(()) } } fn is_type_file(path: &Path, module: 
&str) -> bool { path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| { let mut stem_chars = stem.chars(); (&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits matches!(stem_chars.next(), Some('-')) && // dash module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name matches!(stem_chars.next(), Some('-')) && // dash stem.ends_with(".type") // ends with ".type" }) } fn is_type_externs_file(path: &Path, module: &str) -> bool { path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| { let mut stem_chars = stem.chars(); (&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits matches!(stem_chars.next(), Some('-')) && // dash module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name matches!(stem_chars.next(), Some('-')) && // dash stem.ends_with(".type.externs") // ends with ".type" }) } fn copy_indent(mut read: impl BufRead, mut write: impl Write, indent: &str) -> Result<()> { let mut line = Vec::with_capacity(100); while read.read_until(b'\n', &mut line)? != 0 { write.write_all(indent.as_bytes())?; write.write_all(&line)?; line.clear(); } Ok(()) } fn collect_generated_bindings(modules: &[String], target_module_dir: &Path, manual_dir: &Path) -> Result<()> { if !target_module_dir.exists() { fs::create_dir(target_module_dir)?; } for path in files_with_extension(target_module_dir, "rs")? { let _ = fs::remove_file(path); } fn write_has_module(mut write: impl Write, module: &str) -> Result<()> { Ok(writeln!(write, "#[cfg(ocvrs_has_module_{module})]")?) } fn write_module_include(write: &mut BufWriter<File>, module: &str) -> Result<()>
let add_manual = |file: &mut BufWriter<File>, module: &str| -> Result<bool> { if manual_dir.join(format!("{module}.rs")).exists() { writeln!(file, "pub use crate::manual::{module}::*;")?; Ok(true) } else { Ok(false) } }; let start = Instant::now(); let mut hub_rs = BufWriter::new(File::create(target_module_dir.join("hub.rs"))?); let mut types_rs = BufWriter::new(File::create(target_module_dir.join("types.rs"))?); writeln!(types_rs)?; let mut sys_rs = BufWriter::new(File::create(target_module_dir.join("sys.rs"))?); writeln!(sys_rs, "use crate::{{mod_prelude_sys::*, core}};")?; writeln!(sys_rs)?; for module in modules { // merge multiple *-type.cpp files into a single module_types.hpp let module_cpp = OUT_DIR.join(format!("{module}.cpp")); if module_cpp.is_file() { let module_types_cpp = OUT_DIR.join(format!("{module}_types.hpp")); let mut module_types_file = BufWriter::new( OpenOptions::new() .create(true) .truncate(true) .write(true) .open(module_types_cpp)?, ); let mut type_files = files_with_extension(&OUT_DIR, "cpp")? .filter(|f| is_type_file(f, module)) .collect::<Vec<_>>(); type_files.sort_unstable(); for entry in type_files { io::copy(&mut BufReader::new(File::open(&entry)?), &mut module_types_file)?; let _ = fs::remove_file(entry); } } // add module entry to hub.rs and move the module file into opencv/ write_has_module(&mut hub_rs, module)?; write_module_include(&mut hub_rs, module)?; let module_filename = format!("{module}.rs"); let module_src_file = OUT_DIR.join(&module_filename); let mut module_rs = BufWriter::new(File::create(&target_module_dir.join(&module_filename))?); // Need to wrap modules inside `mod { }` because they have top-level comments (//!) and those don't play well when // module file is include!d (as opposed to connecting the module with `mod` from the parent module). // The same doesn't apply to `sys` and `types` below because they don't contain top-level comments. 
writeln!(module_rs, "pub mod {module} {{")?; copy_indent(BufReader::new(File::open(&module_src_file)?), &mut module_rs, "\t")?; add_manual(&mut module_rs, module)?; writeln!(module_rs, "}}")?; let _ = fs::remove_file(module_src_file); // merge multiple *-.type.rs files into a single types.rs let mut header_written = false; let mut type_files = files_with_extension(&OUT_DIR, "rs")? .filter(|f| is_type_file(f, module)) .collect::<Vec<_>>(); type_files.sort_unstable(); for entry in type_files { if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 { if !header_written { write_has_module(&mut types_rs, module)?; writeln!(types_rs, "mod {module}_types {{")?; writeln!(types_rs, "\tuse crate::{{mod_prelude::*, core, types, sys}};")?; writeln!(types_rs)?; header_written = true; } copy_indent(BufReader::new(File::open(&entry)?), &mut types_rs, "\t")?; } let _ = fs::remove_file(entry); } if header_written { writeln!(types_rs, "}}")?; write_has_module(&mut types_rs, module)?; writeln!(types_rs, "pub use {module}_types::*;")?; writeln!(types_rs)?; } // merge module-specific *.externs.rs and generated type-specific *.type.externs.rs into a single sys.rs let externs_rs = OUT_DIR.join(format!("{module}.externs.rs")); write_has_module(&mut sys_rs, module)?; writeln!(sys_rs, "mod {module}_sys {{")?; writeln!(sys_rs, "\tuse super::*;")?; writeln!(sys_rs)?; writeln!(sys_rs, "\textern \"C\" {{")?; copy_indent(BufReader::new(File::open(&externs_rs)?), &mut sys_rs, "\t\t")?; let _ = fs::remove_file(externs_rs); let mut type_extern_files = files_with_extension(&OUT_DIR, "rs")? 
.filter(|f| is_type_externs_file(f, module)) .collect::<Vec<_>>(); type_extern_files.sort_unstable(); for entry in type_extern_files { if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 { copy_indent(BufReader::new(File::open(&entry)?), &mut sys_rs, "\t\t")?; } let _ = fs::remove_file(entry); } writeln!(sys_rs, "\t}}")?; writeln!(sys_rs, "}}")?; write_has_module(&mut sys_rs, module)?; writeln!(sys_rs, "pub use {module}_sys::*;")?; writeln!(sys_rs)?; } writeln!(hub_rs, "pub mod types {{")?; write_module_include(&mut hub_rs, "types")?; writeln!(hub_rs, "}}")?; writeln!(hub_rs, "#[doc(hidden)]")?; writeln!(hub_rs, "pub mod sys {{")?; write_module_include(&mut hub_rs, "sys")?; writeln!(hub_rs, "}}")?; add_manual(&mut types_rs, "types")?; add_manual(&mut sys_rs, "sys")?; // write hub_prelude that imports all module-specific preludes writeln!(hub_rs, "pub mod hub_prelude {{")?; for module in modules { write!(hub_rs, "\t")?; write_has_module(&mut hub_rs, module)?; writeln!(hub_rs, "\tpub use super::{module}::prelude::*;")?; } writeln!(hub_rs, "}}")?; eprintln!("=== Total binding collection time: {:?}", start.elapsed()); Ok(()) } fn build_job_server() -> Result<Jobserver> { unsafe { jobserver::Client::from_env() } .and_then(|client| { let own_token_released = client.release_raw().is_ok(); let available_jobs = client.available().unwrap_or(0); if available_jobs > 0 { eprintln!("=== Using environment job server with the the amount of available jobs: {available_jobs}"); Some(Jobserver { client, reacquire_token_on_drop: own_token_released, }) } else { client.acquire_raw().expect("Can't reacquire build script thread token"); eprintln!( "=== Available jobs from the environment created jobserver is: {available_jobs} or there is an error reading that value" ); None } }) .or_else(|| { let num_jobs = env::var("NUM_JOBS") .ok() .and_then(|jobs| jobs.parse().ok()) .unwrap_or(2) .max(1); eprintln!("=== Creating a new job server with num_jobs: {num_jobs}"); 
jobserver::Client::new(num_jobs).ok().map(|client| Jobserver { client, reacquire_token_on_drop: false, }) }) .ok_or_else(|| "Can't create job server".into()) } pub struct Jobserver { client: jobserver::Client, reacquire_token_on_drop: bool, } impl Drop for Jobserver { fn drop(&mut self) { if self.reacquire_token_on_drop { self.client.acquire_raw().expect("Can't reacquire build script thread token"); } } } impl Deref for Jobserver { type Target = jobserver::Client; fn deref(&self) -> &Self::Target { &self.client } }
{ // Use include instead of #[path] attribute because rust-analyzer doesn't handle #[path] inside other include! too well: // https://github.com/twistedfall/opencv-rust/issues/418 // https://github.com/rust-lang/rust-analyzer/issues/11682 Ok(writeln!( write, r#"include!(concat!(env!("OUT_DIR"), "/opencv/{module}.rs"));"# )?) }
identifier_body
generator.rs
use std::ffi::{OsStr, OsString}; use std::fs::{File, OpenOptions}; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::Arc; use std::time::Instant; use std::{env, fs, io, thread}; use opencv_binding_generator::{Generator, IteratorExt}; use crate::docs::transfer_bindings_to_docs; use super::{files_with_extension, files_with_predicate, Library, Result, MODULES, OUT_DIR, SRC_CPP_DIR, SRC_DIR}; pub struct BindingGenerator { build_script_path: OsString, } impl BindingGenerator { pub fn new(build_script_path: OsString) -> Self { Self { build_script_path } } pub fn generate_wrapper(&self, opencv_header_dir: &Path, opencv: &Library) -> Result<()> { let target_docs_dir = env::var_os("OCVRS_DOCS_GENERATE_DIR").map(PathBuf::from); let target_module_dir = OUT_DIR.join("opencv"); let manual_dir = SRC_DIR.join("manual"); eprintln!("=== Generating code in: {}", OUT_DIR.display()); eprintln!("=== Placing generated bindings into: {}", target_module_dir.display()); if let Some(target_docs_dir) = target_docs_dir.as_ref() { eprintln!( "=== Placing static generated docs bindings into: {}", target_docs_dir.display() ); } eprintln!("=== Using OpenCV headers from: {}", opencv_header_dir.display()); let non_dll_files = files_with_predicate(&OUT_DIR, |p| { p.extension().map_or(true, |ext| !ext.eq_ignore_ascii_case("dll")) })?; for path in non_dll_files { let _ = fs::remove_file(path); } let modules = MODULES.get().expect("MODULES not initialized"); self.run(modules, opencv_header_dir, opencv)?; collect_generated_bindings(modules, &target_module_dir, &manual_dir)?; if let Some(target_docs_dir) = target_docs_dir { if !target_docs_dir.exists() { fs::create_dir(&target_docs_dir)?; } transfer_bindings_to_docs(&OUT_DIR, &target_docs_dir); } Ok(()) } fn run(&self, modules: &'static [String], opencv_header_dir: &Path, opencv: &Library) -> Result<()> { let additional_include_dirs = opencv .include_paths 
.iter() .map(|path| path.as_path()) .filter(|&include_path| include_path != opencv_header_dir) .collect::<Vec<_>>(); let gen = Generator::new(opencv_header_dir, &additional_include_dirs, &SRC_CPP_DIR); eprintln!("=== Clang: {}", gen.clang_version()); eprintln!("=== Clang command line args: {:#?}", gen.build_clang_command_line_args()); let additional_include_dirs = Arc::new( additional_include_dirs .into_iter() .map(|p| p.to_str().expect("Can't convert additional include dir to UTF-8 string")) .join(","), ); let opencv_header_dir = Arc::new(opencv_header_dir.to_owned()); let job_server = build_job_server()?; let mut join_handles = Vec::with_capacity(modules.len()); let start = Instant::now(); // todo use thread::scope when MSRV is 1.63 eprintln!("=== Generating {} modules", modules.len()); modules.iter().for_each(|module| { let token = job_server.acquire().expect("Can't acquire token from job server"); let join_handle = thread::spawn({ let additional_include_dirs = Arc::clone(&additional_include_dirs); let opencv_header_dir = Arc::clone(&opencv_header_dir); let build_script_path = self.build_script_path.clone(); move || { let module_start = Instant::now(); let mut bin_generator = Command::new(build_script_path); bin_generator .arg(&*opencv_header_dir) .arg(&*SRC_CPP_DIR) .arg(&*OUT_DIR) .arg(module) .arg(&*additional_include_dirs); eprintln!("=== Running: {bin_generator:?}"); let res = bin_generator .status() .unwrap_or_else(|e| panic!("Can't run bindings generator for module: {module}, error: {e}")); if !res.success() { panic!("Failed to run the bindings generator for module: {module}"); } eprintln!("=== Generated: {module} in {:?}", module_start.elapsed()); drop(token); // needed to move the token to the thread } }); join_handles.push(join_handle); }); for join_handle in join_handles { join_handle.join().expect("Generator process panicked"); } eprintln!("=== Total binding generation time: {:?}", start.elapsed()); Ok(()) } } fn is_type_file(path: &Path, module: 
&str) -> bool { path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| { let mut stem_chars = stem.chars(); (&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits matches!(stem_chars.next(), Some('-')) && // dash module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name matches!(stem_chars.next(), Some('-')) && // dash stem.ends_with(".type") // ends with ".type" }) } fn is_type_externs_file(path: &Path, module: &str) -> bool { path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| { let mut stem_chars = stem.chars(); (&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits matches!(stem_chars.next(), Some('-')) && // dash module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name matches!(stem_chars.next(), Some('-')) && // dash stem.ends_with(".type.externs") // ends with ".type" }) } fn copy_indent(mut read: impl BufRead, mut write: impl Write, indent: &str) -> Result<()> { let mut line = Vec::with_capacity(100); while read.read_until(b'\n', &mut line)? != 0 { write.write_all(indent.as_bytes())?; write.write_all(&line)?; line.clear(); } Ok(()) } fn collect_generated_bindings(modules: &[String], target_module_dir: &Path, manual_dir: &Path) -> Result<()> { if !target_module_dir.exists() { fs::create_dir(target_module_dir)?; } for path in files_with_extension(target_module_dir, "rs")? { let _ = fs::remove_file(path); } fn
(mut write: impl Write, module: &str) -> Result<()> { Ok(writeln!(write, "#[cfg(ocvrs_has_module_{module})]")?) } fn write_module_include(write: &mut BufWriter<File>, module: &str) -> Result<()> { // Use include instead of #[path] attribute because rust-analyzer doesn't handle #[path] inside other include! too well: // https://github.com/twistedfall/opencv-rust/issues/418 // https://github.com/rust-lang/rust-analyzer/issues/11682 Ok(writeln!( write, r#"include!(concat!(env!("OUT_DIR"), "/opencv/{module}.rs"));"# )?) } let add_manual = |file: &mut BufWriter<File>, module: &str| -> Result<bool> { if manual_dir.join(format!("{module}.rs")).exists() { writeln!(file, "pub use crate::manual::{module}::*;")?; Ok(true) } else { Ok(false) } }; let start = Instant::now(); let mut hub_rs = BufWriter::new(File::create(target_module_dir.join("hub.rs"))?); let mut types_rs = BufWriter::new(File::create(target_module_dir.join("types.rs"))?); writeln!(types_rs)?; let mut sys_rs = BufWriter::new(File::create(target_module_dir.join("sys.rs"))?); writeln!(sys_rs, "use crate::{{mod_prelude_sys::*, core}};")?; writeln!(sys_rs)?; for module in modules { // merge multiple *-type.cpp files into a single module_types.hpp let module_cpp = OUT_DIR.join(format!("{module}.cpp")); if module_cpp.is_file() { let module_types_cpp = OUT_DIR.join(format!("{module}_types.hpp")); let mut module_types_file = BufWriter::new( OpenOptions::new() .create(true) .truncate(true) .write(true) .open(module_types_cpp)?, ); let mut type_files = files_with_extension(&OUT_DIR, "cpp")? 
.filter(|f| is_type_file(f, module)) .collect::<Vec<_>>(); type_files.sort_unstable(); for entry in type_files { io::copy(&mut BufReader::new(File::open(&entry)?), &mut module_types_file)?; let _ = fs::remove_file(entry); } } // add module entry to hub.rs and move the module file into opencv/ write_has_module(&mut hub_rs, module)?; write_module_include(&mut hub_rs, module)?; let module_filename = format!("{module}.rs"); let module_src_file = OUT_DIR.join(&module_filename); let mut module_rs = BufWriter::new(File::create(&target_module_dir.join(&module_filename))?); // Need to wrap modules inside `mod { }` because they have top-level comments (//!) and those don't play well when // module file is include!d (as opposed to connecting the module with `mod` from the parent module). // The same doesn't apply to `sys` and `types` below because they don't contain top-level comments. writeln!(module_rs, "pub mod {module} {{")?; copy_indent(BufReader::new(File::open(&module_src_file)?), &mut module_rs, "\t")?; add_manual(&mut module_rs, module)?; writeln!(module_rs, "}}")?; let _ = fs::remove_file(module_src_file); // merge multiple *-.type.rs files into a single types.rs let mut header_written = false; let mut type_files = files_with_extension(&OUT_DIR, "rs")? 
.filter(|f| is_type_file(f, module)) .collect::<Vec<_>>(); type_files.sort_unstable(); for entry in type_files { if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 { if !header_written { write_has_module(&mut types_rs, module)?; writeln!(types_rs, "mod {module}_types {{")?; writeln!(types_rs, "\tuse crate::{{mod_prelude::*, core, types, sys}};")?; writeln!(types_rs)?; header_written = true; } copy_indent(BufReader::new(File::open(&entry)?), &mut types_rs, "\t")?; } let _ = fs::remove_file(entry); } if header_written { writeln!(types_rs, "}}")?; write_has_module(&mut types_rs, module)?; writeln!(types_rs, "pub use {module}_types::*;")?; writeln!(types_rs)?; } // merge module-specific *.externs.rs and generated type-specific *.type.externs.rs into a single sys.rs let externs_rs = OUT_DIR.join(format!("{module}.externs.rs")); write_has_module(&mut sys_rs, module)?; writeln!(sys_rs, "mod {module}_sys {{")?; writeln!(sys_rs, "\tuse super::*;")?; writeln!(sys_rs)?; writeln!(sys_rs, "\textern \"C\" {{")?; copy_indent(BufReader::new(File::open(&externs_rs)?), &mut sys_rs, "\t\t")?; let _ = fs::remove_file(externs_rs); let mut type_extern_files = files_with_extension(&OUT_DIR, "rs")? 
.filter(|f| is_type_externs_file(f, module)) .collect::<Vec<_>>(); type_extern_files.sort_unstable(); for entry in type_extern_files { if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 { copy_indent(BufReader::new(File::open(&entry)?), &mut sys_rs, "\t\t")?; } let _ = fs::remove_file(entry); } writeln!(sys_rs, "\t}}")?; writeln!(sys_rs, "}}")?; write_has_module(&mut sys_rs, module)?; writeln!(sys_rs, "pub use {module}_sys::*;")?; writeln!(sys_rs)?; } writeln!(hub_rs, "pub mod types {{")?; write_module_include(&mut hub_rs, "types")?; writeln!(hub_rs, "}}")?; writeln!(hub_rs, "#[doc(hidden)]")?; writeln!(hub_rs, "pub mod sys {{")?; write_module_include(&mut hub_rs, "sys")?; writeln!(hub_rs, "}}")?; add_manual(&mut types_rs, "types")?; add_manual(&mut sys_rs, "sys")?; // write hub_prelude that imports all module-specific preludes writeln!(hub_rs, "pub mod hub_prelude {{")?; for module in modules { write!(hub_rs, "\t")?; write_has_module(&mut hub_rs, module)?; writeln!(hub_rs, "\tpub use super::{module}::prelude::*;")?; } writeln!(hub_rs, "}}")?; eprintln!("=== Total binding collection time: {:?}", start.elapsed()); Ok(()) } fn build_job_server() -> Result<Jobserver> { unsafe { jobserver::Client::from_env() } .and_then(|client| { let own_token_released = client.release_raw().is_ok(); let available_jobs = client.available().unwrap_or(0); if available_jobs > 0 { eprintln!("=== Using environment job server with the the amount of available jobs: {available_jobs}"); Some(Jobserver { client, reacquire_token_on_drop: own_token_released, }) } else { client.acquire_raw().expect("Can't reacquire build script thread token"); eprintln!( "=== Available jobs from the environment created jobserver is: {available_jobs} or there is an error reading that value" ); None } }) .or_else(|| { let num_jobs = env::var("NUM_JOBS") .ok() .and_then(|jobs| jobs.parse().ok()) .unwrap_or(2) .max(1); eprintln!("=== Creating a new job server with num_jobs: {num_jobs}"); 
jobserver::Client::new(num_jobs).ok().map(|client| Jobserver { client, reacquire_token_on_drop: false, }) }) .ok_or_else(|| "Can't create job server".into()) } pub struct Jobserver { client: jobserver::Client, reacquire_token_on_drop: bool, } impl Drop for Jobserver { fn drop(&mut self) { if self.reacquire_token_on_drop { self.client.acquire_raw().expect("Can't reacquire build script thread token"); } } } impl Deref for Jobserver { type Target = jobserver::Client; fn deref(&self) -> &Self::Target { &self.client } }
write_has_module
identifier_name
generator.rs
use std::ffi::{OsStr, OsString}; use std::fs::{File, OpenOptions}; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::Arc; use std::time::Instant; use std::{env, fs, io, thread}; use opencv_binding_generator::{Generator, IteratorExt}; use crate::docs::transfer_bindings_to_docs; use super::{files_with_extension, files_with_predicate, Library, Result, MODULES, OUT_DIR, SRC_CPP_DIR, SRC_DIR}; pub struct BindingGenerator { build_script_path: OsString, } impl BindingGenerator { pub fn new(build_script_path: OsString) -> Self { Self { build_script_path } } pub fn generate_wrapper(&self, opencv_header_dir: &Path, opencv: &Library) -> Result<()> { let target_docs_dir = env::var_os("OCVRS_DOCS_GENERATE_DIR").map(PathBuf::from); let target_module_dir = OUT_DIR.join("opencv"); let manual_dir = SRC_DIR.join("manual"); eprintln!("=== Generating code in: {}", OUT_DIR.display()); eprintln!("=== Placing generated bindings into: {}", target_module_dir.display()); if let Some(target_docs_dir) = target_docs_dir.as_ref() { eprintln!( "=== Placing static generated docs bindings into: {}", target_docs_dir.display() ); } eprintln!("=== Using OpenCV headers from: {}", opencv_header_dir.display()); let non_dll_files = files_with_predicate(&OUT_DIR, |p| { p.extension().map_or(true, |ext| !ext.eq_ignore_ascii_case("dll")) })?; for path in non_dll_files { let _ = fs::remove_file(path); } let modules = MODULES.get().expect("MODULES not initialized"); self.run(modules, opencv_header_dir, opencv)?; collect_generated_bindings(modules, &target_module_dir, &manual_dir)?; if let Some(target_docs_dir) = target_docs_dir { if !target_docs_dir.exists() { fs::create_dir(&target_docs_dir)?; } transfer_bindings_to_docs(&OUT_DIR, &target_docs_dir); } Ok(()) } fn run(&self, modules: &'static [String], opencv_header_dir: &Path, opencv: &Library) -> Result<()> { let additional_include_dirs = opencv .include_paths 
.iter() .map(|path| path.as_path()) .filter(|&include_path| include_path != opencv_header_dir) .collect::<Vec<_>>(); let gen = Generator::new(opencv_header_dir, &additional_include_dirs, &SRC_CPP_DIR); eprintln!("=== Clang: {}", gen.clang_version()); eprintln!("=== Clang command line args: {:#?}", gen.build_clang_command_line_args()); let additional_include_dirs = Arc::new( additional_include_dirs .into_iter() .map(|p| p.to_str().expect("Can't convert additional include dir to UTF-8 string")) .join(","), ); let opencv_header_dir = Arc::new(opencv_header_dir.to_owned()); let job_server = build_job_server()?; let mut join_handles = Vec::with_capacity(modules.len()); let start = Instant::now(); // todo use thread::scope when MSRV is 1.63 eprintln!("=== Generating {} modules", modules.len()); modules.iter().for_each(|module| { let token = job_server.acquire().expect("Can't acquire token from job server"); let join_handle = thread::spawn({ let additional_include_dirs = Arc::clone(&additional_include_dirs); let opencv_header_dir = Arc::clone(&opencv_header_dir); let build_script_path = self.build_script_path.clone(); move || { let module_start = Instant::now(); let mut bin_generator = Command::new(build_script_path); bin_generator .arg(&*opencv_header_dir) .arg(&*SRC_CPP_DIR) .arg(&*OUT_DIR) .arg(module) .arg(&*additional_include_dirs); eprintln!("=== Running: {bin_generator:?}"); let res = bin_generator .status() .unwrap_or_else(|e| panic!("Can't run bindings generator for module: {module}, error: {e}")); if !res.success() { panic!("Failed to run the bindings generator for module: {module}"); } eprintln!("=== Generated: {module} in {:?}", module_start.elapsed()); drop(token); // needed to move the token to the thread } }); join_handles.push(join_handle); }); for join_handle in join_handles { join_handle.join().expect("Generator process panicked"); } eprintln!("=== Total binding generation time: {:?}", start.elapsed()); Ok(()) } } fn is_type_file(path: &Path, module: 
&str) -> bool { path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| { let mut stem_chars = stem.chars(); (&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits matches!(stem_chars.next(), Some('-')) && // dash module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name matches!(stem_chars.next(), Some('-')) && // dash stem.ends_with(".type") // ends with ".type" }) } fn is_type_externs_file(path: &Path, module: &str) -> bool { path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| { let mut stem_chars = stem.chars(); (&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits matches!(stem_chars.next(), Some('-')) && // dash module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name matches!(stem_chars.next(), Some('-')) && // dash stem.ends_with(".type.externs") // ends with ".type" }) } fn copy_indent(mut read: impl BufRead, mut write: impl Write, indent: &str) -> Result<()> { let mut line = Vec::with_capacity(100); while read.read_until(b'\n', &mut line)? != 0 { write.write_all(indent.as_bytes())?; write.write_all(&line)?; line.clear(); } Ok(()) } fn collect_generated_bindings(modules: &[String], target_module_dir: &Path, manual_dir: &Path) -> Result<()> { if !target_module_dir.exists() { fs::create_dir(target_module_dir)?; } for path in files_with_extension(target_module_dir, "rs")? { let _ = fs::remove_file(path); } fn write_has_module(mut write: impl Write, module: &str) -> Result<()> { Ok(writeln!(write, "#[cfg(ocvrs_has_module_{module})]")?) } fn write_module_include(write: &mut BufWriter<File>, module: &str) -> Result<()> { // Use include instead of #[path] attribute because rust-analyzer doesn't handle #[path] inside other include! too well: // https://github.com/twistedfall/opencv-rust/issues/418 // https://github.com/rust-lang/rust-analyzer/issues/11682 Ok(writeln!( write, r#"include!(concat!(env!("OUT_DIR"), "/opencv/{module}.rs"));"# )?) 
} let add_manual = |file: &mut BufWriter<File>, module: &str| -> Result<bool> { if manual_dir.join(format!("{module}.rs")).exists() { writeln!(file, "pub use crate::manual::{module}::*;")?; Ok(true) } else { Ok(false) } }; let start = Instant::now(); let mut hub_rs = BufWriter::new(File::create(target_module_dir.join("hub.rs"))?); let mut types_rs = BufWriter::new(File::create(target_module_dir.join("types.rs"))?); writeln!(types_rs)?; let mut sys_rs = BufWriter::new(File::create(target_module_dir.join("sys.rs"))?); writeln!(sys_rs, "use crate::{{mod_prelude_sys::*, core}};")?; writeln!(sys_rs)?; for module in modules { // merge multiple *-type.cpp files into a single module_types.hpp let module_cpp = OUT_DIR.join(format!("{module}.cpp")); if module_cpp.is_file()
// add module entry to hub.rs and move the module file into opencv/ write_has_module(&mut hub_rs, module)?; write_module_include(&mut hub_rs, module)?; let module_filename = format!("{module}.rs"); let module_src_file = OUT_DIR.join(&module_filename); let mut module_rs = BufWriter::new(File::create(&target_module_dir.join(&module_filename))?); // Need to wrap modules inside `mod { }` because they have top-level comments (//!) and those don't play well when // module file is include!d (as opposed to connecting the module with `mod` from the parent module). // The same doesn't apply to `sys` and `types` below because they don't contain top-level comments. writeln!(module_rs, "pub mod {module} {{")?; copy_indent(BufReader::new(File::open(&module_src_file)?), &mut module_rs, "\t")?; add_manual(&mut module_rs, module)?; writeln!(module_rs, "}}")?; let _ = fs::remove_file(module_src_file); // merge multiple *-.type.rs files into a single types.rs let mut header_written = false; let mut type_files = files_with_extension(&OUT_DIR, "rs")? 
.filter(|f| is_type_file(f, module)) .collect::<Vec<_>>(); type_files.sort_unstable(); for entry in type_files { if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 { if !header_written { write_has_module(&mut types_rs, module)?; writeln!(types_rs, "mod {module}_types {{")?; writeln!(types_rs, "\tuse crate::{{mod_prelude::*, core, types, sys}};")?; writeln!(types_rs)?; header_written = true; } copy_indent(BufReader::new(File::open(&entry)?), &mut types_rs, "\t")?; } let _ = fs::remove_file(entry); } if header_written { writeln!(types_rs, "}}")?; write_has_module(&mut types_rs, module)?; writeln!(types_rs, "pub use {module}_types::*;")?; writeln!(types_rs)?; } // merge module-specific *.externs.rs and generated type-specific *.type.externs.rs into a single sys.rs let externs_rs = OUT_DIR.join(format!("{module}.externs.rs")); write_has_module(&mut sys_rs, module)?; writeln!(sys_rs, "mod {module}_sys {{")?; writeln!(sys_rs, "\tuse super::*;")?; writeln!(sys_rs)?; writeln!(sys_rs, "\textern \"C\" {{")?; copy_indent(BufReader::new(File::open(&externs_rs)?), &mut sys_rs, "\t\t")?; let _ = fs::remove_file(externs_rs); let mut type_extern_files = files_with_extension(&OUT_DIR, "rs")? 
.filter(|f| is_type_externs_file(f, module)) .collect::<Vec<_>>(); type_extern_files.sort_unstable(); for entry in type_extern_files { if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 { copy_indent(BufReader::new(File::open(&entry)?), &mut sys_rs, "\t\t")?; } let _ = fs::remove_file(entry); } writeln!(sys_rs, "\t}}")?; writeln!(sys_rs, "}}")?; write_has_module(&mut sys_rs, module)?; writeln!(sys_rs, "pub use {module}_sys::*;")?; writeln!(sys_rs)?; } writeln!(hub_rs, "pub mod types {{")?; write_module_include(&mut hub_rs, "types")?; writeln!(hub_rs, "}}")?; writeln!(hub_rs, "#[doc(hidden)]")?; writeln!(hub_rs, "pub mod sys {{")?; write_module_include(&mut hub_rs, "sys")?; writeln!(hub_rs, "}}")?; add_manual(&mut types_rs, "types")?; add_manual(&mut sys_rs, "sys")?; // write hub_prelude that imports all module-specific preludes writeln!(hub_rs, "pub mod hub_prelude {{")?; for module in modules { write!(hub_rs, "\t")?; write_has_module(&mut hub_rs, module)?; writeln!(hub_rs, "\tpub use super::{module}::prelude::*;")?; } writeln!(hub_rs, "}}")?; eprintln!("=== Total binding collection time: {:?}", start.elapsed()); Ok(()) } fn build_job_server() -> Result<Jobserver> { unsafe { jobserver::Client::from_env() } .and_then(|client| { let own_token_released = client.release_raw().is_ok(); let available_jobs = client.available().unwrap_or(0); if available_jobs > 0 { eprintln!("=== Using environment job server with the the amount of available jobs: {available_jobs}"); Some(Jobserver { client, reacquire_token_on_drop: own_token_released, }) } else { client.acquire_raw().expect("Can't reacquire build script thread token"); eprintln!( "=== Available jobs from the environment created jobserver is: {available_jobs} or there is an error reading that value" ); None } }) .or_else(|| { let num_jobs = env::var("NUM_JOBS") .ok() .and_then(|jobs| jobs.parse().ok()) .unwrap_or(2) .max(1); eprintln!("=== Creating a new job server with num_jobs: {num_jobs}"); 
jobserver::Client::new(num_jobs).ok().map(|client| Jobserver { client, reacquire_token_on_drop: false, }) }) .ok_or_else(|| "Can't create job server".into()) } pub struct Jobserver { client: jobserver::Client, reacquire_token_on_drop: bool, } impl Drop for Jobserver { fn drop(&mut self) { if self.reacquire_token_on_drop { self.client.acquire_raw().expect("Can't reacquire build script thread token"); } } } impl Deref for Jobserver { type Target = jobserver::Client; fn deref(&self) -> &Self::Target { &self.client } }
{ let module_types_cpp = OUT_DIR.join(format!("{module}_types.hpp")); let mut module_types_file = BufWriter::new( OpenOptions::new() .create(true) .truncate(true) .write(true) .open(module_types_cpp)?, ); let mut type_files = files_with_extension(&OUT_DIR, "cpp")? .filter(|f| is_type_file(f, module)) .collect::<Vec<_>>(); type_files.sort_unstable(); for entry in type_files { io::copy(&mut BufReader::new(File::open(&entry)?), &mut module_types_file)?; let _ = fs::remove_file(entry); } }
conditional_block
generator.rs
use std::ffi::{OsStr, OsString}; use std::fs::{File, OpenOptions}; use std::io::{BufRead, BufReader, BufWriter, Write}; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::Arc; use std::time::Instant; use std::{env, fs, io, thread}; use opencv_binding_generator::{Generator, IteratorExt}; use crate::docs::transfer_bindings_to_docs; use super::{files_with_extension, files_with_predicate, Library, Result, MODULES, OUT_DIR, SRC_CPP_DIR, SRC_DIR}; pub struct BindingGenerator { build_script_path: OsString, } impl BindingGenerator { pub fn new(build_script_path: OsString) -> Self { Self { build_script_path } } pub fn generate_wrapper(&self, opencv_header_dir: &Path, opencv: &Library) -> Result<()> { let target_docs_dir = env::var_os("OCVRS_DOCS_GENERATE_DIR").map(PathBuf::from); let target_module_dir = OUT_DIR.join("opencv"); let manual_dir = SRC_DIR.join("manual"); eprintln!("=== Generating code in: {}", OUT_DIR.display()); eprintln!("=== Placing generated bindings into: {}", target_module_dir.display()); if let Some(target_docs_dir) = target_docs_dir.as_ref() { eprintln!( "=== Placing static generated docs bindings into: {}", target_docs_dir.display() ); } eprintln!("=== Using OpenCV headers from: {}", opencv_header_dir.display()); let non_dll_files = files_with_predicate(&OUT_DIR, |p| { p.extension().map_or(true, |ext| !ext.eq_ignore_ascii_case("dll")) })?; for path in non_dll_files { let _ = fs::remove_file(path); } let modules = MODULES.get().expect("MODULES not initialized"); self.run(modules, opencv_header_dir, opencv)?; collect_generated_bindings(modules, &target_module_dir, &manual_dir)?; if let Some(target_docs_dir) = target_docs_dir { if !target_docs_dir.exists() { fs::create_dir(&target_docs_dir)?; } transfer_bindings_to_docs(&OUT_DIR, &target_docs_dir); } Ok(()) } fn run(&self, modules: &'static [String], opencv_header_dir: &Path, opencv: &Library) -> Result<()> { let additional_include_dirs = opencv .include_paths 
.iter() .map(|path| path.as_path()) .filter(|&include_path| include_path != opencv_header_dir) .collect::<Vec<_>>(); let gen = Generator::new(opencv_header_dir, &additional_include_dirs, &SRC_CPP_DIR); eprintln!("=== Clang: {}", gen.clang_version()); eprintln!("=== Clang command line args: {:#?}", gen.build_clang_command_line_args()); let additional_include_dirs = Arc::new( additional_include_dirs .into_iter() .map(|p| p.to_str().expect("Can't convert additional include dir to UTF-8 string")) .join(","), ); let opencv_header_dir = Arc::new(opencv_header_dir.to_owned()); let job_server = build_job_server()?; let mut join_handles = Vec::with_capacity(modules.len()); let start = Instant::now(); // todo use thread::scope when MSRV is 1.63 eprintln!("=== Generating {} modules", modules.len()); modules.iter().for_each(|module| { let token = job_server.acquire().expect("Can't acquire token from job server"); let join_handle = thread::spawn({ let additional_include_dirs = Arc::clone(&additional_include_dirs); let opencv_header_dir = Arc::clone(&opencv_header_dir); let build_script_path = self.build_script_path.clone(); move || { let module_start = Instant::now(); let mut bin_generator = Command::new(build_script_path); bin_generator .arg(&*opencv_header_dir) .arg(&*SRC_CPP_DIR) .arg(&*OUT_DIR) .arg(module) .arg(&*additional_include_dirs); eprintln!("=== Running: {bin_generator:?}"); let res = bin_generator .status() .unwrap_or_else(|e| panic!("Can't run bindings generator for module: {module}, error: {e}")); if !res.success() { panic!("Failed to run the bindings generator for module: {module}"); } eprintln!("=== Generated: {module} in {:?}", module_start.elapsed()); drop(token); // needed to move the token to the thread } }); join_handles.push(join_handle); }); for join_handle in join_handles { join_handle.join().expect("Generator process panicked"); } eprintln!("=== Total binding generation time: {:?}", start.elapsed()); Ok(()) } } fn is_type_file(path: &Path, module: 
&str) -> bool { path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| { let mut stem_chars = stem.chars(); (&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits matches!(stem_chars.next(), Some('-')) && // dash module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name matches!(stem_chars.next(), Some('-')) && // dash stem.ends_with(".type") // ends with ".type" }) } fn is_type_externs_file(path: &Path, module: &str) -> bool { path.file_stem().and_then(OsStr::to_str).map_or(false, |stem| { let mut stem_chars = stem.chars(); (&mut stem_chars).take(3).all(|c| c.is_ascii_digit()) && // first 3 chars are digits matches!(stem_chars.next(), Some('-')) && // dash module.chars().zip(&mut stem_chars).all(|(m, s)| m == s) && // module name matches!(stem_chars.next(), Some('-')) && // dash stem.ends_with(".type.externs") // ends with ".type" }) } fn copy_indent(mut read: impl BufRead, mut write: impl Write, indent: &str) -> Result<()> { let mut line = Vec::with_capacity(100); while read.read_until(b'\n', &mut line)? != 0 { write.write_all(indent.as_bytes())?; write.write_all(&line)?; line.clear(); } Ok(()) } fn collect_generated_bindings(modules: &[String], target_module_dir: &Path, manual_dir: &Path) -> Result<()> { if !target_module_dir.exists() { fs::create_dir(target_module_dir)?; } for path in files_with_extension(target_module_dir, "rs")? { let _ = fs::remove_file(path); } fn write_has_module(mut write: impl Write, module: &str) -> Result<()> { Ok(writeln!(write, "#[cfg(ocvrs_has_module_{module})]")?) } fn write_module_include(write: &mut BufWriter<File>, module: &str) -> Result<()> { // Use include instead of #[path] attribute because rust-analyzer doesn't handle #[path] inside other include! too well: // https://github.com/twistedfall/opencv-rust/issues/418 // https://github.com/rust-lang/rust-analyzer/issues/11682 Ok(writeln!( write, r#"include!(concat!(env!("OUT_DIR"), "/opencv/{module}.rs"));"# )?) 
} let add_manual = |file: &mut BufWriter<File>, module: &str| -> Result<bool> { if manual_dir.join(format!("{module}.rs")).exists() { writeln!(file, "pub use crate::manual::{module}::*;")?; Ok(true) } else { Ok(false) } }; let start = Instant::now(); let mut hub_rs = BufWriter::new(File::create(target_module_dir.join("hub.rs"))?); let mut types_rs = BufWriter::new(File::create(target_module_dir.join("types.rs"))?); writeln!(types_rs)?; let mut sys_rs = BufWriter::new(File::create(target_module_dir.join("sys.rs"))?); writeln!(sys_rs, "use crate::{{mod_prelude_sys::*, core}};")?; writeln!(sys_rs)?; for module in modules { // merge multiple *-type.cpp files into a single module_types.hpp let module_cpp = OUT_DIR.join(format!("{module}.cpp")); if module_cpp.is_file() { let module_types_cpp = OUT_DIR.join(format!("{module}_types.hpp")); let mut module_types_file = BufWriter::new( OpenOptions::new() .create(true) .truncate(true) .write(true) .open(module_types_cpp)?, ); let mut type_files = files_with_extension(&OUT_DIR, "cpp")? .filter(|f| is_type_file(f, module)) .collect::<Vec<_>>(); type_files.sort_unstable(); for entry in type_files {
io::copy(&mut BufReader::new(File::open(&entry)?), &mut module_types_file)?; let _ = fs::remove_file(entry); } } // add module entry to hub.rs and move the module file into opencv/ write_has_module(&mut hub_rs, module)?; write_module_include(&mut hub_rs, module)?; let module_filename = format!("{module}.rs"); let module_src_file = OUT_DIR.join(&module_filename); let mut module_rs = BufWriter::new(File::create(&target_module_dir.join(&module_filename))?); // Need to wrap modules inside `mod { }` because they have top-level comments (//!) and those don't play well when // module file is include!d (as opposed to connecting the module with `mod` from the parent module). // The same doesn't apply to `sys` and `types` below because they don't contain top-level comments. writeln!(module_rs, "pub mod {module} {{")?; copy_indent(BufReader::new(File::open(&module_src_file)?), &mut module_rs, "\t")?; add_manual(&mut module_rs, module)?; writeln!(module_rs, "}}")?; let _ = fs::remove_file(module_src_file); // merge multiple *-.type.rs files into a single types.rs let mut header_written = false; let mut type_files = files_with_extension(&OUT_DIR, "rs")? 
.filter(|f| is_type_file(f, module)) .collect::<Vec<_>>(); type_files.sort_unstable(); for entry in type_files { if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 { if !header_written { write_has_module(&mut types_rs, module)?; writeln!(types_rs, "mod {module}_types {{")?; writeln!(types_rs, "\tuse crate::{{mod_prelude::*, core, types, sys}};")?; writeln!(types_rs)?; header_written = true; } copy_indent(BufReader::new(File::open(&entry)?), &mut types_rs, "\t")?; } let _ = fs::remove_file(entry); } if header_written { writeln!(types_rs, "}}")?; write_has_module(&mut types_rs, module)?; writeln!(types_rs, "pub use {module}_types::*;")?; writeln!(types_rs)?; } // merge module-specific *.externs.rs and generated type-specific *.type.externs.rs into a single sys.rs let externs_rs = OUT_DIR.join(format!("{module}.externs.rs")); write_has_module(&mut sys_rs, module)?; writeln!(sys_rs, "mod {module}_sys {{")?; writeln!(sys_rs, "\tuse super::*;")?; writeln!(sys_rs)?; writeln!(sys_rs, "\textern \"C\" {{")?; copy_indent(BufReader::new(File::open(&externs_rs)?), &mut sys_rs, "\t\t")?; let _ = fs::remove_file(externs_rs); let mut type_extern_files = files_with_extension(&OUT_DIR, "rs")? 
.filter(|f| is_type_externs_file(f, module)) .collect::<Vec<_>>(); type_extern_files.sort_unstable(); for entry in type_extern_files { if entry.metadata().map(|meta| meta.len()).unwrap_or(0) > 0 { copy_indent(BufReader::new(File::open(&entry)?), &mut sys_rs, "\t\t")?; } let _ = fs::remove_file(entry); } writeln!(sys_rs, "\t}}")?; writeln!(sys_rs, "}}")?; write_has_module(&mut sys_rs, module)?; writeln!(sys_rs, "pub use {module}_sys::*;")?; writeln!(sys_rs)?; } writeln!(hub_rs, "pub mod types {{")?; write_module_include(&mut hub_rs, "types")?; writeln!(hub_rs, "}}")?; writeln!(hub_rs, "#[doc(hidden)]")?; writeln!(hub_rs, "pub mod sys {{")?; write_module_include(&mut hub_rs, "sys")?; writeln!(hub_rs, "}}")?; add_manual(&mut types_rs, "types")?; add_manual(&mut sys_rs, "sys")?; // write hub_prelude that imports all module-specific preludes writeln!(hub_rs, "pub mod hub_prelude {{")?; for module in modules { write!(hub_rs, "\t")?; write_has_module(&mut hub_rs, module)?; writeln!(hub_rs, "\tpub use super::{module}::prelude::*;")?; } writeln!(hub_rs, "}}")?; eprintln!("=== Total binding collection time: {:?}", start.elapsed()); Ok(()) } fn build_job_server() -> Result<Jobserver> { unsafe { jobserver::Client::from_env() } .and_then(|client| { let own_token_released = client.release_raw().is_ok(); let available_jobs = client.available().unwrap_or(0); if available_jobs > 0 { eprintln!("=== Using environment job server with the the amount of available jobs: {available_jobs}"); Some(Jobserver { client, reacquire_token_on_drop: own_token_released, }) } else { client.acquire_raw().expect("Can't reacquire build script thread token"); eprintln!( "=== Available jobs from the environment created jobserver is: {available_jobs} or there is an error reading that value" ); None } }) .or_else(|| { let num_jobs = env::var("NUM_JOBS") .ok() .and_then(|jobs| jobs.parse().ok()) .unwrap_or(2) .max(1); eprintln!("=== Creating a new job server with num_jobs: {num_jobs}"); 
jobserver::Client::new(num_jobs).ok().map(|client| Jobserver { client, reacquire_token_on_drop: false, }) }) .ok_or_else(|| "Can't create job server".into()) } pub struct Jobserver { client: jobserver::Client, reacquire_token_on_drop: bool, } impl Drop for Jobserver { fn drop(&mut self) { if self.reacquire_token_on_drop { self.client.acquire_raw().expect("Can't reacquire build script thread token"); } } } impl Deref for Jobserver { type Target = jobserver::Client; fn deref(&self) -> &Self::Target { &self.client } }
random_line_split
Mission_Util_V01.py
import re from os import sep, path from traceback import format_exc import tkinter as tk from tkinter import ttk from tkinter.messagebox import showinfo, showerror from tkinter.scrolledtext import ScrolledText from tkinter.filedialog import askopenfilename # Game takes vanillas based on their index in this list, so no touchy VANILLAS = [ "Empty", "Timer", "Wires", "BigButton", "Keypad", "Simon", "WhosOnFirst", "Memory", "Morse", "Venn", "WireSequence", "Maze", "Password", "NeedyVentGas", "NeedyCapacitor", "NeedyKnob"] DMG_LINE_TYPES = { r"^\/\/\/\/ (.*?)$": "name", r"^\/\/\/ (.*?)$": "description", r"^((\d+):)?(\d+):(\d+)$": "time_limit", r"^(\d+)X$": "strikes", r"^needyactivationtime:(\d+)$": "needy_activation_time", r"^widgets:(\d+)$": "widgets", r"^!?((\d+)\s?\*\s?)?(.*?)$": "modules" } DMG_IGNORE = ["room:", "factory:", "mode:"] #Ignore these as there are no such fields in a mission asset DMG_TOGGLABLES = { #Values for checkboxes (DMG_Line: (key, value)) "frontonly": ("front_only", 1), "nopacing": ("pacing", 0) } def change_text(widget, text): try: #ScrolledText widget.delete("1.0", tk.END) widget.insert("1.0", text) except tk.TclError: #Bad entry index - Entry widget.delete(0, tk.END) widget.insert(0, text) class AssetFile: # default settings/variable init def __init__(self): self.iden = "mission" self.name = "Mission" self.description = "a mission" self.time_limit = "300" self.strikes = "3" self.needy_activation_time = "90" self.front_only = 0 self.widgets = "5" self.modules = "" self.separator = "\n" self.pacing = 1 # ran when pressing the create button, returns true or false based on sanity() def enter(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): # takes all of the inputted info and puts it into AssetFile's variables if they weren't left blank self.iden = iden.strip() if iden.strip() != "" else self.iden self.name = name.strip() if name.strip() != "" else self.name 
self.description = description.strip() if "\'{}\'".format(description.strip()) != "" else self.description self.time_limit = time_limit.strip() if time_limit.strip() != "" else self.time_limit self.strikes = strikes.strip() if strikes.strip() != "" else self.strikes self.needy_activation_time = needy_activation_time.strip() if needy_activation_time.strip() != "" else self.needy_activation_time self.front_only = front_only self.widgets = widgets.strip() if widgets.strip() != "" else self.widgets self.modules = modules.strip() if modules.strip() != "" else self.modules switcher = {"newlines": "\n", "spaces": " ", "tabs": "\t"} self.separator = switcher.get(separator, "\n") self.pacing = pacing # stops the process if any of the variables don't pass the sanity check if not self.sanity(): return False # string of pain, the whole asset file before the modlist is written here retstring = "%YAML 1.1\n%TAG !u! tag:unity3d.com,2011:\n--- !u!114 &11400000\nMonoBehaviour:\n m_ObjectHideFlags: 0\n m_PrefabParentObject: {{fileID: 0}}\n m_PrefabInternal: {{fileID: 0}}\n m_GameObject: {{fileID: 0}}\n m_Enabled: 1\n m_EditorHideFlags: 0\n m_Script: {{fileID: -548183353, guid: 45b809be76fd7a3468b6f517bced6f28, type: 3}}\n m_Name: {}\n m_EditorClassIdentifier: {}\n DisplayName: {}\n Description: {}\n GeneratorSetting:\n TimeLimit: {}\n NumStrikes: {}\n TimeBeforeNeedyActivation: {}\n FrontFaceOnly: {}\n OptionalWidgetCount: {}\n ComponentPools:".format(self.iden, self.iden, self.name, self.description, self.time_limit, self.strikes, self.needy_activation_time, str(self.front_only), self.widgets) modlist = self.modules.split(self.separator) for i in range(len(modlist)): count = 1 # how many of current module there is vann = 1 # whether current module is vanilla, 1 is vanilla, 0 is modded component = "" # vanilla modules are put in ComponentTypes rather than ModTypes, so seperate string to go under ComponentTypes modstring = " []" # this goes under ModTypes, default without mods is 
here # this counts modules if more than one # example 2*module would set count to 2 and then continue with everything after 2* if "*" in modlist[i]: ind = modlist[i].index("*") count = modlist[i][:ind] modlist[i] = modlist[i][ind+1:] # this removes the formatting of [] around pools if "[" in modlist[i]: modlist[i] = modlist[i][1:-1] # even non-pooled modules are treated like a pool with one module, since they are formatted the same pooled = modlist[i].split(",") for module in pooled: # vanilla formatting: add 0{}000000 to ComponentTypes, where {} is the hex index in the earlier VANILLAS string if module in VANILLAS: temp = str(hex(VANILLAS.index(module)))[2:] component += "0{}000000".format(temp) else: vann = 0 # set to zero because if there are mods, the default of " []" is wrong modstring += "\n - {}".format(module) # removes default " []" if vann == 0: modstring = modstring[3:] # finally adds the count, compenent, and modstring to what goes in the file below the string of pain retstring += "\n - Count: {}\n AllowedSources: 2\n ComponentTypes: {}\n SpecialComponentType: 0\n ModTypes:{}".format(count, component, modstring) retstring += "\n PacingEventsEnabled: {}".format(str(self.pacing)) # pacing goes below the mods for some reason retstring += "\n # Mission Utility v0.2 by BlvdBroken" # version number for debugging # creates a .asset file with the name of the ID, since thats what Unity does as well f = open("{}.asset".format(self.iden), "w") f.write(retstring) f.close() #print(retstring) return True # returns True to show it passes the sanity check, important for createMission() in the Gui class # makes sure Unity nor your OS gets mad at you for your asset file, runs within enter() def sanity(self): # these first two are illegal characters in Win/Mac files as well as reserved filenames in Win illegalFileNameChars = ["<", ">", ":", "\"", "/", "\\", "|", "?", "*", " ", "."] illegalFileNames = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", 
"COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"] if (self.iden.upper() in illegalFileNames): showinfo(title="Error", message="Illegal file name.") return False for chara in illegalFileNameChars: if chara in self.iden: showinfo(title="Error", message="Illegal character in mission ID.") return False # next four yell at you for having non-numerics in numbered inputs: time limit, strikes, needy activation time, and widgets if not self.time_limit.isnumeric(): showinfo(title="Error", message="Illegal character in Time Limit.") return False if not self.strikes.isnumeric(): showinfo(title="Error", message="Illegal character in Strikes.") return False if not self.needy_activation_time.isnumeric(): showinfo(title="Error", message="Illegal character in Needy Activation Time.") return False if not self.widgets.isnumeric(): showinfo(title="Error", message="Illegal character in Widgets.") return False # TODO figure out what characters cause the descriptions to throw a fit, or what I can include to make them acceptable # currently most special characters in the description break the file according to Unity, even though it's the exact same format if you enter it through Unity return True class Gui(tk.Tk): # currently everything except createMission() runs in init, while not the best practice it makes the most sense for a GUI def __init__(self): super().__init__() self.title("Mission Asset Utility") self.geometry('900x750') iden = tk.StringVar() name = tk.StringVar() description = tk.StringVar() time_limit = tk.StringVar() strikes = tk.StringVar() needy_activation_time = tk.StringVar() self.front_only = tk.StringVar(value=0) widgets = tk.StringVar() #modules = tk.StringVar() separator = tk.StringVar() self.pacing = tk.StringVar(value=0) # using tk's grid functionality as it's very nice compared to other options # column 0 is for names while 1 is for inputs # row 8 is where module list goes self.columnconfigure(0, weight = 0) 
self.columnconfigure(1, weight = 20) self.rowconfigure(8, weight = 5) # sticky="WE" means it fills it's whole grid location left to right (west to east) iden_label = ttk.Label(self, text="Mission ID:") iden_label.grid(column=0, row=0, padx=10, pady=5) self.iden_box = ttk.Entry(self, textvariable=iden) self.iden_box.grid(column=1, row=0, sticky="WE", padx=10, pady=5) name_label = ttk.Label(self, text="Name:") name_label.grid(column=0, row=1, padx=10, pady=5) self.name_box = ttk.Entry(self, textvariable=name) self.name_box.grid(column=1, row=1, sticky="WE", padx=10, pady=5) description_label = ttk.Label(self, text="Description:") description_label.grid(column=0, row=2, padx=10, pady=5) self.description_box = ttk.Entry(self, textvariable=description) self.description_box.grid(column=1, row=2, sticky="WE", padx=10, pady=5) time_limit_label = ttk.Label(self, text="Time Limit:") time_limit_label.grid(column=0, row=3, padx=10, pady=5) self.time_limit_box = ttk.Entry(self, textvariable=time_limit) self.time_limit_box.grid(column=1, row=3, sticky="WE", padx=10, pady=5) strikes_label = ttk.Label(self, text="Strikes:") strikes_label.grid(column=0, row=4, padx=10, pady=5) self.strikes_box = ttk.Entry(self, textvariable=strikes) self.strikes_box.grid(column=1, row=4, sticky="WE", padx=10, pady=5) needy_activation_time_label = ttk.Label(self, text="Needy Activation Time:") needy_activation_time_label.grid(column=0, row=5, padx=10, pady=5) self.needy_activation_time_box = ttk.Entry(self, textvariable=needy_activation_time) self.needy_activation_time_box.grid(column=1, row=5, sticky="WE", padx=10, pady=5) widgets_label = ttk.Label(self, text="Widget Amount:") widgets_label.grid(column=0, row=6, padx=10, pady=5) self.widgets_box = ttk.Entry(self, textvariable=widgets) self.widgets_box.grid(column=1, row=6, sticky="WE", padx=10, pady=5) # TODO fix spaghetti code that was made in an attempt to make these look nice, don't ask what I was going for front_only_check = 
ttk.Checkbutton(self, text="Front Only", variable=self.front_only, onvalue=1, offvalue=0).grid(column=1, row=7, padx=10, pady=10) pacing_check = ttk.Checkbutton(self, text="Pacing Events", variable=self.pacing, onvalue=1, offvalue=0).grid(column=1, row=7, sticky="E", padx=10, pady=10, ipadx=100) # ScrolledText comes with a scrollbar, but it's only for up and down, so I add a Scrollbar to go left and right in case you are using spaces/tabs to separate modules_label = ttk.Label(self, text="Module List:") modules_label.grid(column=0, row=8, padx=10, pady=5) self.modules_box = ScrolledText(self, width=10, height=10, wrap=tk.NONE) self.modules_box.grid(column=1, row=8, sticky="NSEW", padx=10, pady=0) modules_scrollbar = ttk.Scrollbar(self, orient='horizontal', command=self.modules_box.xview) modules_scrollbar.grid(column=1, row=9, sticky="EW", padx=10) self.modules_box["xscrollcommand"] = modules_scrollbar.set # TODO make the sheet usable *before* you select this, no idea why this happens separator_label = ttk.Label(self, text="Separator:") separator_label.grid(column=0, row=10, padx=10, pady=5) separator_box = ttk.Combobox(self, textvariable=separator) separator_box['values'] = ["newlines", "spaces", "tabs"] separator_box['state'] = 'readonly' separator_box.grid(column=1, row=10, sticky="W", padx=10, pady=5) open_dmg_button = ttk.Button(self, text = "Open DMG mission", command = self.parse_dmg).grid(column=1, row=11, padx=10, pady=5) # runs createMission() with all of the info in the other boxes when pressed enter_button = ttk.Button(self, text="Create Asset File", command=lambda: self.createMission(iden.get(), name.get(), description.get(), time_limit.get(), strikes.get(), needy_activation_time.get(), self.front_only.get(), widgets.get(), self.modules_box.get("1.0", tk.END), separator.get(), self.pacing.get())).grid(column=1, row=12, padx=10, pady=5) # shows you the defaults and some important notes before you start showinfo(title="Info", message="Defaults:\n ID: 
mission\n Name: Mission\n Description: a mission\n Time Limit: 300\n Strikes: 3\n Needy Activation Time: 90\n Widgets: 5\n\nNote: All times are in seconds.\n\nThe module list should use the module ID's, which can be found at ktane.timwi.de") # this is important for the tk gui, I imagine it just runs a constant update self.mainloop() #Select DMG mission file and parse its values into the GUI inputs def parse_dmg(self): default_values = AssetFile() #Select and open file filename = askopenfilename(title = "Select DMG mission") default_values.iden = path.basename(filename).split(".")[0] #Set iden to be the name of the selected file lines = [] try: with open(filename, "r") as mission_file: lines = mission_file.readlines() except Exception as e: print(format_exc()) showerror(title="Mission read error", message="Couldn't read the selected file. Are you sure it's a DMG mission? ({})".format(type(e).__name__)) return #For each line, detect which property of a mission it matches, and change the value of the mission according to that for line in lines: line = line.strip() skip = False for pattern in DMG_IGNORE: if line.startswith(pattern):
if skip: continue if line in DMG_TOGGLABLES: #Change checkbox value setattr(default_values, *DMG_TOGGLABLES[line]) continue for regex in DMG_LINE_TYPES: #Change entry value match = re.search(regex, line) if match != None: field = DMG_LINE_TYPES[regex] value = match.group(1) if field == "time_limit": #Calculate seconds timesplit = tuple(map(int, match.string.split(":"))) value = timesplit[0]*3600+timesplit[1]*60+timesplit[2] if len(timesplit)==3 else timesplit[0]*60+timesplit[1] if field == "modules": #New modules have to be joined to the old ones value = (default_values.modules + "\n" + match.string).strip() setattr(default_values, field, value) break for regex in DMG_LINE_TYPES: #Finalize entry values field = DMG_LINE_TYPES[regex] change_text(getattr(self, field+"_box"), getattr(default_values, field)) change_text(self.iden_box, default_values.iden) #Iden doesn't have an entry for variable in DMG_TOGGLABLES: #Finalize checkbox values var_name = DMG_TOGGLABLES[variable][0] getattr(self, var_name).set(getattr(default_values, var_name)) # ran when enter button is pressed, creates a new AssetFile and then tells you it worked if it passes the sanity check def createMission(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): created_mission = AssetFile() if created_mission.enter(iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): showinfo(title="You did it!", message="Mission file created and downloaded.") def main(): Gui() if __name__ == "__main__": main()
skip = True break
conditional_block
Mission_Util_V01.py
import re from os import sep, path from traceback import format_exc import tkinter as tk from tkinter import ttk from tkinter.messagebox import showinfo, showerror from tkinter.scrolledtext import ScrolledText from tkinter.filedialog import askopenfilename # Game takes vanillas based on their index in this list, so no touchy VANILLAS = [ "Empty", "Timer", "Wires", "BigButton", "Keypad", "Simon", "WhosOnFirst", "Memory", "Morse", "Venn", "WireSequence", "Maze", "Password", "NeedyVentGas", "NeedyCapacitor", "NeedyKnob"] DMG_LINE_TYPES = { r"^\/\/\/\/ (.*?)$": "name", r"^\/\/\/ (.*?)$": "description", r"^((\d+):)?(\d+):(\d+)$": "time_limit", r"^(\d+)X$": "strikes", r"^needyactivationtime:(\d+)$": "needy_activation_time", r"^widgets:(\d+)$": "widgets", r"^!?((\d+)\s?\*\s?)?(.*?)$": "modules" } DMG_IGNORE = ["room:", "factory:", "mode:"] #Ignore these as there are no such fields in a mission asset DMG_TOGGLABLES = { #Values for checkboxes (DMG_Line: (key, value)) "frontonly": ("front_only", 1), "nopacing": ("pacing", 0) } def change_text(widget, text): try: #ScrolledText widget.delete("1.0", tk.END) widget.insert("1.0", text) except tk.TclError: #Bad entry index - Entry widget.delete(0, tk.END) widget.insert(0, text) class AssetFile: # default settings/variable init def
(self): self.iden = "mission" self.name = "Mission" self.description = "a mission" self.time_limit = "300" self.strikes = "3" self.needy_activation_time = "90" self.front_only = 0 self.widgets = "5" self.modules = "" self.separator = "\n" self.pacing = 1 # ran when pressing the create button, returns true or false based on sanity() def enter(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): # takes all of the inputted info and puts it into AssetFile's variables if they weren't left blank self.iden = iden.strip() if iden.strip() != "" else self.iden self.name = name.strip() if name.strip() != "" else self.name self.description = description.strip() if "\'{}\'".format(description.strip()) != "" else self.description self.time_limit = time_limit.strip() if time_limit.strip() != "" else self.time_limit self.strikes = strikes.strip() if strikes.strip() != "" else self.strikes self.needy_activation_time = needy_activation_time.strip() if needy_activation_time.strip() != "" else self.needy_activation_time self.front_only = front_only self.widgets = widgets.strip() if widgets.strip() != "" else self.widgets self.modules = modules.strip() if modules.strip() != "" else self.modules switcher = {"newlines": "\n", "spaces": " ", "tabs": "\t"} self.separator = switcher.get(separator, "\n") self.pacing = pacing # stops the process if any of the variables don't pass the sanity check if not self.sanity(): return False # string of pain, the whole asset file before the modlist is written here retstring = "%YAML 1.1\n%TAG !u! 
tag:unity3d.com,2011:\n--- !u!114 &11400000\nMonoBehaviour:\n m_ObjectHideFlags: 0\n m_PrefabParentObject: {{fileID: 0}}\n m_PrefabInternal: {{fileID: 0}}\n m_GameObject: {{fileID: 0}}\n m_Enabled: 1\n m_EditorHideFlags: 0\n m_Script: {{fileID: -548183353, guid: 45b809be76fd7a3468b6f517bced6f28, type: 3}}\n m_Name: {}\n m_EditorClassIdentifier: {}\n DisplayName: {}\n Description: {}\n GeneratorSetting:\n TimeLimit: {}\n NumStrikes: {}\n TimeBeforeNeedyActivation: {}\n FrontFaceOnly: {}\n OptionalWidgetCount: {}\n ComponentPools:".format(self.iden, self.iden, self.name, self.description, self.time_limit, self.strikes, self.needy_activation_time, str(self.front_only), self.widgets) modlist = self.modules.split(self.separator) for i in range(len(modlist)): count = 1 # how many of current module there is vann = 1 # whether current module is vanilla, 1 is vanilla, 0 is modded component = "" # vanilla modules are put in ComponentTypes rather than ModTypes, so seperate string to go under ComponentTypes modstring = " []" # this goes under ModTypes, default without mods is here # this counts modules if more than one # example 2*module would set count to 2 and then continue with everything after 2* if "*" in modlist[i]: ind = modlist[i].index("*") count = modlist[i][:ind] modlist[i] = modlist[i][ind+1:] # this removes the formatting of [] around pools if "[" in modlist[i]: modlist[i] = modlist[i][1:-1] # even non-pooled modules are treated like a pool with one module, since they are formatted the same pooled = modlist[i].split(",") for module in pooled: # vanilla formatting: add 0{}000000 to ComponentTypes, where {} is the hex index in the earlier VANILLAS string if module in VANILLAS: temp = str(hex(VANILLAS.index(module)))[2:] component += "0{}000000".format(temp) else: vann = 0 # set to zero because if there are mods, the default of " []" is wrong modstring += "\n - {}".format(module) # removes default " []" if vann == 0: modstring = modstring[3:] # finally adds the 
count, compenent, and modstring to what goes in the file below the string of pain retstring += "\n - Count: {}\n AllowedSources: 2\n ComponentTypes: {}\n SpecialComponentType: 0\n ModTypes:{}".format(count, component, modstring) retstring += "\n PacingEventsEnabled: {}".format(str(self.pacing)) # pacing goes below the mods for some reason retstring += "\n # Mission Utility v0.2 by BlvdBroken" # version number for debugging # creates a .asset file with the name of the ID, since thats what Unity does as well f = open("{}.asset".format(self.iden), "w") f.write(retstring) f.close() #print(retstring) return True # returns True to show it passes the sanity check, important for createMission() in the Gui class # makes sure Unity nor your OS gets mad at you for your asset file, runs within enter() def sanity(self): # these first two are illegal characters in Win/Mac files as well as reserved filenames in Win illegalFileNameChars = ["<", ">", ":", "\"", "/", "\\", "|", "?", "*", " ", "."] illegalFileNames = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"] if (self.iden.upper() in illegalFileNames): showinfo(title="Error", message="Illegal file name.") return False for chara in illegalFileNameChars: if chara in self.iden: showinfo(title="Error", message="Illegal character in mission ID.") return False # next four yell at you for having non-numerics in numbered inputs: time limit, strikes, needy activation time, and widgets if not self.time_limit.isnumeric(): showinfo(title="Error", message="Illegal character in Time Limit.") return False if not self.strikes.isnumeric(): showinfo(title="Error", message="Illegal character in Strikes.") return False if not self.needy_activation_time.isnumeric(): showinfo(title="Error", message="Illegal character in Needy Activation Time.") return False if not self.widgets.isnumeric(): showinfo(title="Error", 
message="Illegal character in Widgets.") return False # TODO figure out what characters cause the descriptions to throw a fit, or what I can include to make them acceptable # currently most special characters in the description break the file according to Unity, even though it's the exact same format if you enter it through Unity return True class Gui(tk.Tk): # currently everything except createMission() runs in init, while not the best practice it makes the most sense for a GUI def __init__(self): super().__init__() self.title("Mission Asset Utility") self.geometry('900x750') iden = tk.StringVar() name = tk.StringVar() description = tk.StringVar() time_limit = tk.StringVar() strikes = tk.StringVar() needy_activation_time = tk.StringVar() self.front_only = tk.StringVar(value=0) widgets = tk.StringVar() #modules = tk.StringVar() separator = tk.StringVar() self.pacing = tk.StringVar(value=0) # using tk's grid functionality as it's very nice compared to other options # column 0 is for names while 1 is for inputs # row 8 is where module list goes self.columnconfigure(0, weight = 0) self.columnconfigure(1, weight = 20) self.rowconfigure(8, weight = 5) # sticky="WE" means it fills it's whole grid location left to right (west to east) iden_label = ttk.Label(self, text="Mission ID:") iden_label.grid(column=0, row=0, padx=10, pady=5) self.iden_box = ttk.Entry(self, textvariable=iden) self.iden_box.grid(column=1, row=0, sticky="WE", padx=10, pady=5) name_label = ttk.Label(self, text="Name:") name_label.grid(column=0, row=1, padx=10, pady=5) self.name_box = ttk.Entry(self, textvariable=name) self.name_box.grid(column=1, row=1, sticky="WE", padx=10, pady=5) description_label = ttk.Label(self, text="Description:") description_label.grid(column=0, row=2, padx=10, pady=5) self.description_box = ttk.Entry(self, textvariable=description) self.description_box.grid(column=1, row=2, sticky="WE", padx=10, pady=5) time_limit_label = ttk.Label(self, text="Time Limit:") 
time_limit_label.grid(column=0, row=3, padx=10, pady=5) self.time_limit_box = ttk.Entry(self, textvariable=time_limit) self.time_limit_box.grid(column=1, row=3, sticky="WE", padx=10, pady=5) strikes_label = ttk.Label(self, text="Strikes:") strikes_label.grid(column=0, row=4, padx=10, pady=5) self.strikes_box = ttk.Entry(self, textvariable=strikes) self.strikes_box.grid(column=1, row=4, sticky="WE", padx=10, pady=5) needy_activation_time_label = ttk.Label(self, text="Needy Activation Time:") needy_activation_time_label.grid(column=0, row=5, padx=10, pady=5) self.needy_activation_time_box = ttk.Entry(self, textvariable=needy_activation_time) self.needy_activation_time_box.grid(column=1, row=5, sticky="WE", padx=10, pady=5) widgets_label = ttk.Label(self, text="Widget Amount:") widgets_label.grid(column=0, row=6, padx=10, pady=5) self.widgets_box = ttk.Entry(self, textvariable=widgets) self.widgets_box.grid(column=1, row=6, sticky="WE", padx=10, pady=5) # TODO fix spaghetti code that was made in an attempt to make these look nice, don't ask what I was going for front_only_check = ttk.Checkbutton(self, text="Front Only", variable=self.front_only, onvalue=1, offvalue=0).grid(column=1, row=7, padx=10, pady=10) pacing_check = ttk.Checkbutton(self, text="Pacing Events", variable=self.pacing, onvalue=1, offvalue=0).grid(column=1, row=7, sticky="E", padx=10, pady=10, ipadx=100) # ScrolledText comes with a scrollbar, but it's only for up and down, so I add a Scrollbar to go left and right in case you are using spaces/tabs to separate modules_label = ttk.Label(self, text="Module List:") modules_label.grid(column=0, row=8, padx=10, pady=5) self.modules_box = ScrolledText(self, width=10, height=10, wrap=tk.NONE) self.modules_box.grid(column=1, row=8, sticky="NSEW", padx=10, pady=0) modules_scrollbar = ttk.Scrollbar(self, orient='horizontal', command=self.modules_box.xview) modules_scrollbar.grid(column=1, row=9, sticky="EW", padx=10) self.modules_box["xscrollcommand"] = 
modules_scrollbar.set # TODO make the sheet usable *before* you select this, no idea why this happens separator_label = ttk.Label(self, text="Separator:") separator_label.grid(column=0, row=10, padx=10, pady=5) separator_box = ttk.Combobox(self, textvariable=separator) separator_box['values'] = ["newlines", "spaces", "tabs"] separator_box['state'] = 'readonly' separator_box.grid(column=1, row=10, sticky="W", padx=10, pady=5) open_dmg_button = ttk.Button(self, text = "Open DMG mission", command = self.parse_dmg).grid(column=1, row=11, padx=10, pady=5) # runs createMission() with all of the info in the other boxes when pressed enter_button = ttk.Button(self, text="Create Asset File", command=lambda: self.createMission(iden.get(), name.get(), description.get(), time_limit.get(), strikes.get(), needy_activation_time.get(), self.front_only.get(), widgets.get(), self.modules_box.get("1.0", tk.END), separator.get(), self.pacing.get())).grid(column=1, row=12, padx=10, pady=5) # shows you the defaults and some important notes before you start showinfo(title="Info", message="Defaults:\n ID: mission\n Name: Mission\n Description: a mission\n Time Limit: 300\n Strikes: 3\n Needy Activation Time: 90\n Widgets: 5\n\nNote: All times are in seconds.\n\nThe module list should use the module ID's, which can be found at ktane.timwi.de") # this is important for the tk gui, I imagine it just runs a constant update self.mainloop() #Select DMG mission file and parse its values into the GUI inputs def parse_dmg(self): default_values = AssetFile() #Select and open file filename = askopenfilename(title = "Select DMG mission") default_values.iden = path.basename(filename).split(".")[0] #Set iden to be the name of the selected file lines = [] try: with open(filename, "r") as mission_file: lines = mission_file.readlines() except Exception as e: print(format_exc()) showerror(title="Mission read error", message="Couldn't read the selected file. Are you sure it's a DMG mission? 
({})".format(type(e).__name__)) return #For each line, detect which property of a mission it matches, and change the value of the mission according to that for line in lines: line = line.strip() skip = False for pattern in DMG_IGNORE: if line.startswith(pattern): skip = True break if skip: continue if line in DMG_TOGGLABLES: #Change checkbox value setattr(default_values, *DMG_TOGGLABLES[line]) continue for regex in DMG_LINE_TYPES: #Change entry value match = re.search(regex, line) if match != None: field = DMG_LINE_TYPES[regex] value = match.group(1) if field == "time_limit": #Calculate seconds timesplit = tuple(map(int, match.string.split(":"))) value = timesplit[0]*3600+timesplit[1]*60+timesplit[2] if len(timesplit)==3 else timesplit[0]*60+timesplit[1] if field == "modules": #New modules have to be joined to the old ones value = (default_values.modules + "\n" + match.string).strip() setattr(default_values, field, value) break for regex in DMG_LINE_TYPES: #Finalize entry values field = DMG_LINE_TYPES[regex] change_text(getattr(self, field+"_box"), getattr(default_values, field)) change_text(self.iden_box, default_values.iden) #Iden doesn't have an entry for variable in DMG_TOGGLABLES: #Finalize checkbox values var_name = DMG_TOGGLABLES[variable][0] getattr(self, var_name).set(getattr(default_values, var_name)) # ran when enter button is pressed, creates a new AssetFile and then tells you it worked if it passes the sanity check def createMission(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): created_mission = AssetFile() if created_mission.enter(iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): showinfo(title="You did it!", message="Mission file created and downloaded.") def main(): Gui() if __name__ == "__main__": main()
__init__
identifier_name
Mission_Util_V01.py
import re from os import sep, path from traceback import format_exc import tkinter as tk from tkinter import ttk from tkinter.messagebox import showinfo, showerror from tkinter.scrolledtext import ScrolledText from tkinter.filedialog import askopenfilename # Game takes vanillas based on their index in this list, so no touchy VANILLAS = [ "Empty", "Timer", "Wires", "BigButton", "Keypad", "Simon", "WhosOnFirst", "Memory", "Morse", "Venn", "WireSequence", "Maze", "Password", "NeedyVentGas", "NeedyCapacitor", "NeedyKnob"] DMG_LINE_TYPES = { r"^\/\/\/\/ (.*?)$": "name", r"^\/\/\/ (.*?)$": "description", r"^((\d+):)?(\d+):(\d+)$": "time_limit", r"^(\d+)X$": "strikes", r"^needyactivationtime:(\d+)$": "needy_activation_time", r"^widgets:(\d+)$": "widgets", r"^!?((\d+)\s?\*\s?)?(.*?)$": "modules" } DMG_IGNORE = ["room:", "factory:", "mode:"] #Ignore these as there are no such fields in a mission asset DMG_TOGGLABLES = { #Values for checkboxes (DMG_Line: (key, value)) "frontonly": ("front_only", 1), "nopacing": ("pacing", 0) } def change_text(widget, text): try: #ScrolledText widget.delete("1.0", tk.END) widget.insert("1.0", text) except tk.TclError: #Bad entry index - Entry widget.delete(0, tk.END) widget.insert(0, text) class AssetFile: # default settings/variable init def __init__(self): self.iden = "mission" self.name = "Mission" self.description = "a mission" self.time_limit = "300" self.strikes = "3" self.needy_activation_time = "90" self.front_only = 0 self.widgets = "5" self.modules = "" self.separator = "\n" self.pacing = 1 # ran when pressing the create button, returns true or false based on sanity() def enter(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): # takes all of the inputted info and puts it into AssetFile's variables if they weren't left blank self.iden = iden.strip() if iden.strip() != "" else self.iden self.name = name.strip() if name.strip() != "" else self.name 
self.description = description.strip() if "\'{}\'".format(description.strip()) != "" else self.description self.time_limit = time_limit.strip() if time_limit.strip() != "" else self.time_limit self.strikes = strikes.strip() if strikes.strip() != "" else self.strikes self.needy_activation_time = needy_activation_time.strip() if needy_activation_time.strip() != "" else self.needy_activation_time self.front_only = front_only self.widgets = widgets.strip() if widgets.strip() != "" else self.widgets self.modules = modules.strip() if modules.strip() != "" else self.modules switcher = {"newlines": "\n", "spaces": " ", "tabs": "\t"} self.separator = switcher.get(separator, "\n") self.pacing = pacing # stops the process if any of the variables don't pass the sanity check if not self.sanity(): return False # string of pain, the whole asset file before the modlist is written here retstring = "%YAML 1.1\n%TAG !u! tag:unity3d.com,2011:\n--- !u!114 &11400000\nMonoBehaviour:\n m_ObjectHideFlags: 0\n m_PrefabParentObject: {{fileID: 0}}\n m_PrefabInternal: {{fileID: 0}}\n m_GameObject: {{fileID: 0}}\n m_Enabled: 1\n m_EditorHideFlags: 0\n m_Script: {{fileID: -548183353, guid: 45b809be76fd7a3468b6f517bced6f28, type: 3}}\n m_Name: {}\n m_EditorClassIdentifier: {}\n DisplayName: {}\n Description: {}\n GeneratorSetting:\n TimeLimit: {}\n NumStrikes: {}\n TimeBeforeNeedyActivation: {}\n FrontFaceOnly: {}\n OptionalWidgetCount: {}\n ComponentPools:".format(self.iden, self.iden, self.name, self.description, self.time_limit, self.strikes, self.needy_activation_time, str(self.front_only), self.widgets) modlist = self.modules.split(self.separator) for i in range(len(modlist)): count = 1 # how many of current module there is vann = 1 # whether current module is vanilla, 1 is vanilla, 0 is modded component = "" # vanilla modules are put in ComponentTypes rather than ModTypes, so seperate string to go under ComponentTypes modstring = " []" # this goes under ModTypes, default without mods is 
here # this counts modules if more than one # example 2*module would set count to 2 and then continue with everything after 2* if "*" in modlist[i]: ind = modlist[i].index("*") count = modlist[i][:ind] modlist[i] = modlist[i][ind+1:] # this removes the formatting of [] around pools if "[" in modlist[i]: modlist[i] = modlist[i][1:-1] # even non-pooled modules are treated like a pool with one module, since they are formatted the same pooled = modlist[i].split(",") for module in pooled: # vanilla formatting: add 0{}000000 to ComponentTypes, where {} is the hex index in the earlier VANILLAS string if module in VANILLAS: temp = str(hex(VANILLAS.index(module)))[2:] component += "0{}000000".format(temp) else: vann = 0 # set to zero because if there are mods, the default of " []" is wrong modstring += "\n - {}".format(module) # removes default " []" if vann == 0: modstring = modstring[3:] # finally adds the count, compenent, and modstring to what goes in the file below the string of pain retstring += "\n - Count: {}\n AllowedSources: 2\n ComponentTypes: {}\n SpecialComponentType: 0\n ModTypes:{}".format(count, component, modstring) retstring += "\n PacingEventsEnabled: {}".format(str(self.pacing)) # pacing goes below the mods for some reason retstring += "\n # Mission Utility v0.2 by BlvdBroken" # version number for debugging # creates a .asset file with the name of the ID, since thats what Unity does as well f = open("{}.asset".format(self.iden), "w") f.write(retstring) f.close() #print(retstring) return True # returns True to show it passes the sanity check, important for createMission() in the Gui class # makes sure Unity nor your OS gets mad at you for your asset file, runs within enter() def sanity(self): # these first two are illegal characters in Win/Mac files as well as reserved filenames in Win illegalFileNameChars = ["<", ">", ":", "\"", "/", "\\", "|", "?", "*", " ", "."] illegalFileNames = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", 
"COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"] if (self.iden.upper() in illegalFileNames): showinfo(title="Error", message="Illegal file name.") return False for chara in illegalFileNameChars: if chara in self.iden: showinfo(title="Error", message="Illegal character in mission ID.") return False # next four yell at you for having non-numerics in numbered inputs: time limit, strikes, needy activation time, and widgets if not self.time_limit.isnumeric(): showinfo(title="Error", message="Illegal character in Time Limit.") return False if not self.strikes.isnumeric(): showinfo(title="Error", message="Illegal character in Strikes.") return False if not self.needy_activation_time.isnumeric(): showinfo(title="Error", message="Illegal character in Needy Activation Time.") return False if not self.widgets.isnumeric():
showinfo(title="Error", message="Illegal character in Widgets.") return False # TODO figure out what characters cause the descriptions to throw a fit, or what I can include to make them acceptable # currently most special characters in the description break the file according to Unity, even though it's the exact same format if you enter it through Unity return True class Gui(tk.Tk): # currently everything except createMission() runs in init, while not the best practice it makes the most sense for a GUI def __init__(self): super().__init__() self.title("Mission Asset Utility") self.geometry('900x750') iden = tk.StringVar() name = tk.StringVar() description = tk.StringVar() time_limit = tk.StringVar() strikes = tk.StringVar() needy_activation_time = tk.StringVar() self.front_only = tk.StringVar(value=0) widgets = tk.StringVar() #modules = tk.StringVar() separator = tk.StringVar() self.pacing = tk.StringVar(value=0) # using tk's grid functionality as it's very nice compared to other options # column 0 is for names while 1 is for inputs # row 8 is where module list goes self.columnconfigure(0, weight = 0) self.columnconfigure(1, weight = 20) self.rowconfigure(8, weight = 5) # sticky="WE" means it fills it's whole grid location left to right (west to east) iden_label = ttk.Label(self, text="Mission ID:") iden_label.grid(column=0, row=0, padx=10, pady=5) self.iden_box = ttk.Entry(self, textvariable=iden) self.iden_box.grid(column=1, row=0, sticky="WE", padx=10, pady=5) name_label = ttk.Label(self, text="Name:") name_label.grid(column=0, row=1, padx=10, pady=5) self.name_box = ttk.Entry(self, textvariable=name) self.name_box.grid(column=1, row=1, sticky="WE", padx=10, pady=5) description_label = ttk.Label(self, text="Description:") description_label.grid(column=0, row=2, padx=10, pady=5) self.description_box = ttk.Entry(self, textvariable=description) self.description_box.grid(column=1, row=2, sticky="WE", padx=10, pady=5) time_limit_label = ttk.Label(self, text="Time 
Limit:") time_limit_label.grid(column=0, row=3, padx=10, pady=5) self.time_limit_box = ttk.Entry(self, textvariable=time_limit) self.time_limit_box.grid(column=1, row=3, sticky="WE", padx=10, pady=5) strikes_label = ttk.Label(self, text="Strikes:") strikes_label.grid(column=0, row=4, padx=10, pady=5) self.strikes_box = ttk.Entry(self, textvariable=strikes) self.strikes_box.grid(column=1, row=4, sticky="WE", padx=10, pady=5) needy_activation_time_label = ttk.Label(self, text="Needy Activation Time:") needy_activation_time_label.grid(column=0, row=5, padx=10, pady=5) self.needy_activation_time_box = ttk.Entry(self, textvariable=needy_activation_time) self.needy_activation_time_box.grid(column=1, row=5, sticky="WE", padx=10, pady=5) widgets_label = ttk.Label(self, text="Widget Amount:") widgets_label.grid(column=0, row=6, padx=10, pady=5) self.widgets_box = ttk.Entry(self, textvariable=widgets) self.widgets_box.grid(column=1, row=6, sticky="WE", padx=10, pady=5) # TODO fix spaghetti code that was made in an attempt to make these look nice, don't ask what I was going for front_only_check = ttk.Checkbutton(self, text="Front Only", variable=self.front_only, onvalue=1, offvalue=0).grid(column=1, row=7, padx=10, pady=10) pacing_check = ttk.Checkbutton(self, text="Pacing Events", variable=self.pacing, onvalue=1, offvalue=0).grid(column=1, row=7, sticky="E", padx=10, pady=10, ipadx=100) # ScrolledText comes with a scrollbar, but it's only for up and down, so I add a Scrollbar to go left and right in case you are using spaces/tabs to separate modules_label = ttk.Label(self, text="Module List:") modules_label.grid(column=0, row=8, padx=10, pady=5) self.modules_box = ScrolledText(self, width=10, height=10, wrap=tk.NONE) self.modules_box.grid(column=1, row=8, sticky="NSEW", padx=10, pady=0) modules_scrollbar = ttk.Scrollbar(self, orient='horizontal', command=self.modules_box.xview) modules_scrollbar.grid(column=1, row=9, sticky="EW", padx=10) self.modules_box["xscrollcommand"] = 
modules_scrollbar.set # TODO make the sheet usable *before* you select this, no idea why this happens separator_label = ttk.Label(self, text="Separator:") separator_label.grid(column=0, row=10, padx=10, pady=5) separator_box = ttk.Combobox(self, textvariable=separator) separator_box['values'] = ["newlines", "spaces", "tabs"] separator_box['state'] = 'readonly' separator_box.grid(column=1, row=10, sticky="W", padx=10, pady=5) open_dmg_button = ttk.Button(self, text = "Open DMG mission", command = self.parse_dmg).grid(column=1, row=11, padx=10, pady=5) # runs createMission() with all of the info in the other boxes when pressed enter_button = ttk.Button(self, text="Create Asset File", command=lambda: self.createMission(iden.get(), name.get(), description.get(), time_limit.get(), strikes.get(), needy_activation_time.get(), self.front_only.get(), widgets.get(), self.modules_box.get("1.0", tk.END), separator.get(), self.pacing.get())).grid(column=1, row=12, padx=10, pady=5) # shows you the defaults and some important notes before you start showinfo(title="Info", message="Defaults:\n ID: mission\n Name: Mission\n Description: a mission\n Time Limit: 300\n Strikes: 3\n Needy Activation Time: 90\n Widgets: 5\n\nNote: All times are in seconds.\n\nThe module list should use the module ID's, which can be found at ktane.timwi.de") # this is important for the tk gui, I imagine it just runs a constant update self.mainloop() #Select DMG mission file and parse its values into the GUI inputs def parse_dmg(self): default_values = AssetFile() #Select and open file filename = askopenfilename(title = "Select DMG mission") default_values.iden = path.basename(filename).split(".")[0] #Set iden to be the name of the selected file lines = [] try: with open(filename, "r") as mission_file: lines = mission_file.readlines() except Exception as e: print(format_exc()) showerror(title="Mission read error", message="Couldn't read the selected file. Are you sure it's a DMG mission? 
({})".format(type(e).__name__)) return #For each line, detect which property of a mission it matches, and change the value of the mission according to that for line in lines: line = line.strip() skip = False for pattern in DMG_IGNORE: if line.startswith(pattern): skip = True break if skip: continue if line in DMG_TOGGLABLES: #Change checkbox value setattr(default_values, *DMG_TOGGLABLES[line]) continue for regex in DMG_LINE_TYPES: #Change entry value match = re.search(regex, line) if match != None: field = DMG_LINE_TYPES[regex] value = match.group(1) if field == "time_limit": #Calculate seconds timesplit = tuple(map(int, match.string.split(":"))) value = timesplit[0]*3600+timesplit[1]*60+timesplit[2] if len(timesplit)==3 else timesplit[0]*60+timesplit[1] if field == "modules": #New modules have to be joined to the old ones value = (default_values.modules + "\n" + match.string).strip() setattr(default_values, field, value) break for regex in DMG_LINE_TYPES: #Finalize entry values field = DMG_LINE_TYPES[regex] change_text(getattr(self, field+"_box"), getattr(default_values, field)) change_text(self.iden_box, default_values.iden) #Iden doesn't have an entry for variable in DMG_TOGGLABLES: #Finalize checkbox values var_name = DMG_TOGGLABLES[variable][0] getattr(self, var_name).set(getattr(default_values, var_name)) # ran when enter button is pressed, creates a new AssetFile and then tells you it worked if it passes the sanity check def createMission(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): created_mission = AssetFile() if created_mission.enter(iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): showinfo(title="You did it!", message="Mission file created and downloaded.") def main(): Gui() if __name__ == "__main__": main()
random_line_split
Mission_Util_V01.py
import re from os import sep, path from traceback import format_exc import tkinter as tk from tkinter import ttk from tkinter.messagebox import showinfo, showerror from tkinter.scrolledtext import ScrolledText from tkinter.filedialog import askopenfilename # Game takes vanillas based on their index in this list, so no touchy VANILLAS = [ "Empty", "Timer", "Wires", "BigButton", "Keypad", "Simon", "WhosOnFirst", "Memory", "Morse", "Venn", "WireSequence", "Maze", "Password", "NeedyVentGas", "NeedyCapacitor", "NeedyKnob"] DMG_LINE_TYPES = { r"^\/\/\/\/ (.*?)$": "name", r"^\/\/\/ (.*?)$": "description", r"^((\d+):)?(\d+):(\d+)$": "time_limit", r"^(\d+)X$": "strikes", r"^needyactivationtime:(\d+)$": "needy_activation_time", r"^widgets:(\d+)$": "widgets", r"^!?((\d+)\s?\*\s?)?(.*?)$": "modules" } DMG_IGNORE = ["room:", "factory:", "mode:"] #Ignore these as there are no such fields in a mission asset DMG_TOGGLABLES = { #Values for checkboxes (DMG_Line: (key, value)) "frontonly": ("front_only", 1), "nopacing": ("pacing", 0) } def change_text(widget, text):
class AssetFile: # default settings/variable init def __init__(self): self.iden = "mission" self.name = "Mission" self.description = "a mission" self.time_limit = "300" self.strikes = "3" self.needy_activation_time = "90" self.front_only = 0 self.widgets = "5" self.modules = "" self.separator = "\n" self.pacing = 1 # ran when pressing the create button, returns true or false based on sanity() def enter(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): # takes all of the inputted info and puts it into AssetFile's variables if they weren't left blank self.iden = iden.strip() if iden.strip() != "" else self.iden self.name = name.strip() if name.strip() != "" else self.name self.description = description.strip() if "\'{}\'".format(description.strip()) != "" else self.description self.time_limit = time_limit.strip() if time_limit.strip() != "" else self.time_limit self.strikes = strikes.strip() if strikes.strip() != "" else self.strikes self.needy_activation_time = needy_activation_time.strip() if needy_activation_time.strip() != "" else self.needy_activation_time self.front_only = front_only self.widgets = widgets.strip() if widgets.strip() != "" else self.widgets self.modules = modules.strip() if modules.strip() != "" else self.modules switcher = {"newlines": "\n", "spaces": " ", "tabs": "\t"} self.separator = switcher.get(separator, "\n") self.pacing = pacing # stops the process if any of the variables don't pass the sanity check if not self.sanity(): return False # string of pain, the whole asset file before the modlist is written here retstring = "%YAML 1.1\n%TAG !u! 
tag:unity3d.com,2011:\n--- !u!114 &11400000\nMonoBehaviour:\n m_ObjectHideFlags: 0\n m_PrefabParentObject: {{fileID: 0}}\n m_PrefabInternal: {{fileID: 0}}\n m_GameObject: {{fileID: 0}}\n m_Enabled: 1\n m_EditorHideFlags: 0\n m_Script: {{fileID: -548183353, guid: 45b809be76fd7a3468b6f517bced6f28, type: 3}}\n m_Name: {}\n m_EditorClassIdentifier: {}\n DisplayName: {}\n Description: {}\n GeneratorSetting:\n TimeLimit: {}\n NumStrikes: {}\n TimeBeforeNeedyActivation: {}\n FrontFaceOnly: {}\n OptionalWidgetCount: {}\n ComponentPools:".format(self.iden, self.iden, self.name, self.description, self.time_limit, self.strikes, self.needy_activation_time, str(self.front_only), self.widgets) modlist = self.modules.split(self.separator) for i in range(len(modlist)): count = 1 # how many of current module there is vann = 1 # whether current module is vanilla, 1 is vanilla, 0 is modded component = "" # vanilla modules are put in ComponentTypes rather than ModTypes, so seperate string to go under ComponentTypes modstring = " []" # this goes under ModTypes, default without mods is here # this counts modules if more than one # example 2*module would set count to 2 and then continue with everything after 2* if "*" in modlist[i]: ind = modlist[i].index("*") count = modlist[i][:ind] modlist[i] = modlist[i][ind+1:] # this removes the formatting of [] around pools if "[" in modlist[i]: modlist[i] = modlist[i][1:-1] # even non-pooled modules are treated like a pool with one module, since they are formatted the same pooled = modlist[i].split(",") for module in pooled: # vanilla formatting: add 0{}000000 to ComponentTypes, where {} is the hex index in the earlier VANILLAS string if module in VANILLAS: temp = str(hex(VANILLAS.index(module)))[2:] component += "0{}000000".format(temp) else: vann = 0 # set to zero because if there are mods, the default of " []" is wrong modstring += "\n - {}".format(module) # removes default " []" if vann == 0: modstring = modstring[3:] # finally adds the 
count, compenent, and modstring to what goes in the file below the string of pain retstring += "\n - Count: {}\n AllowedSources: 2\n ComponentTypes: {}\n SpecialComponentType: 0\n ModTypes:{}".format(count, component, modstring) retstring += "\n PacingEventsEnabled: {}".format(str(self.pacing)) # pacing goes below the mods for some reason retstring += "\n # Mission Utility v0.2 by BlvdBroken" # version number for debugging # creates a .asset file with the name of the ID, since thats what Unity does as well f = open("{}.asset".format(self.iden), "w") f.write(retstring) f.close() #print(retstring) return True # returns True to show it passes the sanity check, important for createMission() in the Gui class # makes sure Unity nor your OS gets mad at you for your asset file, runs within enter() def sanity(self): # these first two are illegal characters in Win/Mac files as well as reserved filenames in Win illegalFileNameChars = ["<", ">", ":", "\"", "/", "\\", "|", "?", "*", " ", "."] illegalFileNames = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9"] if (self.iden.upper() in illegalFileNames): showinfo(title="Error", message="Illegal file name.") return False for chara in illegalFileNameChars: if chara in self.iden: showinfo(title="Error", message="Illegal character in mission ID.") return False # next four yell at you for having non-numerics in numbered inputs: time limit, strikes, needy activation time, and widgets if not self.time_limit.isnumeric(): showinfo(title="Error", message="Illegal character in Time Limit.") return False if not self.strikes.isnumeric(): showinfo(title="Error", message="Illegal character in Strikes.") return False if not self.needy_activation_time.isnumeric(): showinfo(title="Error", message="Illegal character in Needy Activation Time.") return False if not self.widgets.isnumeric(): showinfo(title="Error", 
message="Illegal character in Widgets.") return False # TODO figure out what characters cause the descriptions to throw a fit, or what I can include to make them acceptable # currently most special characters in the description break the file according to Unity, even though it's the exact same format if you enter it through Unity return True class Gui(tk.Tk): # currently everything except createMission() runs in init, while not the best practice it makes the most sense for a GUI def __init__(self): super().__init__() self.title("Mission Asset Utility") self.geometry('900x750') iden = tk.StringVar() name = tk.StringVar() description = tk.StringVar() time_limit = tk.StringVar() strikes = tk.StringVar() needy_activation_time = tk.StringVar() self.front_only = tk.StringVar(value=0) widgets = tk.StringVar() #modules = tk.StringVar() separator = tk.StringVar() self.pacing = tk.StringVar(value=0) # using tk's grid functionality as it's very nice compared to other options # column 0 is for names while 1 is for inputs # row 8 is where module list goes self.columnconfigure(0, weight = 0) self.columnconfigure(1, weight = 20) self.rowconfigure(8, weight = 5) # sticky="WE" means it fills it's whole grid location left to right (west to east) iden_label = ttk.Label(self, text="Mission ID:") iden_label.grid(column=0, row=0, padx=10, pady=5) self.iden_box = ttk.Entry(self, textvariable=iden) self.iden_box.grid(column=1, row=0, sticky="WE", padx=10, pady=5) name_label = ttk.Label(self, text="Name:") name_label.grid(column=0, row=1, padx=10, pady=5) self.name_box = ttk.Entry(self, textvariable=name) self.name_box.grid(column=1, row=1, sticky="WE", padx=10, pady=5) description_label = ttk.Label(self, text="Description:") description_label.grid(column=0, row=2, padx=10, pady=5) self.description_box = ttk.Entry(self, textvariable=description) self.description_box.grid(column=1, row=2, sticky="WE", padx=10, pady=5) time_limit_label = ttk.Label(self, text="Time Limit:") 
time_limit_label.grid(column=0, row=3, padx=10, pady=5) self.time_limit_box = ttk.Entry(self, textvariable=time_limit) self.time_limit_box.grid(column=1, row=3, sticky="WE", padx=10, pady=5) strikes_label = ttk.Label(self, text="Strikes:") strikes_label.grid(column=0, row=4, padx=10, pady=5) self.strikes_box = ttk.Entry(self, textvariable=strikes) self.strikes_box.grid(column=1, row=4, sticky="WE", padx=10, pady=5) needy_activation_time_label = ttk.Label(self, text="Needy Activation Time:") needy_activation_time_label.grid(column=0, row=5, padx=10, pady=5) self.needy_activation_time_box = ttk.Entry(self, textvariable=needy_activation_time) self.needy_activation_time_box.grid(column=1, row=5, sticky="WE", padx=10, pady=5) widgets_label = ttk.Label(self, text="Widget Amount:") widgets_label.grid(column=0, row=6, padx=10, pady=5) self.widgets_box = ttk.Entry(self, textvariable=widgets) self.widgets_box.grid(column=1, row=6, sticky="WE", padx=10, pady=5) # TODO fix spaghetti code that was made in an attempt to make these look nice, don't ask what I was going for front_only_check = ttk.Checkbutton(self, text="Front Only", variable=self.front_only, onvalue=1, offvalue=0).grid(column=1, row=7, padx=10, pady=10) pacing_check = ttk.Checkbutton(self, text="Pacing Events", variable=self.pacing, onvalue=1, offvalue=0).grid(column=1, row=7, sticky="E", padx=10, pady=10, ipadx=100) # ScrolledText comes with a scrollbar, but it's only for up and down, so I add a Scrollbar to go left and right in case you are using spaces/tabs to separate modules_label = ttk.Label(self, text="Module List:") modules_label.grid(column=0, row=8, padx=10, pady=5) self.modules_box = ScrolledText(self, width=10, height=10, wrap=tk.NONE) self.modules_box.grid(column=1, row=8, sticky="NSEW", padx=10, pady=0) modules_scrollbar = ttk.Scrollbar(self, orient='horizontal', command=self.modules_box.xview) modules_scrollbar.grid(column=1, row=9, sticky="EW", padx=10) self.modules_box["xscrollcommand"] = 
modules_scrollbar.set # TODO make the sheet usable *before* you select this, no idea why this happens separator_label = ttk.Label(self, text="Separator:") separator_label.grid(column=0, row=10, padx=10, pady=5) separator_box = ttk.Combobox(self, textvariable=separator) separator_box['values'] = ["newlines", "spaces", "tabs"] separator_box['state'] = 'readonly' separator_box.grid(column=1, row=10, sticky="W", padx=10, pady=5) open_dmg_button = ttk.Button(self, text = "Open DMG mission", command = self.parse_dmg).grid(column=1, row=11, padx=10, pady=5) # runs createMission() with all of the info in the other boxes when pressed enter_button = ttk.Button(self, text="Create Asset File", command=lambda: self.createMission(iden.get(), name.get(), description.get(), time_limit.get(), strikes.get(), needy_activation_time.get(), self.front_only.get(), widgets.get(), self.modules_box.get("1.0", tk.END), separator.get(), self.pacing.get())).grid(column=1, row=12, padx=10, pady=5) # shows you the defaults and some important notes before you start showinfo(title="Info", message="Defaults:\n ID: mission\n Name: Mission\n Description: a mission\n Time Limit: 300\n Strikes: 3\n Needy Activation Time: 90\n Widgets: 5\n\nNote: All times are in seconds.\n\nThe module list should use the module ID's, which can be found at ktane.timwi.de") # this is important for the tk gui, I imagine it just runs a constant update self.mainloop() #Select DMG mission file and parse its values into the GUI inputs def parse_dmg(self): default_values = AssetFile() #Select and open file filename = askopenfilename(title = "Select DMG mission") default_values.iden = path.basename(filename).split(".")[0] #Set iden to be the name of the selected file lines = [] try: with open(filename, "r") as mission_file: lines = mission_file.readlines() except Exception as e: print(format_exc()) showerror(title="Mission read error", message="Couldn't read the selected file. Are you sure it's a DMG mission? 
({})".format(type(e).__name__)) return #For each line, detect which property of a mission it matches, and change the value of the mission according to that for line in lines: line = line.strip() skip = False for pattern in DMG_IGNORE: if line.startswith(pattern): skip = True break if skip: continue if line in DMG_TOGGLABLES: #Change checkbox value setattr(default_values, *DMG_TOGGLABLES[line]) continue for regex in DMG_LINE_TYPES: #Change entry value match = re.search(regex, line) if match != None: field = DMG_LINE_TYPES[regex] value = match.group(1) if field == "time_limit": #Calculate seconds timesplit = tuple(map(int, match.string.split(":"))) value = timesplit[0]*3600+timesplit[1]*60+timesplit[2] if len(timesplit)==3 else timesplit[0]*60+timesplit[1] if field == "modules": #New modules have to be joined to the old ones value = (default_values.modules + "\n" + match.string).strip() setattr(default_values, field, value) break for regex in DMG_LINE_TYPES: #Finalize entry values field = DMG_LINE_TYPES[regex] change_text(getattr(self, field+"_box"), getattr(default_values, field)) change_text(self.iden_box, default_values.iden) #Iden doesn't have an entry for variable in DMG_TOGGLABLES: #Finalize checkbox values var_name = DMG_TOGGLABLES[variable][0] getattr(self, var_name).set(getattr(default_values, var_name)) # ran when enter button is pressed, creates a new AssetFile and then tells you it worked if it passes the sanity check def createMission(self, iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): created_mission = AssetFile() if created_mission.enter(iden, name, description, time_limit, strikes, needy_activation_time, front_only, widgets, modules, separator, pacing): showinfo(title="You did it!", message="Mission file created and downloaded.") def main(): Gui() if __name__ == "__main__": main()
try: #ScrolledText widget.delete("1.0", tk.END) widget.insert("1.0", text) except tk.TclError: #Bad entry index - Entry widget.delete(0, tk.END) widget.insert(0, text)
identifier_body
flood_order.rs
/* This tool is part of the WhiteboxTools geospatial analysis library. Authors: Dr. John Lindsay Created: 12/07/2017 Last Modified: 12/10/2018 License: MIT */ use whitebox_raster::*; use whitebox_common::structures::Array2D; use crate::tools::*; use std::cmp::Ordering; use std::collections::BinaryHeap; use std::collections::VecDeque; use std::env; use std::f64; use std::i32; use std::io::{Error, ErrorKind}; use std::path; /// This tool takes an input digital elevation model (DEM) and creates an output raster where every grid cell /// contains the flood order of that cell within the DEM. The flood order is the sequence of grid cells that /// are encountered during a search, starting from the raster grid edges and the lowest grid cell, moving inward /// at increasing elevations. This is in fact similar to how the highly efficient Wang and Liu (2006) depression /// filling algorithm and the Breach Depressions (Fast) operates. The output flood order raster contains the /// sequential order, from lowest edge cell to the highest pixel in the DEM. /// /// Like the `FillDepressions` tool, `FloodOrder` will read the entire DEM into memory. This may make the /// algorithm ill suited to processing massive DEMs except where the user's computer has substantial memory /// (RAM) resources. /// /// # Reference /// Wang, L., and Liu, H. (2006). An efficient method for identifying and filling surface depressions in digital /// elevation models for hydrologic analysis and modelling. International Journal of Geographical Information Science, /// 20(2), 193-213. 
/// /// # See Also /// `FillDepressions` pub struct FloodOrder { name: String, description: String, toolbox: String, parameters: Vec<ToolParameter>, example_usage: String, } impl FloodOrder { pub fn new() -> FloodOrder { // public constructor let name = "FloodOrder".to_string(); let toolbox = "Hydrological Analysis".to_string(); let description = "Assigns each DEM grid cell its order in the sequence of inundations that are encountered during a search starting from the edges, moving inward at increasing elevations.".to_string(); let mut parameters = vec![]; parameters.push(ToolParameter { name: "Input File".to_owned(), flags: vec!["-i".to_owned(), "--dem".to_owned()], description: "Input raster DEM file.".to_owned(), parameter_type: ParameterType::ExistingFile(ParameterFileType::Raster), default_value: None, optional: false, }); parameters.push(ToolParameter { name: "Output File".to_owned(), flags: vec!["-o".to_owned(), "--output".to_owned()], description: "Output raster file.".to_owned(), parameter_type: ParameterType::NewFile(ParameterFileType::Raster), default_value: None, optional: false, }); let sep: String = path::MAIN_SEPARATOR.to_string(); let e = format!("{}", env::current_exe().unwrap().display()); let mut parent = env::current_exe().unwrap(); parent.pop(); let p = format!("{}", parent.display()); let mut short_exe = e .replace(&p, "") .replace(".exe", "") .replace(".", "") .replace(&sep, ""); if e.contains(".exe") { short_exe += ".exe"; } let usage = format!( ">>.*{0} -r={1} -v --wd=\"*path*to*data*\" --dem=DEM.tif -o=output.tif", short_exe, name ) .replace("*", &sep); FloodOrder { name: name, description: description, toolbox: toolbox, parameters: parameters, example_usage: usage, } } } impl WhiteboxTool for FloodOrder { fn get_source_file(&self) -> String { String::from(file!()) } fn get_tool_name(&self) -> String { self.name.clone() } fn get_tool_description(&self) -> String { self.description.clone() } fn get_tool_parameters(&self) -> String { match 
serde_json::to_string(&self.parameters) { Ok(json_str) => return format!("{{\"parameters\":{}}}", json_str), Err(err) => return format!("{:?}", err), } } fn get_example_usage(&self) -> String { self.example_usage.clone() } fn get_toolbox(&self) -> String { self.toolbox.clone() } fn run<'a>( &self, args: Vec<String>, working_directory: &'a str, verbose: bool, ) -> Result<(), Error> { let mut input_file = String::new(); let mut output_file = String::new(); if args.len() == 0 { return Err(Error::new( ErrorKind::InvalidInput, "Tool run with no parameters.", )); } for i in 0..args.len() { let mut arg = args[i].replace("\"", ""); arg = arg.replace("\'", ""); let cmd = arg.split("="); // in case an equals sign was used let vec = cmd.collect::<Vec<&str>>(); let mut keyval = false; if vec.len() > 1 { keyval = true; } if vec[0].to_lowercase() == "-i" || vec[0].to_lowercase() == "--input" || vec[0].to_lowercase() == "--dem" { if keyval { input_file = vec[1].to_string(); } else { input_file = args[i + 1].to_string(); } } else if vec[0].to_lowercase() == "-o" || vec[0].to_lowercase() == "--output" { if keyval { output_file = vec[1].to_string(); } else { output_file = args[i + 1].to_string(); } } } if verbose { let tool_name = self.get_tool_name(); let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); // 28 = length of the 'Powered by' by statement. 
println!("{}", "*".repeat(welcome_len)); println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); println!("{}", "*".repeat(welcome_len)); } let sep: String = path::MAIN_SEPARATOR.to_string(); let mut progress: usize; let mut old_progress: usize = 1; if !input_file.contains(&sep) && !input_file.contains("/") { input_file = format!("{}{}", working_directory, input_file); } if !output_file.contains(&sep) && !output_file.contains("/") { output_file = format!("{}{}", working_directory, output_file); } if verbose { println!("Reading data...") }; let input = Raster::new(&input_file, "r")?; let start = Instant::now(); let rows = input.configs.rows as isize; let columns = input.configs.columns as isize; let num_cells = rows * columns; let nodata = input.configs.nodata; // let min_val = input.configs.minimum; // let elev_digits = ((input.configs.maximum - min_val) as i64).to_string().len(); // let elev_multiplier = 10.0_f64.powi((7 - elev_digits) as i32); // let mut small_num = 0f64; //1.0 / elev_multiplier as f64; let background_val = (i32::min_value() + 1) as f64; let mut filled_dem: Array2D<f64> = Array2D::new(rows, columns, background_val, nodata)?; let mut output = Raster::initialize_using_file(&output_file, &input); /* Find the data edges. This is complicated by the fact that DEMs frequently have nodata edges, whereby the DEM does not occupy the full extent of the raster. One approach to doing this would be simply to scan the raster, looking for cells that neighbour nodata values. However, this assumes that there are no interior nodata holes in the dataset. Instead, the approach used here is to perform a region-growing operation, looking for nodata values along the raster's edges. 
*/ let mut queue: VecDeque<(isize, isize)> = VecDeque::with_capacity((rows * columns) as usize); for row in 0..rows { /* Note that this is only possible because Whitebox rasters allow you to address cells beyond the raster extent but return the nodata value for these regions. */ queue.push_back((row, -1)); queue.push_back((row, columns)); } for col in 0..columns { queue.push_back((-1, col)); queue.push_back((rows, col)); } /* minheap is the priority queue. Note that I've tested using integer-based priority values, by multiplying the elevations, but this didn't result in a significant performance gain over the use of f64s. */ let mut minheap = BinaryHeap::with_capacity((rows * columns) as usize); let mut num_solved_cells = 0; let mut zin_n: f64; // value of neighbour of row, col in input raster let mut zout: f64; // value of row, col in output raster let mut zout_n: f64; // value of neighbour of row, col in output raster let dx = [1, 1, 1, 0, -1, -1, -1, 0]; let dy = [-1, 0, 1, 1, 1, 0, -1, -1]; let (mut row, mut col): (isize, isize); let (mut row_n, mut col_n): (isize, isize); while !queue.is_empty() { let cell = queue.pop_front().unwrap(); row = cell.0; col = cell.1; for n in 0..8 { row_n = row + dy[n]; col_n = col + dx[n]; zin_n = input[(row_n, col_n)]; zout_n = filled_dem[(row_n, col_n)]; if zout_n == background_val { if zin_n == nodata { filled_dem[(row_n, col_n)] = nodata; output[(row_n, col_n)] = nodata; queue.push_back((row_n, col_n)); } else { filled_dem[(row_n, col_n)] = zin_n; // Push it onto the priority queue for the priority flood operation minheap.push(GridCell { row: row_n, column: col_n, priority: zin_n, }); } num_solved_cells += 1; } } if verbose { progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize; if progress != old_progress { println!("progress: {}%", progress); old_progress = progress; } } } // Perform the priority flood operation. 
let mut order_val = 1f64; while !minheap.is_empty() { let cell = minheap.pop().expect("Error during pop operation."); row = cell.row; col = cell.column; zout = filled_dem[(row, col)]; output[(row, col)] = order_val; order_val += 1f64; for n in 0..8 { row_n = row + dy[n]; col_n = col + dx[n]; zout_n = filled_dem[(row_n, col_n)]; if zout_n == background_val { zin_n = input[(row_n, col_n)]; if zin_n != nodata { if zin_n < zout { zin_n = zout; } // We're in a depression. Raise the elevation. filled_dem[(row_n, col_n)] = zin_n; minheap.push(GridCell { row: row_n, column: col_n, priority: zin_n, }); } else { // Interior nodata cells are still treated as nodata and are not filled. output[(row_n, col_n)] = nodata; num_solved_cells += 1; } } } if verbose { num_solved_cells += 1; progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize; if progress != old_progress { println!("Progress: {}%", progress); old_progress = progress; } } } let elapsed_time = get_formatted_elapsed_time(start); output.add_metadata_entry(format!( "Created by whitebox_tools\' {} tool", self.get_tool_name() )); output.add_metadata_entry(format!("Input file: {}", input_file)); output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time)); if verbose { println!("Saving data...") }; let _ = match output.write() { Ok(_) => { if verbose { println!("Output file written") } } Err(e) => return Err(e), }; if verbose { println!( "{}", &format!("Elapsed Time (excluding I/O): {}", elapsed_time) ); } Ok(()) } } #[derive(PartialEq, Debug)] struct GridCell { row: isize, column: isize, // priority: usize, priority: f64, } impl Eq for GridCell {} impl PartialOrd for GridCell { fn
(&self, other: &Self) -> Option<Ordering> { // Some(other.priority.cmp(&self.priority)) other.priority.partial_cmp(&self.priority) } } impl Ord for GridCell { fn cmp(&self, other: &GridCell) -> Ordering { // other.priority.cmp(&self.priority) let ord = self.partial_cmp(other).unwrap(); match ord { Ordering::Greater => Ordering::Less, Ordering::Less => Ordering::Greater, Ordering::Equal => ord, } } }
partial_cmp
identifier_name
flood_order.rs
/* This tool is part of the WhiteboxTools geospatial analysis library. Authors: Dr. John Lindsay Created: 12/07/2017 Last Modified: 12/10/2018 License: MIT */ use whitebox_raster::*; use whitebox_common::structures::Array2D; use crate::tools::*; use std::cmp::Ordering; use std::collections::BinaryHeap; use std::collections::VecDeque; use std::env; use std::f64; use std::i32; use std::io::{Error, ErrorKind}; use std::path; /// This tool takes an input digital elevation model (DEM) and creates an output raster where every grid cell /// contains the flood order of that cell within the DEM. The flood order is the sequence of grid cells that /// are encountered during a search, starting from the raster grid edges and the lowest grid cell, moving inward /// at increasing elevations. This is in fact similar to how the highly efficient Wang and Liu (2006) depression /// filling algorithm and the Breach Depressions (Fast) operates. The output flood order raster contains the /// sequential order, from lowest edge cell to the highest pixel in the DEM. /// /// Like the `FillDepressions` tool, `FloodOrder` will read the entire DEM into memory. This may make the /// algorithm ill suited to processing massive DEMs except where the user's computer has substantial memory /// (RAM) resources. /// /// # Reference /// Wang, L., and Liu, H. (2006). An efficient method for identifying and filling surface depressions in digital /// elevation models for hydrologic analysis and modelling. International Journal of Geographical Information Science, /// 20(2), 193-213. 
/// /// # See Also /// `FillDepressions` pub struct FloodOrder { name: String, description: String, toolbox: String, parameters: Vec<ToolParameter>, example_usage: String, } impl FloodOrder { pub fn new() -> FloodOrder { // public constructor let name = "FloodOrder".to_string(); let toolbox = "Hydrological Analysis".to_string(); let description = "Assigns each DEM grid cell its order in the sequence of inundations that are encountered during a search starting from the edges, moving inward at increasing elevations.".to_string(); let mut parameters = vec![]; parameters.push(ToolParameter { name: "Input File".to_owned(), flags: vec!["-i".to_owned(), "--dem".to_owned()], description: "Input raster DEM file.".to_owned(), parameter_type: ParameterType::ExistingFile(ParameterFileType::Raster), default_value: None, optional: false, }); parameters.push(ToolParameter { name: "Output File".to_owned(), flags: vec!["-o".to_owned(), "--output".to_owned()], description: "Output raster file.".to_owned(), parameter_type: ParameterType::NewFile(ParameterFileType::Raster), default_value: None, optional: false, }); let sep: String = path::MAIN_SEPARATOR.to_string(); let e = format!("{}", env::current_exe().unwrap().display()); let mut parent = env::current_exe().unwrap(); parent.pop(); let p = format!("{}", parent.display()); let mut short_exe = e .replace(&p, "") .replace(".exe", "") .replace(".", "") .replace(&sep, ""); if e.contains(".exe") { short_exe += ".exe"; } let usage = format!( ">>.*{0} -r={1} -v --wd=\"*path*to*data*\" --dem=DEM.tif -o=output.tif", short_exe, name ) .replace("*", &sep); FloodOrder { name: name, description: description, toolbox: toolbox, parameters: parameters, example_usage: usage, } } } impl WhiteboxTool for FloodOrder { fn get_source_file(&self) -> String { String::from(file!()) } fn get_tool_name(&self) -> String { self.name.clone() } fn get_tool_description(&self) -> String { self.description.clone() } fn get_tool_parameters(&self) -> String { match 
serde_json::to_string(&self.parameters) { Ok(json_str) => return format!("{{\"parameters\":{}}}", json_str), Err(err) => return format!("{:?}", err), } } fn get_example_usage(&self) -> String { self.example_usage.clone() } fn get_toolbox(&self) -> String { self.toolbox.clone() } fn run<'a>( &self, args: Vec<String>, working_directory: &'a str, verbose: bool, ) -> Result<(), Error> { let mut input_file = String::new(); let mut output_file = String::new(); if args.len() == 0 { return Err(Error::new( ErrorKind::InvalidInput, "Tool run with no parameters.", )); } for i in 0..args.len() { let mut arg = args[i].replace("\"", ""); arg = arg.replace("\'", ""); let cmd = arg.split("="); // in case an equals sign was used let vec = cmd.collect::<Vec<&str>>(); let mut keyval = false; if vec.len() > 1 { keyval = true; } if vec[0].to_lowercase() == "-i" || vec[0].to_lowercase() == "--input" || vec[0].to_lowercase() == "--dem" { if keyval { input_file = vec[1].to_string(); } else { input_file = args[i + 1].to_string(); } } else if vec[0].to_lowercase() == "-o" || vec[0].to_lowercase() == "--output" { if keyval { output_file = vec[1].to_string(); } else { output_file = args[i + 1].to_string(); } } } if verbose { let tool_name = self.get_tool_name(); let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); // 28 = length of the 'Powered by' by statement. 
println!("{}", "*".repeat(welcome_len)); println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); println!("{}", "*".repeat(welcome_len)); } let sep: String = path::MAIN_SEPARATOR.to_string(); let mut progress: usize; let mut old_progress: usize = 1; if !input_file.contains(&sep) && !input_file.contains("/") { input_file = format!("{}{}", working_directory, input_file); } if !output_file.contains(&sep) && !output_file.contains("/") { output_file = format!("{}{}", working_directory, output_file); } if verbose { println!("Reading data...") }; let input = Raster::new(&input_file, "r")?; let start = Instant::now(); let rows = input.configs.rows as isize; let columns = input.configs.columns as isize; let num_cells = rows * columns; let nodata = input.configs.nodata; // let min_val = input.configs.minimum; // let elev_digits = ((input.configs.maximum - min_val) as i64).to_string().len(); // let elev_multiplier = 10.0_f64.powi((7 - elev_digits) as i32); // let mut small_num = 0f64; //1.0 / elev_multiplier as f64; let background_val = (i32::min_value() + 1) as f64; let mut filled_dem: Array2D<f64> = Array2D::new(rows, columns, background_val, nodata)?; let mut output = Raster::initialize_using_file(&output_file, &input); /* Find the data edges. This is complicated by the fact that DEMs frequently have nodata edges, whereby the DEM does not occupy the full extent of the raster. One approach to doing this would be simply to scan the raster, looking for cells that neighbour nodata values. However, this assumes that there are no interior nodata holes in the dataset. Instead, the approach used here is to perform a region-growing operation, looking for nodata values along the raster's edges. 
*/ let mut queue: VecDeque<(isize, isize)> = VecDeque::with_capacity((rows * columns) as usize); for row in 0..rows { /* Note that this is only possible because Whitebox rasters allow you to address cells beyond the raster extent but return the nodata value for these regions. */ queue.push_back((row, -1)); queue.push_back((row, columns)); } for col in 0..columns { queue.push_back((-1, col)); queue.push_back((rows, col)); } /* minheap is the priority queue. Note that I've tested using integer-based priority values, by multiplying the elevations, but this didn't result in a significant performance gain over the use of f64s. */ let mut minheap = BinaryHeap::with_capacity((rows * columns) as usize); let mut num_solved_cells = 0; let mut zin_n: f64; // value of neighbour of row, col in input raster let mut zout: f64; // value of row, col in output raster let mut zout_n: f64; // value of neighbour of row, col in output raster let dx = [1, 1, 1, 0, -1, -1, -1, 0]; let dy = [-1, 0, 1, 1, 1, 0, -1, -1]; let (mut row, mut col): (isize, isize); let (mut row_n, mut col_n): (isize, isize); while !queue.is_empty() { let cell = queue.pop_front().unwrap(); row = cell.0; col = cell.1; for n in 0..8 { row_n = row + dy[n]; col_n = col + dx[n]; zin_n = input[(row_n, col_n)]; zout_n = filled_dem[(row_n, col_n)]; if zout_n == background_val { if zin_n == nodata { filled_dem[(row_n, col_n)] = nodata; output[(row_n, col_n)] = nodata; queue.push_back((row_n, col_n)); } else { filled_dem[(row_n, col_n)] = zin_n; // Push it onto the priority queue for the priority flood operation minheap.push(GridCell { row: row_n, column: col_n, priority: zin_n, }); } num_solved_cells += 1; } } if verbose { progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize; if progress != old_progress { println!("progress: {}%", progress); old_progress = progress; } } } // Perform the priority flood operation. 
let mut order_val = 1f64; while !minheap.is_empty() { let cell = minheap.pop().expect("Error during pop operation."); row = cell.row; col = cell.column; zout = filled_dem[(row, col)]; output[(row, col)] = order_val; order_val += 1f64; for n in 0..8 { row_n = row + dy[n]; col_n = col + dx[n]; zout_n = filled_dem[(row_n, col_n)]; if zout_n == background_val { zin_n = input[(row_n, col_n)]; if zin_n != nodata { if zin_n < zout { zin_n = zout; } // We're in a depression. Raise the elevation. filled_dem[(row_n, col_n)] = zin_n; minheap.push(GridCell { row: row_n, column: col_n, priority: zin_n, }); } else { // Interior nodata cells are still treated as nodata and are not filled. output[(row_n, col_n)] = nodata; num_solved_cells += 1; } } } if verbose { num_solved_cells += 1;
progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize; if progress != old_progress { println!("Progress: {}%", progress); old_progress = progress; } } } let elapsed_time = get_formatted_elapsed_time(start); output.add_metadata_entry(format!( "Created by whitebox_tools\' {} tool", self.get_tool_name() )); output.add_metadata_entry(format!("Input file: {}", input_file)); output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time)); if verbose { println!("Saving data...") }; let _ = match output.write() { Ok(_) => { if verbose { println!("Output file written") } } Err(e) => return Err(e), }; if verbose { println!( "{}", &format!("Elapsed Time (excluding I/O): {}", elapsed_time) ); } Ok(()) } } #[derive(PartialEq, Debug)] struct GridCell { row: isize, column: isize, // priority: usize, priority: f64, } impl Eq for GridCell {} impl PartialOrd for GridCell { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { // Some(other.priority.cmp(&self.priority)) other.priority.partial_cmp(&self.priority) } } impl Ord for GridCell { fn cmp(&self, other: &GridCell) -> Ordering { // other.priority.cmp(&self.priority) let ord = self.partial_cmp(other).unwrap(); match ord { Ordering::Greater => Ordering::Less, Ordering::Less => Ordering::Greater, Ordering::Equal => ord, } } }
random_line_split
flood_order.rs
/* This tool is part of the WhiteboxTools geospatial analysis library. Authors: Dr. John Lindsay Created: 12/07/2017 Last Modified: 12/10/2018 License: MIT */ use whitebox_raster::*; use whitebox_common::structures::Array2D; use crate::tools::*; use std::cmp::Ordering; use std::collections::BinaryHeap; use std::collections::VecDeque; use std::env; use std::f64; use std::i32; use std::io::{Error, ErrorKind}; use std::path; /// This tool takes an input digital elevation model (DEM) and creates an output raster where every grid cell /// contains the flood order of that cell within the DEM. The flood order is the sequence of grid cells that /// are encountered during a search, starting from the raster grid edges and the lowest grid cell, moving inward /// at increasing elevations. This is in fact similar to how the highly efficient Wang and Liu (2006) depression /// filling algorithm and the Breach Depressions (Fast) operates. The output flood order raster contains the /// sequential order, from lowest edge cell to the highest pixel in the DEM. /// /// Like the `FillDepressions` tool, `FloodOrder` will read the entire DEM into memory. This may make the /// algorithm ill suited to processing massive DEMs except where the user's computer has substantial memory /// (RAM) resources. /// /// # Reference /// Wang, L., and Liu, H. (2006). An efficient method for identifying and filling surface depressions in digital /// elevation models for hydrologic analysis and modelling. International Journal of Geographical Information Science, /// 20(2), 193-213. 
/// /// # See Also /// `FillDepressions` pub struct FloodOrder { name: String, description: String, toolbox: String, parameters: Vec<ToolParameter>, example_usage: String, } impl FloodOrder { pub fn new() -> FloodOrder { // public constructor let name = "FloodOrder".to_string(); let toolbox = "Hydrological Analysis".to_string(); let description = "Assigns each DEM grid cell its order in the sequence of inundations that are encountered during a search starting from the edges, moving inward at increasing elevations.".to_string(); let mut parameters = vec![]; parameters.push(ToolParameter { name: "Input File".to_owned(), flags: vec!["-i".to_owned(), "--dem".to_owned()], description: "Input raster DEM file.".to_owned(), parameter_type: ParameterType::ExistingFile(ParameterFileType::Raster), default_value: None, optional: false, }); parameters.push(ToolParameter { name: "Output File".to_owned(), flags: vec!["-o".to_owned(), "--output".to_owned()], description: "Output raster file.".to_owned(), parameter_type: ParameterType::NewFile(ParameterFileType::Raster), default_value: None, optional: false, }); let sep: String = path::MAIN_SEPARATOR.to_string(); let e = format!("{}", env::current_exe().unwrap().display()); let mut parent = env::current_exe().unwrap(); parent.pop(); let p = format!("{}", parent.display()); let mut short_exe = e .replace(&p, "") .replace(".exe", "") .replace(".", "") .replace(&sep, ""); if e.contains(".exe") { short_exe += ".exe"; } let usage = format!( ">>.*{0} -r={1} -v --wd=\"*path*to*data*\" --dem=DEM.tif -o=output.tif", short_exe, name ) .replace("*", &sep); FloodOrder { name: name, description: description, toolbox: toolbox, parameters: parameters, example_usage: usage, } } } impl WhiteboxTool for FloodOrder { fn get_source_file(&self) -> String { String::from(file!()) } fn get_tool_name(&self) -> String { self.name.clone() } fn get_tool_description(&self) -> String { self.description.clone() } fn get_tool_parameters(&self) -> String { match 
serde_json::to_string(&self.parameters) { Ok(json_str) => return format!("{{\"parameters\":{}}}", json_str), Err(err) => return format!("{:?}", err), } } fn get_example_usage(&self) -> String { self.example_usage.clone() } fn get_toolbox(&self) -> String { self.toolbox.clone() } fn run<'a>( &self, args: Vec<String>, working_directory: &'a str, verbose: bool, ) -> Result<(), Error> { let mut input_file = String::new(); let mut output_file = String::new(); if args.len() == 0 { return Err(Error::new( ErrorKind::InvalidInput, "Tool run with no parameters.", )); } for i in 0..args.len() { let mut arg = args[i].replace("\"", ""); arg = arg.replace("\'", ""); let cmd = arg.split("="); // in case an equals sign was used let vec = cmd.collect::<Vec<&str>>(); let mut keyval = false; if vec.len() > 1 { keyval = true; } if vec[0].to_lowercase() == "-i" || vec[0].to_lowercase() == "--input" || vec[0].to_lowercase() == "--dem" { if keyval { input_file = vec[1].to_string(); } else { input_file = args[i + 1].to_string(); } } else if vec[0].to_lowercase() == "-o" || vec[0].to_lowercase() == "--output" { if keyval { output_file = vec[1].to_string(); } else { output_file = args[i + 1].to_string(); } } } if verbose { let tool_name = self.get_tool_name(); let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); // 28 = length of the 'Powered by' by statement. 
println!("{}", "*".repeat(welcome_len)); println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); println!("{}", "*".repeat(welcome_len)); } let sep: String = path::MAIN_SEPARATOR.to_string(); let mut progress: usize; let mut old_progress: usize = 1; if !input_file.contains(&sep) && !input_file.contains("/") { input_file = format!("{}{}", working_directory, input_file); } if !output_file.contains(&sep) && !output_file.contains("/") { output_file = format!("{}{}", working_directory, output_file); } if verbose { println!("Reading data...") }; let input = Raster::new(&input_file, "r")?; let start = Instant::now(); let rows = input.configs.rows as isize; let columns = input.configs.columns as isize; let num_cells = rows * columns; let nodata = input.configs.nodata; // let min_val = input.configs.minimum; // let elev_digits = ((input.configs.maximum - min_val) as i64).to_string().len(); // let elev_multiplier = 10.0_f64.powi((7 - elev_digits) as i32); // let mut small_num = 0f64; //1.0 / elev_multiplier as f64; let background_val = (i32::min_value() + 1) as f64; let mut filled_dem: Array2D<f64> = Array2D::new(rows, columns, background_val, nodata)?; let mut output = Raster::initialize_using_file(&output_file, &input); /* Find the data edges. This is complicated by the fact that DEMs frequently have nodata edges, whereby the DEM does not occupy the full extent of the raster. One approach to doing this would be simply to scan the raster, looking for cells that neighbour nodata values. However, this assumes that there are no interior nodata holes in the dataset. Instead, the approach used here is to perform a region-growing operation, looking for nodata values along the raster's edges. 
*/ let mut queue: VecDeque<(isize, isize)> = VecDeque::with_capacity((rows * columns) as usize); for row in 0..rows { /* Note that this is only possible because Whitebox rasters allow you to address cells beyond the raster extent but return the nodata value for these regions. */ queue.push_back((row, -1)); queue.push_back((row, columns)); } for col in 0..columns { queue.push_back((-1, col)); queue.push_back((rows, col)); } /* minheap is the priority queue. Note that I've tested using integer-based priority values, by multiplying the elevations, but this didn't result in a significant performance gain over the use of f64s. */ let mut minheap = BinaryHeap::with_capacity((rows * columns) as usize); let mut num_solved_cells = 0; let mut zin_n: f64; // value of neighbour of row, col in input raster let mut zout: f64; // value of row, col in output raster let mut zout_n: f64; // value of neighbour of row, col in output raster let dx = [1, 1, 1, 0, -1, -1, -1, 0]; let dy = [-1, 0, 1, 1, 1, 0, -1, -1]; let (mut row, mut col): (isize, isize); let (mut row_n, mut col_n): (isize, isize); while !queue.is_empty() { let cell = queue.pop_front().unwrap(); row = cell.0; col = cell.1; for n in 0..8 { row_n = row + dy[n]; col_n = col + dx[n]; zin_n = input[(row_n, col_n)]; zout_n = filled_dem[(row_n, col_n)]; if zout_n == background_val { if zin_n == nodata { filled_dem[(row_n, col_n)] = nodata; output[(row_n, col_n)] = nodata; queue.push_back((row_n, col_n)); } else { filled_dem[(row_n, col_n)] = zin_n; // Push it onto the priority queue for the priority flood operation minheap.push(GridCell { row: row_n, column: col_n, priority: zin_n, }); } num_solved_cells += 1; } } if verbose { progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize; if progress != old_progress { println!("progress: {}%", progress); old_progress = progress; } } } // Perform the priority flood operation. 
let mut order_val = 1f64; while !minheap.is_empty() { let cell = minheap.pop().expect("Error during pop operation."); row = cell.row; col = cell.column; zout = filled_dem[(row, col)]; output[(row, col)] = order_val; order_val += 1f64; for n in 0..8 { row_n = row + dy[n]; col_n = col + dx[n]; zout_n = filled_dem[(row_n, col_n)]; if zout_n == background_val { zin_n = input[(row_n, col_n)]; if zin_n != nodata { if zin_n < zout { zin_n = zout; } // We're in a depression. Raise the elevation. filled_dem[(row_n, col_n)] = zin_n; minheap.push(GridCell { row: row_n, column: col_n, priority: zin_n, }); } else { // Interior nodata cells are still treated as nodata and are not filled. output[(row_n, col_n)] = nodata; num_solved_cells += 1; } } } if verbose { num_solved_cells += 1; progress = (100.0_f64 * num_solved_cells as f64 / (num_cells - 1) as f64) as usize; if progress != old_progress { println!("Progress: {}%", progress); old_progress = progress; } } } let elapsed_time = get_formatted_elapsed_time(start); output.add_metadata_entry(format!( "Created by whitebox_tools\' {} tool", self.get_tool_name() )); output.add_metadata_entry(format!("Input file: {}", input_file)); output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time)); if verbose { println!("Saving data...") }; let _ = match output.write() { Ok(_) => { if verbose { println!("Output file written") } } Err(e) => return Err(e), }; if verbose { println!( "{}", &format!("Elapsed Time (excluding I/O): {}", elapsed_time) ); } Ok(()) } } #[derive(PartialEq, Debug)] struct GridCell { row: isize, column: isize, // priority: usize, priority: f64, } impl Eq for GridCell {} impl PartialOrd for GridCell { fn partial_cmp(&self, other: &Self) -> Option<Ordering>
} impl Ord for GridCell { fn cmp(&self, other: &GridCell) -> Ordering { // other.priority.cmp(&self.priority) let ord = self.partial_cmp(other).unwrap(); match ord { Ordering::Greater => Ordering::Less, Ordering::Less => Ordering::Greater, Ordering::Equal => ord, } } }
{ // Some(other.priority.cmp(&self.priority)) other.priority.partial_cmp(&self.priority) }
identifier_body
main.js
var app = {}, game; app.pixelRatio = window.devicePixelRatio || 1; app.ios = !!navigator.userAgent.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/); app.weixin = navigator.userAgent.toLowerCase().match(/MicroMessenger/i)=="micromessenger"; app.width = window.innerWidth; app.height = window.innerHeight; app.isTouch = window.ontouchstart===undefined ? false : true; app.evtDown = app.isTouch?"touchstart":"mousedown"; app.evtMove = app.isTouch?"touchmove":"mousemove"; app.evtUp = app.isTouch?"touchend":"mouseup"; app.evtClick = app.isTouch?"tap":"click"; window.addEventListener('load', function(){ document.querySelector('.game').style.display = 'block'; initGame(); }, false); function initGame(){ //game = new Phaser.Game(app.width*app.pixelRatio*0.5, app.height*app.pixelRatio*0.5, Phaser.WEBGL, document.querySelector('.game'), { game = new Phaser.Game( 640, 1008, Phaser.WEBGL, document.querySelector('.game'), null, true ); game._state = { boot: { init: function(){ console.log('boot.init'); game.scale.scaleMode = Phaser.ScaleManager.EXACT_FIT; }, create: function(){ console.log('boot.create'); setTimeout(function(){ game.state.start('loading'); }, 100); }, shutdown: function(){ console.log('boot.shutdown'); } }, loading: { init: function(){ console.log('loading.init'); this._childs = {}; //进度条背景 this._childs.line = game.add.graphics(0, 0); this._childs.line.lineStyle(2, 0xffffff, 0.2); this._childs.line.moveTo(game.width*0.2, game.height*0.5); this._childs.line.lineTo(game.width*0.8, game.height*0.5); //进度条 this._childs.line2 = game.add.graphics(0, 0); this._childs.line2.lineStyle(2, 0xffffff, 1); //进度文字 this._childs.text = game.add.text(game.width*0.5, game.height*0.47, '0%'); this._childs.text.anchor.setTo(0.5, 0.5); this._childs.text.align = 'center'; this._childs.text.fontWeight = 'normal'; this._childs.text.fontSize = 24; this._childs.text.fill = '#fff'; }, preload: function(){ console.log('loading.preload'); var _this = this; game.load.onFileComplete.add(function(p){ 
_this._childs.text.text = p+'%'; _this._childs.line2.moveTo(game.width*0.2, game.height*0.5); _this._childs.line2.lineTo(game.width*0.2+(game.width*0.6*p*0.01), game.height*0.5); }); game.load.onLoadComplete.addOnce(function(){ game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){ game.state.start('home'); }); }); game.load.image('loading', 'img/loading.png'); game.load.image('bg', 'img/bg.jpg'); game.load.image('btn', 'img/btn.png'); game.load.spritesheet('food', 'img/food.png', 32, 32, 3); game.load.image('title', 'img/title.png'); game.load.image('snake-head', 'img/snake-head.png'); game.load.image('snake-body', 'img/snake-body.png'); game.load.spritesheet('result', 'img/result.png', 32, 32, 3); }, shutdown: function(){ game.world.alpha=1; } }, home: { init: function(data){ this._result = data; }, create: function(){ console.log('home.create'); var temp, _this = this; this._childs = {}; //背景 this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg'); //标题 this._childs.title = game.add.sprite(0, 100, 'title'); this._childs.title.anchor.set(0.5); this._childs.title.position.set(game.width*0.5, game.height*0.2); //按钮 this._childs.btn = game.add.button(0,0,'btn'); this._childs.btn.anchor.set(0.5); this._childs.btn.position.set(game.width*0.5, this._result?game.height*0.85:game.height*0.75); temp = game.add.text(0,0, this._result?'再玩一次':'开始游戏'); temp.align = 'center'; temp.font = 'arial'; temp.fontWeight = 'normal'; temp.fontSize = 48; temp.fill = '#fff'; temp.anchor.setTo(0.5, 0.45); this._childs.btn.addChild(temp); this._childs.btn.onInputDown.add(function(el, e){ el.scale.set(0.97); el.getChildAt(0).fill = '#cfc'; }); this._childs.btn.onInputUp.add(function(el, e){ el.scale.set(1); el.getChildAt(0).fill = '#fff'; if(e.timeUp-e.timeDown<500){ game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){ game.state.start('play'); }); } }); 
//动画 game.add.tween(this._childs.title.scale).from({x:5,y:5}, 600, Phaser.Easing.Cubic.In, true); game.add.tween(this._childs.title).from({alpha:0, rotation:-Math.PI*2}, 600, Phaser.Easing.Cubic.In, true); game.add.tween(this._childs.btn).from({alpha:0,y:'+200'}, 300, Phaser.Easing.Cubic.Out, true, 700); //成绩 if(this._result){ this._childs.point = game.add.text(0,0,'0'); this._childs.point.align = 'center'; this._childs.point.fontSize = 72; this._childs.point.fill = '#fff'; this._childs.point.anchor.set(0.5); this._childs.point.position.set(game.width*0.5, this._childs.title.y+200); this._childs.point_line = game.add.graphics(game.width*0.5, this._childs.point.y+60); this._childs.point_line.lineStyle(3, 0xffffff, 0.75); this._childs.point_line.moveTo(-game.width*0.3, 0); this._childs.point_line.lineTo(game.width*0.3, 0); for(var i=1; i<=3; i++){ temp = game.add.text(game.width*0.42, this._childs.point_line.y-10+i*50, ' '); temp.font = 'arial'; temp.fontSize = 30; temp.fill = '#fff'; temp.fontWeight = 'normal'; temp.addChild(game.add.image(-50, 0, 'result', i-1)); this._childs['point'+i] = temp; } this._childs.point1.text = '身长 '+this._result.size+'米'; this._childs.point2.text = '吃掉 '+this._result.food+'个'; this._childs.point3.text = '用时 '+this._result.time+'秒'; game.add.tween(this._childs.point_line.scale).from({x:0}, 300, Phaser.Easing.Cubic.Out, true, 1000); game.add.tween(this._childs.point).from({y:'+50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1200); game.add.tween(this._childs.point1).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1500); game.add.tween(this._childs.point2).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1750); game.add.tween(this._childs.point3).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 2000); game.add.tween(this._result).from({point:0}, 1000, Phaser.Easing.Linear.None, true, 1400).onUpdateCallback(function(tween, percent, tweenData){ _this._childs.point.text = Math.floor(tween.target.point);
_this._childs.point.text = _this._result.point; }); } }, update: function(){ this._childs.bg.tilePosition.y+=1; }, shutdown: function(){ game.world.alpha=1; } }, play: { create: function(){ console.log('play.create'); var _this=this, temp; this._childs = {}; this._gdata = { is_end: false, time_begin: game.time.now, food_get: 0, snake_speed: 400, snake_path: [] } game.physics.startSystem(Phaser.Physics.ARCADE); //背景 this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg'); //食物 this._childs.foods = game.add.group(undefined, 'foods'); this._childs.foods.enableBody = true; this._childs.foods.physicsBodyType = Phaser.Physics.ARCADE; function addFood(){ if(_this._gdata.is_end || _this._childs.foods.length>10){ return; } var type = game.rnd.frac()>0.3 ? 0 : (game.rnd.frac()>0.4 ? 1 : 2); var temp = _this._childs.foods.create(game.rnd.between(20, game.width-20), game.rnd.between(20, game.height-20), 'food', type); temp.name = 'foot'+type; temp.anchor.set(0.5); temp.body.enable = false; game.add.tween(temp.scale).from({x:0, y:0}, 200, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){ if(_this._gdata.is_end){ return; } temp = 0.7; this.body.setSize(this.width*temp, this.height*temp, this.width*(1-temp)*0.5, this.height*(1-temp)*0.5); this.body.enable = true; game.time.events.add(Math.floor(Math.random()*1000)+1000, addFood); }, temp); if(type>0){ game.add.tween(temp).to({alpha:0}, 200, Phaser.Easing.Linear.None, true, 4000).onComplete.addOnce(function(e){ e.parent.remove(e, true); }); } } game.time.events.add(800, addFood); //蛇头 this._childs.snakeHead = game.add.sprite(0,0,'snake-head'); this._childs.snakeHead.anchor.set(0.5); this._childs.snakeHead.position.set(game.width*0.5, game.height); game.physics.arcade.enableBody(this._childs.snakeHead); temp = 0.6; this._childs.snakeHead.body.setSize(this._childs.snakeHead.width*temp, this._childs.snakeHead.height*temp, this._childs.snakeHead.width*(1-temp)*0.5, 
this._childs.snakeHead.height*(1-temp)*0.5); this._childs.snakeHead.body.allowRotation = false; this._childs.snakeHead.body.velocity.setTo(0, -this._gdata.snake_speed); //蛇身 this._childs.snakeBody = game.add.group(undefined, 'snakeBody'); for(var i=0;i<3;i++){ temp = game.add.image(0,0,'snake-body',0,this._childs.snakeBody); temp.anchor.set(0.5); temp.position.set(this._childs.snakeHead.x, this._childs.snakeHead.y); temp.angle_fix = temp.angle = Math.random()*360; } game.world.swapChildren(this._childs.snakeHead, this._childs.snakeBody); //交互层 this._childs.touchLayer = game.add.button(0,0); this._childs.touchLayer.width = game.width; this._childs.touchLayer.height = game.height; this._childs.touchLayer.onInputDown.add(function(el, e){ if(_this._gdata.is_end){ return; } var p = new Phaser.Point(e.position.x-_this._childs.snakeHead.x, e.position.y-_this._childs.snakeHead.y); p.normalize(); p.multiply(_this._gdata.snake_speed, _this._gdata.snake_speed); _this._gdata.snake_speed+=1; _this._childs.snakeHead.body.velocity.setTo(p.x, p.y); _this._childs.snakeHead.rotation = game.physics.arcade.angleToPointer(_this._childs.snakeHead, e)+Math.PI*0.5; }); //游戏结束 this._endAction = function(){ game.physics.arcade.isPaused = true; game.state.getCurrentState().state.pause(); game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){ var result = { point: 0, size: ((_this._childs.snakeBody.length+1)*0.1).toFixed(1)*1, time: ((game.time.now-_this._gdata.time_begin)*0.001).toFixed(1)*1, food: _this._gdata.food_get }; result.point = Math.ceil(result.size*100+result.time*2+result.food*25); game.world.remove(_this._childs.foods, true); game.world.remove(_this._childs.snakeHead, true); game.world.remove(_this._childs.snakeBody, true); game.state.start('home', false, false, result); }); } }, update: function(){ var _this = this; if(this._gdata.is_end){ return; } //出界 if(!this._childs.snakeHead.inWorld){ _this._gdata.is_end = true; 
_this._endAction(); return; } //吃食 game.physics.arcade.overlap(this._childs.snakeHead, this._childs.foods, function(a, b){ var temp, data; if(b.name == 'foot0'){ //蛇身增长 temp = game.add.image(0,0,'snake-body',0, _this._childs.snakeBody); data = _this._gdata.snake_path[temp.z*3+3] || {x:-999, y:-999}; temp.anchor.set(0.5); temp.position.set(data.x, data.y); temp.angle_fix = temp.angle = Math.random()*360; _this._gdata.food_get += 1; }else if(b.name == 'foot1'){ //蛇身减少 if(_this._childs.snakeBody.length>1){ _this._childs.snakeBody.remove(_this._childs.snakeBody.getBottom(), true); } }else if(b.name == 'foot2'){ //中毒死亡 _this._gdata.is_end = true; _this._endAction(); } b.parent.remove(b, true); }); //更新蛇身 this._gdata.snake_path.unshift({x: this._childs.snakeHead.x, y:this._childs.snakeHead.y, a:this._childs.snakeHead.angle}); if(this._gdata.snake_path.length>300){ this._gdata.snake_path.pop(); } this._childs.snakeBody.forEachExists(function(child){ var data = _this._gdata.snake_path[child.z*3+3]; if(!data){ return; } child.position.set(data.x, data.y); child.angle=data.angle+data.angle_fix; }); }, shutdown: function(){ game.world.alpha=1; } } } for(var i in game._state){ game.state.add(i, game._state[i], false); } game.state.start('boot'); };
}).onComplete.addOnce(function(){
random_line_split
main.js
var app = {}, game; app.pixelRatio = window.devicePixelRatio || 1; app.ios = !!navigator.userAgent.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/); app.weixin = navigator.userAgent.toLowerCase().match(/MicroMessenger/i)=="micromessenger"; app.width = window.innerWidth; app.height = window.innerHeight; app.isTouch = window.ontouchstart===undefined ? false : true; app.evtDown = app.isTouch?"touchstart":"mousedown"; app.evtMove = app.isTouch?"touchmove":"mousemove"; app.evtUp = app.isTouch?"touchend":"mouseup"; app.evtClick = app.isTouch?"tap":"click"; window.addEventListener('load', function(){ document.querySelector('.game').style.display = 'block'; initGame(); }, false); function initGame(){ //game = new Phaser.Game(app.width*app.pixelRatio*0.5, app.height*app.pixelRatio*0.5, Phaser.WEBGL, document.querySelector('.game'), { game = new Phaser.Game( 640, 1008, Phaser.WEBGL, document.querySelector('.game'), null, true ); game._state = { boot: { init: function(){ console.log('boot.init'); game.scale.scaleMode = Phaser.ScaleManager.EXACT_FIT; }, create: function(){ console.log('boot.create'); setTimeout(function(){ game.state.start('loading'); }, 100); }, shutdown: function(){ console.log('boot.shutdown'); } }, loading: { init: function(){ console.log('loading.init'); this._childs = {}; //进度条背景 this._childs.line = game.add.graphics(0, 0); this._childs.line.lineStyle(2, 0xffffff, 0.2); this._childs.line.moveTo(game.width*0.2, game.height*0.5); this._childs.line.lineTo(game.width*0.8, game.height*0.5); //进度条 this._childs.line2 = game.add.graphics(0, 0); this._childs.line2.lineStyle(2, 0xffffff, 1); //进度文字 this._childs.text = game.add.text(game.width*0.5, game.height*0.47, '0%'); this._childs.text.anchor.setTo(0.5, 0.5); this._childs.text.align = 'center'; this._childs.text.fontWeight = 'normal'; this._childs.text.fontSize = 24; this._childs.text.fill = '#fff'; }, preload: function(){ console.log('loading.preload'); var _this = this; game.load.onFileComplete.add(function(p){ 
_this._childs.text.text = p+'%'; _this._childs.line2.moveTo(game.width*0.2, game.height*0.5); _this._childs.line2.lineTo(game.width*0.2+(game.width*0.6*p*0.01), game.height*0.5); }); game.load.onLoadComplete.addOnce(function(){ game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){ game.state.start('home'); }); }); game.load.image('loading', 'img/loading.png'); game.load.image('bg', 'img/bg.jpg'); game.load.image('btn', 'img/btn.png'); game.load.spritesheet('food', 'img/food.png', 32, 32, 3); game.load.image('title', 'img/title.png'); game.load.image('snake-head', 'img/snake-head.png'); game.load.image('snake-body', 'img/snake-body.png'); game.load.spritesheet('result', 'img/result.png', 32, 32, 3); }, shutdown: function(){ game.world.alpha=1; } }, home: { init: function(data){ this._result = data; }, create: function(){ console.log('home.create'); var temp, _this = this; this._childs = {}; //背景 this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg'); //标题 this._childs.title = game.add.sprite(0, 100, 'title'); this._childs.title.anchor.set(0.5); this._childs.title.position.set(game.width*0.5, game.height*0.2); //按钮 this._childs.btn = game.add.button(0,0,'btn'); this._childs.btn.anchor.set(0.5); this._childs.btn.position.set(game.width*0.5, this._result?game.height*0.85:game.height*0.75); temp = game.add.text(0,0, this._result?'再玩一次':'开始游戏'); temp.align = 'center'; temp.font = 'arial'; temp.fontWeight = 'normal'; temp.fontSize = 48; temp.fill = '#fff'; temp.anchor.setTo(0.5, 0.45); this._childs.btn.addChild(temp); this._childs.btn.onInputDown.add(function(el, e){ el.scale.set(0.97); el.getChildAt(0).fill = '#cfc'; }); this._childs.btn.onInputUp.add(function(el, e){ el.scale.set(1); el.getChildAt(0).fill = '#fff'; if(e.timeUp-e.timeDown<500){ game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){ game.state.start('play'); }); } }); 
//动画 game.add.tween(this._childs.title.scale).from({x:5,y:5}, 600, Phaser.Easing.Cubic.In, true); game.add.tween(this._childs.title).from({alpha:0, rotation:-Math.PI*2}, 600, Phaser.Easing.Cubic.In, true); game.add.tween(this._childs.btn).from({alpha:0,y:'+200'}, 300, Phaser.Easing.Cubic.Out, true, 700); //成绩 if(this._result){ this._childs.point = game.add.text(0,0,'0'); this._childs.point.align = 'center'; this._childs.point.fontSize = 72; this._childs.point.fill = '#fff'; this._childs.point.anchor.set(0.5); this._childs.point.position.set(game.width*0.5, this._childs.title.y+200); this._childs.point_line = game.add.graphics(game.width*0.5, this._childs.point.y+60); this._childs.point_line.lineStyle(3, 0xffffff, 0.75); this._childs.point_line.moveTo(-game.width*0.3, 0); this._childs.point_line.lineTo(game.width*0.3, 0); for(var i=1; i<=3; i++){ temp = game.add.text(game.width*0.42, this._childs.point_line.y-10+i*50, ' '); temp.font = 'arial'; temp.fontSize = 30; temp.fill = '#fff'; temp.fontWeight = 'normal'; temp.addChild(game.add.image(-50, 0, 'result', i-1)); this._childs['point'+i] = temp; } this._childs.point1.text = '身长 '+this._result.size+'米'; this._childs.point2.text = '吃掉 '+this._result.food+'个'; this._childs.point3.text = '用时 '+this._result.time+'秒'; game.add.tween(this._childs.point_line.scale).from({x:0}, 300, Phaser.Easing.Cubic.Out, true, 1000); game.add.tween(this._childs.point).from({y:'+50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1200); game.add.tween(this._childs.point1).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1500); game.add.tween(this._childs.point2).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1750); game.add.tween(this._childs.point3).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 2000); game.add.tween(this._result).from({point:0}, 1000, Phaser.Easing.Linear.None, true, 1400).onUpdateCallback(function(tween, percent, tweenData){ _this._childs.point.text = 
Math.floor(tween.target.point); }).onComplete.addOnce(function(){ _this._childs.point.text = _this._result.point; }); } }, update: function(){ this._childs.bg.tilePosition.y+=1; }, shutdown: function(){ game.world.alpha=1; } }, play: { create: function(){ console.log('play.create'); var _this=this, temp; this._childs = {}; this._gdata = { is_end: false, time_begin: game.time.now, food_get: 0, snake_speed: 400, snake_path: [] } game.physics.startSystem(Phaser.Physics.ARCADE); //背景 this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg'); //食物 this._childs.foods = game.add.group(undefined, 'foods'); this._childs.foods.enableBody = true; this._childs.foods.physicsBodyType = Phaser.Physics.ARCADE; function addFood(){ if(_this._gdata.is_end || _this._childs.foods.length>10){ return; } var type = game.rnd.frac()>0.3 ? 0 : (game.rnd.frac()>0.4 ? 1 : 2);
emp = _this._childs.foods.create(game.rnd.between(20, game.width-20), game.rnd.between(20, game.height-20), 'food', type); temp.name = 'foot'+type; temp.anchor.set(0.5); temp.body.enable = false; game.add.tween(temp.scale).from({x:0, y:0}, 200, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){ if(_this._gdata.is_end){ return; } temp = 0.7; this.body.setSize(this.width*temp, this.height*temp, this.width*(1-temp)*0.5, this.height*(1-temp)*0.5); this.body.enable = true; game.time.events.add(Math.floor(Math.random()*1000)+1000, addFood); }, temp); if(type>0){ game.add.tween(temp).to({alpha:0}, 200, Phaser.Easing.Linear.None, true, 4000).onComplete.addOnce(function(e){ e.parent.remove(e, true); }); } } game.time.events.add(800, addFood); //蛇头 this._childs.snakeHead = game.add.sprite(0,0,'snake-head'); this._childs.snakeHead.anchor.set(0.5); this._childs.snakeHead.position.set(game.width*0.5, game.height); game.physics.arcade.enableBody(this._childs.snakeHead); temp = 0.6; this._childs.snakeHead.body.setSize(this._childs.snakeHead.width*temp, this._childs.snakeHead.height*temp, this._childs.snakeHead.width*(1-temp)*0.5, this._childs.snakeHead.height*(1-temp)*0.5); this._childs.snakeHead.body.allowRotation = false; this._childs.snakeHead.body.velocity.setTo(0, -this._gdata.snake_speed); //蛇身 this._childs.snakeBody = game.add.group(undefined, 'snakeBody'); for(var i=0;i<3;i++){ temp = game.add.image(0,0,'snake-body',0,this._childs.snakeBody); temp.anchor.set(0.5); temp.position.set(this._childs.snakeHead.x, this._childs.snakeHead.y); temp.angle_fix = temp.angle = Math.random()*360; } game.world.swapChildren(this._childs.snakeHead, this._childs.snakeBody); //交互层 this._childs.touchLayer = game.add.button(0,0); this._childs.touchLayer.width = game.width; this._childs.touchLayer.height = game.height; this._childs.touchLayer.onInputDown.add(function(el, e){ if(_this._gdata.is_end){ return; } var p = new Phaser.Point(e.position.x-_this._childs.snakeHead.x, 
e.position.y-_this._childs.snakeHead.y); p.normalize(); p.multiply(_this._gdata.snake_speed, _this._gdata.snake_speed); _this._gdata.snake_speed+=1; _this._childs.snakeHead.body.velocity.setTo(p.x, p.y); _this._childs.snakeHead.rotation = game.physics.arcade.angleToPointer(_this._childs.snakeHead, e)+Math.PI*0.5; }); //游戏结束 this._endAction = function(){ game.physics.arcade.isPaused = true; game.state.getCurrentState().state.pause(); game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){ var result = { point: 0, size: ((_this._childs.snakeBody.length+1)*0.1).toFixed(1)*1, time: ((game.time.now-_this._gdata.time_begin)*0.001).toFixed(1)*1, food: _this._gdata.food_get }; result.point = Math.ceil(result.size*100+result.time*2+result.food*25); game.world.remove(_this._childs.foods, true); game.world.remove(_this._childs.snakeHead, true); game.world.remove(_this._childs.snakeBody, true); game.state.start('home', false, false, result); }); } }, update: function(){ var _this = this; if(this._gdata.is_end){ return; } //出界 if(!this._childs.snakeHead.inWorld){ _this._gdata.is_end = true; _this._endAction(); return; } //吃食 game.physics.arcade.overlap(this._childs.snakeHead, this._childs.foods, function(a, b){ var temp, data; if(b.name == 'foot0'){ //蛇身增长 temp = game.add.image(0,0,'snake-body',0, _this._childs.snakeBody); data = _this._gdata.snake_path[temp.z*3+3] || {x:-999, y:-999}; temp.anchor.set(0.5); temp.position.set(data.x, data.y); temp.angle_fix = temp.angle = Math.random()*360; _this._gdata.food_get += 1; }else if(b.name == 'foot1'){ //蛇身减少 if(_this._childs.snakeBody.length>1){ _this._childs.snakeBody.remove(_this._childs.snakeBody.getBottom(), true); } }else if(b.name == 'foot2'){ //中毒死亡 _this._gdata.is_end = true; _this._endAction(); } b.parent.remove(b, true); }); //更新蛇身 this._gdata.snake_path.unshift({x: this._childs.snakeHead.x, y:this._childs.snakeHead.y, a:this._childs.snakeHead.angle}); 
if(this._gdata.snake_path.length>300){ this._gdata.snake_path.pop(); } this._childs.snakeBody.forEachExists(function(child){ var data = _this._gdata.snake_path[child.z*3+3]; if(!data){ return; } child.position.set(data.x, data.y); child.angle=data.angle+data.angle_fix; }); }, shutdown: function(){ game.world.alpha=1; } } } for(var i in game._state){ game.state.add(i, game._state[i], false); } game.state.start('boot'); };
var t
conditional_block
main.js
var app = {}, game; app.pixelRatio = window.devicePixelRatio || 1; app.ios = !!navigator.userAgent.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/); app.weixin = navigator.userAgent.toLowerCase().match(/MicroMessenger/i)=="micromessenger"; app.width = window.innerWidth; app.height = window.innerHeight; app.isTouch = window.ontouchstart===undefined ? false : true; app.evtDown = app.isTouch?"touchstart":"mousedown"; app.evtMove = app.isTouch?"touchmove":"mousemove"; app.evtUp = app.isTouch?"touchend":"mouseup"; app.evtClick = app.isTouch?"tap":"click"; window.addEventListener('load', function(){ document.querySelector('.game').style.display = 'block'; initGame(); }, false); function initGame(){ //game = new Phaser.Game(app.width*app.pixelRatio*0.5, app.height*app.pixelRatio*0.5, Phaser.WEBGL, document.querySelector('.game'), { game = new Phaser.Game( 640, 1008, Phaser.WEBGL, document.querySelector('.game'), null, true ); game._state = { boot: { init: function(){ console.log('boot.init'); game.scale.scaleMode = Phaser.ScaleManager.EXACT_FIT; }, create: function(){ console.log('boot.create'); setTimeout(function(){ game.state.start('loading'); }, 100); }, shutdown: function(){ console.log('boot.shutdown'); } }, loading: { init: function(){ console.log('loading.init'); this._childs = {}; //进度条背景 this._childs.line = game.add.graphics(0, 0); this._childs.line.lineStyle(2, 0xffffff, 0.2); this._childs.line.moveTo(game.width*0.2, game.height*0.5); this._childs.line.lineTo(game.width*0.8, game.height*0.5); //进度条 this._childs.line2 = game.add.graphics(0, 0); this._childs.line2.lineStyle(2, 0xffffff, 1); //进度文字 this._childs.text = game.add.text(game.width*0.5, game.height*0.47, '0%'); this._childs.text.anchor.setTo(0.5, 0.5); this._childs.text.align = 'center'; this._childs.text.fontWeight = 'normal'; this._childs.text.fontSize = 24; this._childs.text.fill = '#fff'; }, preload: function(){ console.log('loading.preload'); var _this = this; game.load.onFileComplete.add(function(p){ 
_this._childs.text.text = p+'%'; _this._childs.line2.moveTo(game.width*0.2, game.height*0.5); _this._childs.line2.lineTo(game.width*0.2+(game.width*0.6*p*0.01), game.height*0.5); }); game.load.onLoadComplete.addOnce(function(){ game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){ game.state.start('home'); }); }); game.load.image('loading', 'img/loading.png'); game.load.image('bg', 'img/bg.jpg'); game.load.image('btn', 'img/btn.png'); game.load.spritesheet('food', 'img/food.png', 32, 32, 3); game.load.image('title', 'img/title.png'); game.load.image('snake-head', 'img/snake-head.png'); game.load.image('snake-body', 'img/snake-body.png'); game.load.spritesheet('result', 'img/result.png', 32, 32, 3); }, shutdown: function(){ game.world.alpha=1; } }, home: { init: function(data){ this._result = data; }, create: function(){ console.log('home.create'); var temp, _this = this; this._childs = {}; //背景 this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg'); //标题 this._childs.title = game.add.sprite(0, 100, 'title'); this._childs.title.anchor.set(0.5); this._childs.title.position.set(game.width*0.5, game.height*0.2); //按钮 this._childs.btn = game.add.button(0,0,'btn'); this._childs.btn.anchor.set(0.5); this._childs.btn.position.set(game.width*0.5, this._result?game.height*0.85:game.height*0.75); temp = game.add.text(0,0, this._result?'再玩一次':'开始游戏'); temp.align = 'center'; temp.font = 'arial'; temp.fontWeight = 'normal'; temp.fontSize = 48; temp.fill = '#fff'; temp.anchor.setTo(0.5, 0.45); this._childs.btn.addChild(temp); this._childs.btn.onInputDown.add(function(el, e){ el.scale.set(0.97); el.getChildAt(0).fill = '#cfc'; }); this._childs.btn.onInputUp.add(function(el, e){ el.scale.set(1); el.getChildAt(0).fill = '#fff'; if(e.timeUp-e.timeDown<500){ game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){ game.state.start('play'); }); } }); 
//动画 game.add.tween(this._childs.title.scale).from({x:5,y:5}, 600, Phaser.Easing.Cubic.In, true); game.add.tween(this._childs.title).from({alpha:0, rotation:-Math.PI*2}, 600, Phaser.Easing.Cubic.In, true); game.add.tween(this._childs.btn).from({alpha:0,y:'+200'}, 300, Phaser.Easing.Cubic.Out, true, 700); //成绩 if(this._result){ this._childs.point = game.add.text(0,0,'0'); this._childs.point.align = 'center'; this._childs.point.fontSize = 72; this._childs.point.fill = '#fff'; this._childs.point.anchor.set(0.5); this._childs.point.position.set(game.width*0.5, this._childs.title.y+200); this._childs.point_line = game.add.graphics(game.width*0.5, this._childs.point.y+60); this._childs.point_line.lineStyle(3, 0xffffff, 0.75); this._childs.point_line.moveTo(-game.width*0.3, 0); this._childs.point_line.lineTo(game.width*0.3, 0); for(var i=1; i<=3; i++){ temp = game.add.text(game.width*0.42, this._childs.point_line.y-10+i*50, ' '); temp.font = 'arial'; temp.fontSize = 30; temp.fill = '#fff'; temp.fontWeight = 'normal'; temp.addChild(game.add.image(-50, 0, 'result', i-1)); this._childs['point'+i] = temp; } this._childs.point1.text = '身长 '+this._result.size+'米'; this._childs.point2.text = '吃掉 '+this._result.food+'个'; this._childs.point3.text = '用时 '+this._result.time+'秒'; game.add.tween(this._childs.point_line.scale).from({x:0}, 300, Phaser.Easing.Cubic.Out, true, 1000); game.add.tween(this._childs.point).from({y:'+50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1200); game.add.tween(this._childs.point1).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1500); game.add.tween(this._childs.point2).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1750); game.add.tween(this._childs.point3).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 2000); game.add.tween(this._result).from({point:0}, 1000, Phaser.Easing.Linear.None, true, 1400).onUpdateCallback(function(tween, percent, tweenData){ _this._childs.point.text = 
Math.floor(tween.target.point); }).onComplete.addOnce(function(){ _this._childs.point.text = _this._result.point; }); } }, update: function(){ this._childs.bg.tilePosition.y+=1; }, shutdown: function(){ game.world.alpha=1; } }, play: { create: function(){ console.log('play.create'); var _this=this, temp; this._childs = {}; this._gdata = { is_end: false, time_begin: game.time.now, food_get: 0, snake_speed: 400, snake_path: [] } game.physics.startSystem(Phaser.Physics.ARCADE); //背景 this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg'); //食物 this._childs.foods = game.add.group(undefined, 'foods'); this._childs.foods.enableBody = true; this._childs.foods.physicsBodyType = Phaser.Physics.ARCADE; function addFood(){ if(_this._gdata.is_end || _this._childs.foods.length>10){ return; }
r type = game.rnd.frac()>0.3 ? 0 : (game.rnd.frac()>0.4 ? 1 : 2); var temp = _this._childs.foods.create(game.rnd.between(20, game.width-20), game.rnd.between(20, game.height-20), 'food', type); temp.name = 'foot'+type; temp.anchor.set(0.5); temp.body.enable = false; game.add.tween(temp.scale).from({x:0, y:0}, 200, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){ if(_this._gdata.is_end){ return; } temp = 0.7; this.body.setSize(this.width*temp, this.height*temp, this.width*(1-temp)*0.5, this.height*(1-temp)*0.5); this.body.enable = true; game.time.events.add(Math.floor(Math.random()*1000)+1000, addFood); }, temp); if(type>0){ game.add.tween(temp).to({alpha:0}, 200, Phaser.Easing.Linear.None, true, 4000).onComplete.addOnce(function(e){ e.parent.remove(e, true); }); } } game.time.events.add(800, addFood); //蛇头 this._childs.snakeHead = game.add.sprite(0,0,'snake-head'); this._childs.snakeHead.anchor.set(0.5); this._childs.snakeHead.position.set(game.width*0.5, game.height); game.physics.arcade.enableBody(this._childs.snakeHead); temp = 0.6; this._childs.snakeHead.body.setSize(this._childs.snakeHead.width*temp, this._childs.snakeHead.height*temp, this._childs.snakeHead.width*(1-temp)*0.5, this._childs.snakeHead.height*(1-temp)*0.5); this._childs.snakeHead.body.allowRotation = false; this._childs.snakeHead.body.velocity.setTo(0, -this._gdata.snake_speed); //蛇身 this._childs.snakeBody = game.add.group(undefined, 'snakeBody'); for(var i=0;i<3;i++){ temp = game.add.image(0,0,'snake-body',0,this._childs.snakeBody); temp.anchor.set(0.5); temp.position.set(this._childs.snakeHead.x, this._childs.snakeHead.y); temp.angle_fix = temp.angle = Math.random()*360; } game.world.swapChildren(this._childs.snakeHead, this._childs.snakeBody); //交互层 this._childs.touchLayer = game.add.button(0,0); this._childs.touchLayer.width = game.width; this._childs.touchLayer.height = game.height; this._childs.touchLayer.onInputDown.add(function(el, e){ if(_this._gdata.is_end){ return; } 
var p = new Phaser.Point(e.position.x-_this._childs.snakeHead.x, e.position.y-_this._childs.snakeHead.y); p.normalize(); p.multiply(_this._gdata.snake_speed, _this._gdata.snake_speed); _this._gdata.snake_speed+=1; _this._childs.snakeHead.body.velocity.setTo(p.x, p.y); _this._childs.snakeHead.rotation = game.physics.arcade.angleToPointer(_this._childs.snakeHead, e)+Math.PI*0.5; }); //游戏结束 this._endAction = function(){ game.physics.arcade.isPaused = true; game.state.getCurrentState().state.pause(); game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){ var result = { point: 0, size: ((_this._childs.snakeBody.length+1)*0.1).toFixed(1)*1, time: ((game.time.now-_this._gdata.time_begin)*0.001).toFixed(1)*1, food: _this._gdata.food_get }; result.point = Math.ceil(result.size*100+result.time*2+result.food*25); game.world.remove(_this._childs.foods, true); game.world.remove(_this._childs.snakeHead, true); game.world.remove(_this._childs.snakeBody, true); game.state.start('home', false, false, result); }); } }, update: function(){ var _this = this; if(this._gdata.is_end){ return; } //出界 if(!this._childs.snakeHead.inWorld){ _this._gdata.is_end = true; _this._endAction(); return; } //吃食 game.physics.arcade.overlap(this._childs.snakeHead, this._childs.foods, function(a, b){ var temp, data; if(b.name == 'foot0'){ //蛇身增长 temp = game.add.image(0,0,'snake-body',0, _this._childs.snakeBody); data = _this._gdata.snake_path[temp.z*3+3] || {x:-999, y:-999}; temp.anchor.set(0.5); temp.position.set(data.x, data.y); temp.angle_fix = temp.angle = Math.random()*360; _this._gdata.food_get += 1; }else if(b.name == 'foot1'){ //蛇身减少 if(_this._childs.snakeBody.length>1){ _this._childs.snakeBody.remove(_this._childs.snakeBody.getBottom(), true); } }else if(b.name == 'foot2'){ //中毒死亡 _this._gdata.is_end = true; _this._endAction(); } b.parent.remove(b, true); }); //更新蛇身 this._gdata.snake_path.unshift({x: this._childs.snakeHead.x, 
y:this._childs.snakeHead.y, a:this._childs.snakeHead.angle}); if(this._gdata.snake_path.length>300){ this._gdata.snake_path.pop(); } this._childs.snakeBody.forEachExists(function(child){ var data = _this._gdata.snake_path[child.z*3+3]; if(!data){ return; } child.position.set(data.x, data.y); child.angle=data.angle+data.angle_fix; }); }, shutdown: function(){ game.world.alpha=1; } } } for(var i in game._state){ game.state.add(i, game._state[i], false); } game.state.start('boot'); };
va
identifier_name
main.js
var app = {}, game; app.pixelRatio = window.devicePixelRatio || 1; app.ios = !!navigator.userAgent.match(/\(i[^;]+;( U;)? CPU.+Mac OS X/); app.weixin = navigator.userAgent.toLowerCase().match(/MicroMessenger/i)=="micromessenger"; app.width = window.innerWidth; app.height = window.innerHeight; app.isTouch = window.ontouchstart===undefined ? false : true; app.evtDown = app.isTouch?"touchstart":"mousedown"; app.evtMove = app.isTouch?"touchmove":"mousemove"; app.evtUp = app.isTouch?"touchend":"mouseup"; app.evtClick = app.isTouch?"tap":"click"; window.addEventListener('load', function(){ document.querySelector('.game').style.display = 'block'; initGame(); }, false); function initGame(){ //game = new Phaser.Game(app.width*app.pixelRatio*0.5, app.height*app.pixelRatio*0.5, Phaser.WEBGL, document.querySelector('.game'), { game = new Phaser.Game( 640, 1008, Phaser.WEBGL, document.querySelector('.game'), null, true ); game._state = { boot: { init: function(){ console.log('boot.init'); game.scale.scaleMode = Phaser.ScaleManager.EXACT_FIT; }, create: function(){ console.log('boot.create'); setTimeout(function(){ game.state.start('loading'); }, 100); }, shutdown: function(){ console.log('boot.shutdown'); } }, loading: { init: function(){ console.log('loading.init'); this._childs = {}; //进度条背景 this._childs.line = game.add.graphics(0, 0); this._childs.line.lineStyle(2, 0xffffff, 0.2); this._childs.line.moveTo(game.width*0.2, game.height*0.5); this._childs.line.lineTo(game.width*0.8, game.height*0.5); //进度条 this._childs.line2 = game.add.graphics(0, 0); this._childs.line2.lineStyle(2, 0xffffff, 1); //进度文字 this._childs.text = game.add.text(game.width*0.5, game.height*0.47, '0%'); this._childs.text.anchor.setTo(0.5, 0.5); this._childs.text.align = 'center'; this._childs.text.fontWeight = 'normal'; this._childs.text.fontSize = 24; this._childs.text.fill = '#fff'; }, preload: function(){ console.log('loading.preload'); var _this = this; game.load.onFileComplete.add(function(p){ 
_this._childs.text.text = p+'%'; _this._childs.line2.moveTo(game.width*0.2, game.height*0.5); _this._childs.line2.lineTo(game.width*0.2+(game.width*0.6*p*0.01), game.height*0.5); }); game.load.onLoadComplete.addOnce(function(){ game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){ game.state.start('home'); }); }); game.load.image('loading', 'img/loading.png'); game.load.image('bg', 'img/bg.jpg'); game.load.image('btn', 'img/btn.png'); game.load.spritesheet('food', 'img/food.png', 32, 32, 3); game.load.image('title', 'img/title.png'); game.load.image('snake-head', 'img/snake-head.png'); game.load.image('snake-body', 'img/snake-body.png'); game.load.spritesheet('result', 'img/result.png', 32, 32, 3); }, shutdown: function(){ game.world.alpha=1; } }, home: { init: function(data){ this._result = data; }, create: function(){ console.log('home.create'); var temp, _this = this; this._childs = {}; //背景 this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg'); //标题 this._childs.title = game.add.sprite(0, 100, 'title'); this._childs.title.anchor.set(0.5); this._childs.title.position.set(game.width*0.5, game.height*0.2); //按钮 this._childs.btn = game.add.button(0,0,'btn'); this._childs.btn.anchor.set(0.5); this._childs.btn.position.set(game.width*0.5, this._result?game.height*0.85:game.height*0.75); temp = game.add.text(0,0, this._result?'再玩一次':'开始游戏'); temp.align = 'center'; temp.font = 'arial'; temp.fontWeight = 'normal'; temp.fontSize = 48; temp.fill = '#fff'; temp.anchor.setTo(0.5, 0.45); this._childs.btn.addChild(temp); this._childs.btn.onInputDown.add(function(el, e){ el.scale.set(0.97); el.getChildAt(0).fill = '#cfc'; }); this._childs.btn.onInputUp.add(function(el, e){ el.scale.set(1); el.getChildAt(0).fill = '#fff'; if(e.timeUp-e.timeDown<500){ game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){ game.state.start('play'); }); } }); 
//动画 game.add.tween(this._childs.title.scale).from({x:5,y:5}, 600, Phaser.Easing.Cubic.In, true); game.add.tween(this._childs.title).from({alpha:0, rotation:-Math.PI*2}, 600, Phaser.Easing.Cubic.In, true); game.add.tween(this._childs.btn).from({alpha:0,y:'+200'}, 300, Phaser.Easing.Cubic.Out, true, 700); //成绩 if(this._result){ this._childs.point = game.add.text(0,0,'0'); this._childs.point.align = 'center'; this._childs.point.fontSize = 72; this._childs.point.fill = '#fff'; this._childs.point.anchor.set(0.5); this._childs.point.position.set(game.width*0.5, this._childs.title.y+200); this._childs.point_line = game.add.graphics(game.width*0.5, this._childs.point.y+60); this._childs.point_line.lineStyle(3, 0xffffff, 0.75); this._childs.point_line.moveTo(-game.width*0.3, 0); this._childs.point_line.lineTo(game.width*0.3, 0); for(var i=1; i<=3; i++){ temp = game.add.text(game.width*0.42, this._childs.point_line.y-10+i*50, ' '); temp.font = 'arial'; temp.fontSize = 30; temp.fill = '#fff'; temp.fontWeight = 'normal'; temp.addChild(game.add.image(-50, 0, 'result', i-1)); this._childs['point'+i] = temp; } this._childs.point1.text = '身长 '+this._result.size+'米'; this._childs.point2.text = '吃掉 '+this._result.food+'个'; this._childs.point3.text = '用时 '+this._result.time+'秒'; game.add.tween(this._childs.point_line.scale).from({x:0}, 300, Phaser.Easing.Cubic.Out, true, 1000); game.add.tween(this._childs.point).from({y:'+50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1200); game.add.tween(this._childs.point1).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1500); game.add.tween(this._childs.point2).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 1750); game.add.tween(this._childs.point3).from({y:'-50', alpha:0}, 300, Phaser.Easing.Cubic.Out, true, 2000); game.add.tween(this._result).from({point:0}, 1000, Phaser.Easing.Linear.None, true, 1400).onUpdateCallback(function(tween, percent, tweenData){ _this._childs.point.text = 
Math.floor(tween.target.point); }).onComplete.addOnce(function(){ _this._childs.point.text = _this._result.point; }); } }, update: function(){ this._childs.bg.tilePosition.y+=1; }, shutdown: function(){ game.world.alpha=1; } }, play: { create: function(){ console.log('play.create'); var _this=this, temp; this._childs = {}; this._gdata = { is_end: false, time_begin: game.time.now, food_get: 0, snake_speed: 400, snake_path: [] } game.physics.startSystem(Phaser.Physics.ARCADE); //背景 this._childs.bg = game.add.tileSprite(0, 0, game.width, game.height, 'bg'); //食物 this._childs.foods = game.add.group(undefined, 'foods'); this._childs.foods.enableBody = true; this._childs.foods.physicsBodyType = Phaser.Physics.ARCADE; function addFood(){ if(_this._gdata.is_end || _this._childs.foods.length>10){ return; } var
.add.sprite(0,0,'snake-head'); this._childs.snakeHead.anchor.set(0.5); this._childs.snakeHead.position.set(game.width*0.5, game.height); game.physics.arcade.enableBody(this._childs.snakeHead); temp = 0.6; this._childs.snakeHead.body.setSize(this._childs.snakeHead.width*temp, this._childs.snakeHead.height*temp, this._childs.snakeHead.width*(1-temp)*0.5, this._childs.snakeHead.height*(1-temp)*0.5); this._childs.snakeHead.body.allowRotation = false; this._childs.snakeHead.body.velocity.setTo(0, -this._gdata.snake_speed); //蛇身 this._childs.snakeBody = game.add.group(undefined, 'snakeBody'); for(var i=0;i<3;i++){ temp = game.add.image(0,0,'snake-body',0,this._childs.snakeBody); temp.anchor.set(0.5); temp.position.set(this._childs.snakeHead.x, this._childs.snakeHead.y); temp.angle_fix = temp.angle = Math.random()*360; } game.world.swapChildren(this._childs.snakeHead, this._childs.snakeBody); //交互层 this._childs.touchLayer = game.add.button(0,0); this._childs.touchLayer.width = game.width; this._childs.touchLayer.height = game.height; this._childs.touchLayer.onInputDown.add(function(el, e){ if(_this._gdata.is_end){ return; } var p = new Phaser.Point(e.position.x-_this._childs.snakeHead.x, e.position.y-_this._childs.snakeHead.y); p.normalize(); p.multiply(_this._gdata.snake_speed, _this._gdata.snake_speed); _this._gdata.snake_speed+=1; _this._childs.snakeHead.body.velocity.setTo(p.x, p.y); _this._childs.snakeHead.rotation = game.physics.arcade.angleToPointer(_this._childs.snakeHead, e)+Math.PI*0.5; }); //游戏结束 this._endAction = function(){ game.physics.arcade.isPaused = true; game.state.getCurrentState().state.pause(); game.add.tween(game.world).to({alpha:0}, 300, Phaser.Easing.Linear.None, true, 300).onComplete.addOnce(function(){ var result = { point: 0, size: ((_this._childs.snakeBody.length+1)*0.1).toFixed(1)*1, time: ((game.time.now-_this._gdata.time_begin)*0.001).toFixed(1)*1, food: _this._gdata.food_get }; result.point = 
Math.ceil(result.size*100+result.time*2+result.food*25); game.world.remove(_this._childs.foods, true); game.world.remove(_this._childs.snakeHead, true); game.world.remove(_this._childs.snakeBody, true); game.state.start('home', false, false, result); }); } }, update: function(){ var _this = this; if(this._gdata.is_end){ return; } //出界 if(!this._childs.snakeHead.inWorld){ _this._gdata.is_end = true; _this._endAction(); return; } //吃食 game.physics.arcade.overlap(this._childs.snakeHead, this._childs.foods, function(a, b){ var temp, data; if(b.name == 'foot0'){ //蛇身增长 temp = game.add.image(0,0,'snake-body',0, _this._childs.snakeBody); data = _this._gdata.snake_path[temp.z*3+3] || {x:-999, y:-999}; temp.anchor.set(0.5); temp.position.set(data.x, data.y); temp.angle_fix = temp.angle = Math.random()*360; _this._gdata.food_get += 1; }else if(b.name == 'foot1'){ //蛇身减少 if(_this._childs.snakeBody.length>1){ _this._childs.snakeBody.remove(_this._childs.snakeBody.getBottom(), true); } }else if(b.name == 'foot2'){ //中毒死亡 _this._gdata.is_end = true; _this._endAction(); } b.parent.remove(b, true); }); //更新蛇身 this._gdata.snake_path.unshift({x: this._childs.snakeHead.x, y:this._childs.snakeHead.y, a:this._childs.snakeHead.angle}); if(this._gdata.snake_path.length>300){ this._gdata.snake_path.pop(); } this._childs.snakeBody.forEachExists(function(child){ var data = _this._gdata.snake_path[child.z*3+3]; if(!data){ return; } child.position.set(data.x, data.y); child.angle=data.angle+data.angle_fix; }); }, shutdown: function(){ game.world.alpha=1; } } } for(var i in game._state){ game.state.add(i, game._state[i], false); } game.state.start('boot'); };
type = game.rnd.frac()>0.3 ? 0 : (game.rnd.frac()>0.4 ? 1 : 2); var temp = _this._childs.foods.create(game.rnd.between(20, game.width-20), game.rnd.between(20, game.height-20), 'food', type); temp.name = 'foot'+type; temp.anchor.set(0.5); temp.body.enable = false; game.add.tween(temp.scale).from({x:0, y:0}, 200, Phaser.Easing.Linear.None, true).onComplete.addOnce(function(){ if(_this._gdata.is_end){ return; } temp = 0.7; this.body.setSize(this.width*temp, this.height*temp, this.width*(1-temp)*0.5, this.height*(1-temp)*0.5); this.body.enable = true; game.time.events.add(Math.floor(Math.random()*1000)+1000, addFood); }, temp); if(type>0){ game.add.tween(temp).to({alpha:0}, 200, Phaser.Easing.Linear.None, true, 4000).onComplete.addOnce(function(e){ e.parent.remove(e, true); }); } } game.time.events.add(800, addFood); //蛇头 this._childs.snakeHead = game
identifier_body
worldcup.js
// d: data in selection - p: data you mouseover // \ gets triggered by body onload in html function createSoccerViz() { d3.csv("data/sandbox/worldcup.csv", function(data) { overallTeamViz(data); }); } function overallTeamViz(incomingData) { d3.select("svg") .append('g') // appends <g> to <svg> to move it and center contents more easily .attr('id', 'teamsG') .attr('transform', 'translate(50,300)') .selectAll("g") .data(incomingData) .enter() // ** creates <g> for each team (to add labels for example) .append("g") .attr('class', 'overallG') // add class overallG to be able to select below .attr('transform', function(d,i) { return 'translate(' + (i * 50) + ', 0)' }); var teamG = d3.selectAll('g.overallG'); // assign selection with 'teamG', for DRY // * create pink circles teamG .append('circle') // .attr('r', 20) // ** transition - circle size pulses over 1000ms .attr('r', 0) .transition() .delay(function(d, i) { return i * 100 }) // circles appear one by one - i * 100ms .duration(500) .attr('r', 20) .transition() .duration(500) .attr("r", 20) // .style('fill', 'pink') .style('stroke', 'black') .style('stroke-width', '1px'); // ** create labels teamG .append('text') .style('text-anchor', 'middle') // how the label aligns compared to the position you give it .attr('y', 30) // .style('font-size', '10px') .text(function(d) { return d.team; }) // -------------------------------------------------------------------- // ** add buttons to filter / adjust the chart // \ creating buttons dynamically (like this) is scalable for different datasets var dataKeys = d3.keys(incomingData[0]).filter(function(el) { // d3.keys returns the names of the attr of an object as an array ( labels are [0] - team, region, win, loss ... 
) return el != "team" && el != "region"; // we want buttons for everything except team and region }); d3.select("#controls") .selectAll("button.teams") // select all buttons with the class 'team' // \ dataKeys consists of an array of attr names, so the d corresponds to one of those names and makes a good button title .data(dataKeys) // numerical data (all data except "team" and "region"(are both strings)) .enter() .append('button') .on("click", buttonClick) // gives onclick behaviour to each button. with a wrapper that gives access to the data that was bound to it when it was created // .attr("onclick", "console.log('click')" // alternative for on('click') to access HTML mouse events - notice " " at console.log // \ There’s a D3-specific reason to use the .on function: it sends the bound data to the function automatically and in the same format as the anonymous inline functions we’ve been using to set style and attribute. .html(function(d) { return d; }); // shows text on buttons - dataKeys = incomingData[0] /* // ** add interactivity to button click // \ We can create buttons based on the attributes of the data and dynamically measure the data based on the attribute bound to the button. 
function buttonClick(datapoint) { // fires on button click - bound data is automatically sent as first argument var maxValue = d3.max(incomingData, function(d) { return parseFloat(d[datapoint]); // click on button sends datapoint }); var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]); // * resize radius of circles from each team per category teamG .select("circle") .transition() .duration(1000) .attr('r', function(p) { return radiusScale(p[datapoint]); }); } */ // -------------------------------------------------------------------- /* // ** add interactivity on mouseover teamG.on("mouseover", highlightRegion); function highlightRegion(d) { teamG // = d3.selectAll("g.overallG") .select("circle") .style('fill', function(p) { // changed to p because d already defined return p.region == d.region ? "red" : "gray"; // circle turns red if you mouse over (if d in selection = element you moused over, turn red) }); } // ** add interactivity on mouseout teamG.on('mouseout', function() { teamG .select("circle") .style("fill", "pink"); }); */ // -------------------------------------------------------------------- // \ access dom element with 'this' (only in inline function) or '.node()' // \ useful cause you can use js functionality (ex: clone, measure path length) & re-append a child element d3.select("circle").each(function(d,i) { // select one circle so first team console.log(this); // this: <circle r="3.6.."></circle> }); d3.select("circle").node() // <circle r="3.6.."></circle> // -------------------------------------------------------------------- /* // ** add interactivity on mouseover teamG.on('mouseover', highlightRegion2); function highlightRegion2(d,i) { d3.select(this) // this = <circle> .select("text") .classed("active", true) .attr('y', 60); // move text down by 60 px d3.selectAll("g.overallG") .select('circle') .each(function(p,i) { p.region == d.region ? 
d3.select(this).classed("active", true) : // increase label font-size - css: circle.active d3.select(this).classed("inactive", true); }) } // ** add interactivity on mouseout teamG.on("mouseout", unHighlight) function unHighlight() { // mouse event is attached to <g> so if you mouse over circle or text it will trigger // \ you can disable like this: // teamG.select("text").style("pointer-events","none"); teamG .select("circle") .attr("class", ""); // remove active class teamG .select("text") .classed("highlight", false) .classed("active", false) // remove active class .attr("y", 30); }; // */ // -------------------------------------------------------------------- // *** use color // /* // ** add interactivity on mouseover // \ use css when possible, d3 functions are inline (ex: dynmic colors and transparency) teamG.on('mouseover', highlightRegion2); function highlightRegion2(d,i) { // \ colors in rgb get muddy, unless you break the color ramp in multiple stops teamColor = d3.rgb('pink') // or: d3.rgb(255,0,0); ('#ff0000'), ("rgb(255,0,0)") d3.select(this) .select('text') .classed('highlight', true) .attr('y,10') teamG // = d3.selectAll('g.overallG') .select('circle') .style('fill', function(p) { return p.region == d.region? 
teamColor.darker(.75) : teamColor.brighter(.5) // .darken() & .brighten() }) this.parentElement.appendChild(this); } // ** add interactivity on buttonClick function buttonClick(datapoint) { var maxValue = d3.max(incomingData, function(d) { return parseFloat(d[datapoint]); }); var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]); // var ybRamp = d3.scale.linear().domain([0, maxValue]).range(['yellow', 'blue']); // \ use interpolate to use any other scale than rgb - hsl, hcl // var ybRamp = d3.scale.linear().interpolate(d3.interpolateHsl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue green var ybRamp = d3.scale.linear().interpolate(d3.interpolateHcl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue pink orange - use d3.hsl() when you darken pink (avoid muddying) // var ybRamp = d3.scale.linear().interpolate(d3.interpolateLab).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue purple beige var tenColorScale = d3.scale.category10(["UEFA", "CONMEBOL", "CAF", "AFC"]); // \ colorbrewer: designed for qualitive data separated into categories: use quantize! 
- so we need to sort the numerical data into ranges var colorQuantize = d3.scale.quantize().domain([0,maxValue]).range(colorbrewer.Reds[3]); // sorts data into [3] categories according to data value teamG .select("circle") .transition() .duration(1000) .attr('r', function(p) { return radiusScale(p[datapoint]); }) // .style('fill', function(p) { // return ybRamp(p[datapoint]) // adds color for data values (magnitude) // }); // .style('fill', function(p) { // return tenColorScale(p.region) // adds color per region // }) .style('fill', function(p) { return colorQuantize(p[datapoint]); // each category has a different shade of red }) } // */ // *** use images // \ you can resize images on buttonClick - does not work well with raster img (= png, jpg etc) teamG .insert('image', 'text') // use insert() (not append()) to insert the images before the text elements, keeps the labels from being drawn behind the added images .attr('xlink:href', function(d) { return "img/" + d.team + ".png"; }) .attr('width', '45px') // you need to set width & height for svg images to show! 
.attr('height', '20px') .attr('x', '-22') // - 1/2 x value to center image .attr('y', '40'); // - 1/2 y value to center image = -10 // */ // -------------------------------------------------------------------- // *** modal with stats per team - uses modal.html // ** use d3.text() with .html() d3.text('modal.html', function(data) { d3.select('body') .append('div') // creates a new div .attr('id', 'modal') // with id as in main.css .html(data); // and fills it with html content from modal.html }); teamG.on('click', teamClick); function teamClick(d) { //selects and updates td.data as you click on a team d3.selectAll('td.data') // td with class data from modal.html .data(d3.values(d)) .html(function(p) { return p }); } // -------------------------------------------------------------------- // *** pregenerated .svg // * add with 'x-link:href' teamG // add svg molecule image to each team .insert('image', 'text') .attr('x-link:href', 'img/molecule.svg') .attr('width', '60') .attr('height', '60') .attr('x', '-30') .attr('y', '-80'); // ** use d3.html() so you can do more manipulation on HTML nodes with: // d3.html('img/football.svg', function(data) { // console.log(data); // contains <svg> > <g> > <path>, but we only want <p> // }) // ----- when you don't add svg to data d3.html('img/football.svg', loadSVG1); function loadSVG1(svgData) { // load svg into the fragment // \ .empty() checks if selection has elements inside it, fires true after we moved the paths out of the fragments into main svg // \ .empty() with while statement lets us move all path elements into the SVG canvas out of the fragment while (!d3.select(svgData).selectAll('path').empty()) {
3.selectAll('path').attr('transform', 'translate(50,50)'); // move 50x and 50y from left corner (0,0) } // ----- when you add svg to data d3.html('img/football.svg', loadSVG2); function loadSVG2(svgData) { // \ drawback1: can't use insert() so you need to put images in right order // \ drawback2: added with cloneNode() so they have no data bound to them > see next * d3.selectAll('g').each(function() { // each statement for each <g> var gParent = this; // = <g> node console.log(this); d3.select(svgData).selectAll('path').each(function() { // each statement for each <path> gParent.appendChild(this.cloneNode(true)) // clone the paths and append to each <g> (each team) }); }) recolorFootballs(); } function recolorFootballs() { // d3.selectAll('path') // .attr('transform', 'translate(-15, -15) scale(0.3)') // make the football smaller // .style('fill', '#d206a4') // .style('stroke-width', '2px') // .style('stroke', 'white') // */ // * (you can rebind: first select <g> .each() and then bind to <path> with .datum()= teamG.each(function(d) { d3.select(this) .selectAll('path') .datum(d) // 1 (singular for) data() - when you're binding 1 piece of data to an element) }); var tenColorScale = d3.scale.category10(["UEFA", "CONMEBOL", "CAF", "AFC"]); d3.selectAll('path') .attr('transform', 'translate(-15, -15) scale(0.3)') .style('fill', function (p) { return tenColorScale(p.region); // !! NOT DEFINED ERROR?? }) .style('stroke', 'white') .style('stroke-width', '2px'); } }
d3.select('svg').node().appendChild( // use .node() to access dom elements d3.select(svgData).select('path').node()); } d
conditional_block
worldcup.js
// d: data in selection - p: data you mouseover // \ gets triggered by body onload in html function createSoccerViz() { d3.csv("data/sandbox/worldcup.csv", function(data) { overallTeamViz(data); }); } function overallTeamViz(incomingData) { d3.select("svg") .append('g') // appends <g> to <svg> to move it and center contents more easily .attr('id', 'teamsG') .attr('transform', 'translate(50,300)') .selectAll("g") .data(incomingData) .enter() // ** creates <g> for each team (to add labels for example) .append("g") .attr('class', 'overallG') // add class overallG to be able to select below .attr('transform', function(d,i) { return 'translate(' + (i * 50) + ', 0)' }); var teamG = d3.selectAll('g.overallG'); // assign selection with 'teamG', for DRY // * create pink circles teamG .append('circle') // .attr('r', 20) // ** transition - circle size pulses over 1000ms .attr('r', 0) .transition() .delay(function(d, i) { return i * 100 }) // circles appear one by one - i * 100ms .duration(500) .attr('r', 20) .transition() .duration(500) .attr("r", 20) // .style('fill', 'pink') .style('stroke', 'black') .style('stroke-width', '1px'); // ** create labels teamG .append('text') .style('text-anchor', 'middle') // how the label aligns compared to the position you give it .attr('y', 30) // .style('font-size', '10px') .text(function(d) { return d.team; }) // -------------------------------------------------------------------- // ** add buttons to filter / adjust the chart // \ creating buttons dynamically (like this) is scalable for different datasets var dataKeys = d3.keys(incomingData[0]).filter(function(el) { // d3.keys returns the names of the attr of an object as an array ( labels are [0] - team, region, win, loss ... 
) return el != "team" && el != "region"; // we want buttons for everything except team and region }); d3.select("#controls") .selectAll("button.teams") // select all buttons with the class 'team' // \ dataKeys consists of an array of attr names, so the d corresponds to one of those names and makes a good button title .data(dataKeys) // numerical data (all data except "team" and "region"(are both strings)) .enter() .append('button') .on("click", buttonClick) // gives onclick behaviour to each button. with a wrapper that gives access to the data that was bound to it when it was created // .attr("onclick", "console.log('click')" // alternative for on('click') to access HTML mouse events - notice " " at console.log // \ There’s a D3-specific reason to use the .on function: it sends the bound data to the function automatically and in the same format as the anonymous inline functions we’ve been using to set style and attribute. .html(function(d) { return d; }); // shows text on buttons - dataKeys = incomingData[0] /* // ** add interactivity to button click // \ We can create buttons based on the attributes of the data and dynamically measure the data based on the attribute bound to the button. 
function buttonClick(datapoint) { // fires on button click - bound data is automatically sent as first argument var maxValue = d3.max(incomingData, function(d) { return parseFloat(d[datapoint]); // click on button sends datapoint }); var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]); // * resize radius of circles from each team per category teamG .select("circle") .transition() .duration(1000) .attr('r', function(p) { return radiusScale(p[datapoint]); }); } */ // -------------------------------------------------------------------- /* // ** add interactivity on mouseover teamG.on("mouseover", highlightRegion); function highlightRegion(d) { teamG // = d3.selectAll("g.overallG") .select("circle") .style('fill', function(p) { // changed to p because d already defined return p.region == d.region ? "red" : "gray"; // circle turns red if you mouse over (if d in selection = element you moused over, turn red) }); } // ** add interactivity on mouseout teamG.on('mouseout', function() { teamG .select("circle") .style("fill", "pink"); }); */ // -------------------------------------------------------------------- // \ access dom element with 'this' (only in inline function) or '.node()' // \ useful cause you can use js functionality (ex: clone, measure path length) & re-append a child element d3.select("circle").each(function(d,i) { // select one circle so first team console.log(this); // this: <circle r="3.6.."></circle> }); d3.select("circle").node() // <circle r="3.6.."></circle> // -------------------------------------------------------------------- /* // ** add interactivity on mouseover teamG.on('mouseover', highlightRegion2); function highlightRegion2(d,i) { d3.select(this) // this = <circle> .select("text") .classed("active", true) .attr('y', 60); // move text down by 60 px d3.selectAll("g.overallG") .select('circle') .each(function(p,i) { p.region == d.region ? 
d3.select(this).classed("active", true) : // increase label font-size - css: circle.active d3.select(this).classed("inactive", true); }) } // ** add interactivity on mouseout teamG.on("mouseout", unHighlight) function unHighlight() { // mouse event is attached to <g> so if you mouse over circle or text it will trigger // \ you can disable like this: // teamG.select("text").style("pointer-events","none"); teamG .select("circle") .attr("class", ""); // remove active class teamG .select("text") .classed("highlight", false) .classed("active", false) // remove active class .attr("y", 30); }; // */ // -------------------------------------------------------------------- // *** use color // /* // ** add interactivity on mouseover // \ use css when possible, d3 functions are inline (ex: dynmic colors and transparency) teamG.on('mouseover', highlightRegion2); function highlightRegion2(d,i) { // \ colors in rgb get muddy, unless you break the color ramp in multiple stops teamColor = d3.rgb('pink') // or: d3.rgb(255,0,0); ('#ff0000'), ("rgb(255,0,0)") d3.select(this) .select('text') .classed('highlight', true) .attr('y,10') teamG // = d3.selectAll('g.overallG') .select('circle') .style('fill', function(p) { return p.region == d.region? 
teamColor.darker(.75) : teamColor.brighter(.5) // .darken() & .brighten() }) this.parentElement.appendChild(this); } // ** add interactivity on buttonClick function buttonClick(datapoint) { var maxValue = d3.max(incomingData, function(d) { return parseFloat(d[datapoint]); }); var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]); // var ybRamp = d3.scale.linear().domain([0, maxValue]).range(['yellow', 'blue']); // \ use interpolate to use any other scale than rgb - hsl, hcl // var ybRamp = d3.scale.linear().interpolate(d3.interpolateHsl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue green var ybRamp = d3.scale.linear().interpolate(d3.interpolateHcl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue pink orange - use d3.hsl() when you darken pink (avoid muddying) // var ybRamp = d3.scale.linear().interpolate(d3.interpolateLab).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue purple beige var tenColorScale = d3.scale.category10(["UEFA", "CONMEBOL", "CAF", "AFC"]); // \ colorbrewer: designed for qualitive data separated into categories: use quantize! 
- so we need to sort the numerical data into ranges var colorQuantize = d3.scale.quantize().domain([0,maxValue]).range(colorbrewer.Reds[3]); // sorts data into [3] categories according to data value teamG .select("circle") .transition() .duration(1000) .attr('r', function(p) { return radiusScale(p[datapoint]); }) // .style('fill', function(p) { // return ybRamp(p[datapoint]) // adds color for data values (magnitude) // }); // .style('fill', function(p) { // return tenColorScale(p.region) // adds color per region // }) .style('fill', function(p) { return colorQuantize(p[datapoint]); // each category has a different shade of red }) } // */ // *** use images // \ you can resize images on buttonClick - does not work well with raster img (= png, jpg etc) teamG .insert('image', 'text') // use insert() (not append()) to insert the images before the text elements, keeps the labels from being drawn behind the added images .attr('xlink:href', function(d) { return "img/" + d.team + ".png"; }) .attr('width', '45px') // you need to set width & height for svg images to show! 
.attr('height', '20px') .attr('x', '-22') // - 1/2 x value to center image .attr('y', '40'); // - 1/2 y value to center image = -10 // */ // -------------------------------------------------------------------- // *** modal with stats per team - uses modal.html // ** use d3.text() with .html() d3.text('modal.html', function(data) { d3.select('body') .append('div') // creates a new div .attr('id', 'modal') // with id as in main.css .html(data); // and fills it with html content from modal.html }); teamG.on('click', teamClick); function teamClick(d) { //selects and updates td.data as you click on a team d3.selectAll('td.data') // td with class data from modal.html .data(d3.values(d)) .html(function(p) { return p }); } // -------------------------------------------------------------------- // *** pregenerated .svg // * add with 'x-link:href' teamG // add svg molecule image to each team .insert('image', 'text') .attr('x-link:href', 'img/molecule.svg') .attr('width', '60') .attr('height', '60') .attr('x', '-30') .attr('y', '-80'); // ** use d3.html() so you can do more manipulation on HTML nodes with: // d3.html('img/football.svg', function(data) { // console.log(data); // contains <svg> > <g> > <path>, but we only want <p> // }) // ----- when you don't add svg to data d3.html('img/football.svg', loadSVG1); function loadSVG1(svgData) { // load svg into the fragment // \ .empty() checks if selection has elements inside it, fires true after we moved the paths out of the fragments into main svg // \ .empty() with while statement lets us move all path elements into the SVG canvas out of the fragment while (!d3.select(svgData).selectAll('path').empty()) { d3.select('svg').node().appendChild( // use .node() to access dom elements d3.select(svgData).select('path').node()); } d3.selectAll('path').attr('transform', 'translate(50,50)'); // move 50x and 50y from left corner (0,0) } // ----- when you add svg to data d3.html('img/football.svg', loadSVG2); function load
Data) { // \ drawback1: can't use insert() so you need to put images in right order // \ drawback2: added with cloneNode() so they have no data bound to them > see next * d3.selectAll('g').each(function() { // each statement for each <g> var gParent = this; // = <g> node console.log(this); d3.select(svgData).selectAll('path').each(function() { // each statement for each <path> gParent.appendChild(this.cloneNode(true)) // clone the paths and append to each <g> (each team) }); }) recolorFootballs(); } function recolorFootballs() { // d3.selectAll('path') // .attr('transform', 'translate(-15, -15) scale(0.3)') // make the football smaller // .style('fill', '#d206a4') // .style('stroke-width', '2px') // .style('stroke', 'white') // */ // * (you can rebind: first select <g> .each() and then bind to <path> with .datum()= teamG.each(function(d) { d3.select(this) .selectAll('path') .datum(d) // 1 (singular for) data() - when you're binding 1 piece of data to an element) }); var tenColorScale = d3.scale.category10(["UEFA", "CONMEBOL", "CAF", "AFC"]); d3.selectAll('path') .attr('transform', 'translate(-15, -15) scale(0.3)') .style('fill', function (p) { return tenColorScale(p.region); // !! NOT DEFINED ERROR?? }) .style('stroke', 'white') .style('stroke-width', '2px'); } }
SVG2(svg
identifier_name
worldcup.js
// d: data in selection - p: data you mouseover // \ gets triggered by body onload in html function createSoccerViz() { d3.csv("data/sandbox/worldcup.csv", function(data) { overallTeamViz(data); }); } function overallTeamViz(incomingData) { d3.select("svg") .append('g') // appends <g> to <svg> to move it and center contents more easily .attr('id', 'teamsG') .attr('transform', 'translate(50,300)') .selectAll("g") .data(incomingData) .enter() // ** creates <g> for each team (to add labels for example) .append("g") .attr('class', 'overallG') // add class overallG to be able to select below .attr('transform', function(d,i) { return 'translate(' + (i * 50) + ', 0)' }); var teamG = d3.selectAll('g.overallG'); // assign selection with 'teamG', for DRY // * create pink circles teamG .append('circle') // .attr('r', 20) // ** transition - circle size pulses over 1000ms .attr('r', 0) .transition() .delay(function(d, i) { return i * 100 }) // circles appear one by one - i * 100ms .duration(500) .attr('r', 20) .transition() .duration(500) .attr("r", 20) // .style('fill', 'pink') .style('stroke', 'black') .style('stroke-width', '1px'); // ** create labels teamG .append('text') .style('text-anchor', 'middle') // how the label aligns compared to the position you give it .attr('y', 30) // .style('font-size', '10px') .text(function(d) { return d.team; }) // -------------------------------------------------------------------- // ** add buttons to filter / adjust the chart // \ creating buttons dynamically (like this) is scalable for different datasets var dataKeys = d3.keys(incomingData[0]).filter(function(el) { // d3.keys returns the names of the attr of an object as an array ( labels are [0] - team, region, win, loss ... 
) return el != "team" && el != "region"; // we want buttons for everything except team and region }); d3.select("#controls") .selectAll("button.teams") // select all buttons with the class 'team' // \ dataKeys consists of an array of attr names, so the d corresponds to one of those names and makes a good button title .data(dataKeys) // numerical data (all data except "team" and "region"(are both strings)) .enter() .append('button') .on("click", buttonClick) // gives onclick behaviour to each button. with a wrapper that gives access to the data that was bound to it when it was created // .attr("onclick", "console.log('click')" // alternative for on('click') to access HTML mouse events - notice " " at console.log // \ There’s a D3-specific reason to use the .on function: it sends the bound data to the function automatically and in the same format as the anonymous inline functions we’ve been using to set style and attribute. .html(function(d) { return d; }); // shows text on buttons - dataKeys = incomingData[0] /* // ** add interactivity to button click // \ We can create buttons based on the attributes of the data and dynamically measure the data based on the attribute bound to the button. 
function buttonClick(datapoint) { // fires on button click - bound data is automatically sent as first argument var maxValue = d3.max(incomingData, function(d) { return parseFloat(d[datapoint]); // click on button sends datapoint }); var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]); // * resize radius of circles from each team per category teamG .select("circle") .transition() .duration(1000) .attr('r', function(p) { return radiusScale(p[datapoint]); }); } */ // -------------------------------------------------------------------- /* // ** add interactivity on mouseover teamG.on("mouseover", highlightRegion); function highlightRegion(d) { teamG // = d3.selectAll("g.overallG") .select("circle") .style('fill', function(p) { // changed to p because d already defined return p.region == d.region ? "red" : "gray"; // circle turns red if you mouse over (if d in selection = element you moused over, turn red) }); } // ** add interactivity on mouseout teamG.on('mouseout', function() { teamG .select("circle")
// -------------------------------------------------------------------- // \ access dom element with 'this' (only in inline function) or '.node()' // \ useful cause you can use js functionality (ex: clone, measure path length) & re-append a child element d3.select("circle").each(function(d,i) { // select one circle so first team console.log(this); // this: <circle r="3.6.."></circle> }); d3.select("circle").node() // <circle r="3.6.."></circle> // -------------------------------------------------------------------- /* // ** add interactivity on mouseover teamG.on('mouseover', highlightRegion2); function highlightRegion2(d,i) { d3.select(this) // this = <circle> .select("text") .classed("active", true) .attr('y', 60); // move text down by 60 px d3.selectAll("g.overallG") .select('circle') .each(function(p,i) { p.region == d.region ? d3.select(this).classed("active", true) : // increase label font-size - css: circle.active d3.select(this).classed("inactive", true); }) } // ** add interactivity on mouseout teamG.on("mouseout", unHighlight) function unHighlight() { // mouse event is attached to <g> so if you mouse over circle or text it will trigger // \ you can disable like this: // teamG.select("text").style("pointer-events","none"); teamG .select("circle") .attr("class", ""); // remove active class teamG .select("text") .classed("highlight", false) .classed("active", false) // remove active class .attr("y", 30); }; // */ // -------------------------------------------------------------------- // *** use color // /* // ** add interactivity on mouseover // \ use css when possible, d3 functions are inline (ex: dynmic colors and transparency) teamG.on('mouseover', highlightRegion2); function highlightRegion2(d,i) { // \ colors in rgb get muddy, unless you break the color ramp in multiple stops teamColor = d3.rgb('pink') // or: d3.rgb(255,0,0); ('#ff0000'), ("rgb(255,0,0)") d3.select(this) .select('text') .classed('highlight', true) .attr('y,10') teamG // = 
d3.selectAll('g.overallG') .select('circle') .style('fill', function(p) { return p.region == d.region? teamColor.darker(.75) : teamColor.brighter(.5) // .darken() & .brighten() }) this.parentElement.appendChild(this); } // ** add interactivity on buttonClick function buttonClick(datapoint) { var maxValue = d3.max(incomingData, function(d) { return parseFloat(d[datapoint]); }); var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]); // var ybRamp = d3.scale.linear().domain([0, maxValue]).range(['yellow', 'blue']); // \ use interpolate to use any other scale than rgb - hsl, hcl // var ybRamp = d3.scale.linear().interpolate(d3.interpolateHsl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue green var ybRamp = d3.scale.linear().interpolate(d3.interpolateHcl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue pink orange - use d3.hsl() when you darken pink (avoid muddying) // var ybRamp = d3.scale.linear().interpolate(d3.interpolateLab).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue purple beige var tenColorScale = d3.scale.category10(["UEFA", "CONMEBOL", "CAF", "AFC"]); // \ colorbrewer: designed for qualitive data separated into categories: use quantize! 
- so we need to sort the numerical data into ranges var colorQuantize = d3.scale.quantize().domain([0,maxValue]).range(colorbrewer.Reds[3]); // sorts data into [3] categories according to data value teamG .select("circle") .transition() .duration(1000) .attr('r', function(p) { return radiusScale(p[datapoint]); }) // .style('fill', function(p) { // return ybRamp(p[datapoint]) // adds color for data values (magnitude) // }); // .style('fill', function(p) { // return tenColorScale(p.region) // adds color per region // }) .style('fill', function(p) { return colorQuantize(p[datapoint]); // each category has a different shade of red }) } // */ // *** use images // \ you can resize images on buttonClick - does not work well with raster img (= png, jpg etc) teamG .insert('image', 'text') // use insert() (not append()) to insert the images before the text elements, keeps the labels from being drawn behind the added images .attr('xlink:href', function(d) { return "img/" + d.team + ".png"; }) .attr('width', '45px') // you need to set width & height for svg images to show! 
.attr('height', '20px') .attr('x', '-22') // - 1/2 x value to center image .attr('y', '40'); // - 1/2 y value to center image = -10 // */ // -------------------------------------------------------------------- // *** modal with stats per team - uses modal.html // ** use d3.text() with .html() d3.text('modal.html', function(data) { d3.select('body') .append('div') // creates a new div .attr('id', 'modal') // with id as in main.css .html(data); // and fills it with html content from modal.html }); teamG.on('click', teamClick); function teamClick(d) { //selects and updates td.data as you click on a team d3.selectAll('td.data') // td with class data from modal.html .data(d3.values(d)) .html(function(p) { return p }); } // -------------------------------------------------------------------- // *** pregenerated .svg // * add with 'x-link:href' teamG // add svg molecule image to each team .insert('image', 'text') .attr('x-link:href', 'img/molecule.svg') .attr('width', '60') .attr('height', '60') .attr('x', '-30') .attr('y', '-80'); // ** use d3.html() so you can do more manipulation on HTML nodes with: // d3.html('img/football.svg', function(data) { // console.log(data); // contains <svg> > <g> > <path>, but we only want <p> // }) // ----- when you don't add svg to data d3.html('img/football.svg', loadSVG1); function loadSVG1(svgData) { // load svg into the fragment // \ .empty() checks if selection has elements inside it, fires true after we moved the paths out of the fragments into main svg // \ .empty() with while statement lets us move all path elements into the SVG canvas out of the fragment while (!d3.select(svgData).selectAll('path').empty()) { d3.select('svg').node().appendChild( // use .node() to access dom elements d3.select(svgData).select('path').node()); } d3.selectAll('path').attr('transform', 'translate(50,50)'); // move 50x and 50y from left corner (0,0) } // ----- when you add svg to data d3.html('img/football.svg', loadSVG2); function loadSVG2(svgData) { 
// \ drawback1: can't use insert() so you need to put images in right order // \ drawback2: added with cloneNode() so they have no data bound to them > see next * d3.selectAll('g').each(function() { // each statement for each <g> var gParent = this; // = <g> node console.log(this); d3.select(svgData).selectAll('path').each(function() { // each statement for each <path> gParent.appendChild(this.cloneNode(true)) // clone the paths and append to each <g> (each team) }); }) recolorFootballs(); } function recolorFootballs() { // d3.selectAll('path') // .attr('transform', 'translate(-15, -15) scale(0.3)') // make the football smaller // .style('fill', '#d206a4') // .style('stroke-width', '2px') // .style('stroke', 'white') // */ // * (you can rebind: first select <g> .each() and then bind to <path> with .datum()= teamG.each(function(d) { d3.select(this) .selectAll('path') .datum(d) // 1 (singular for) data() - when you're binding 1 piece of data to an element) }); var tenColorScale = d3.scale.category10(["UEFA", "CONMEBOL", "CAF", "AFC"]); d3.selectAll('path') .attr('transform', 'translate(-15, -15) scale(0.3)') .style('fill', function (p) { return tenColorScale(p.region); // !! NOT DEFINED ERROR?? }) .style('stroke', 'white') .style('stroke-width', '2px'); } }
.style("fill", "pink"); }); */
random_line_split
worldcup.js
// d: data in selection - p: data you mouseover // \ gets triggered by body onload in html function createSoccerViz() { d3.csv("data/sandbox/worldcup.csv", function(data) { overallTeamViz(data); }); } function overallTeamViz(incomingData) { d3.select("svg") .append('g') // appends <g> to <svg> to move it and center contents more easily .attr('id', 'teamsG') .attr('transform', 'translate(50,300)') .selectAll("g") .data(incomingData) .enter() // ** creates <g> for each team (to add labels for example) .append("g") .attr('class', 'overallG') // add class overallG to be able to select below .attr('transform', function(d,i) { return 'translate(' + (i * 50) + ', 0)' }); var teamG = d3.selectAll('g.overallG'); // assign selection with 'teamG', for DRY // * create pink circles teamG .append('circle') // .attr('r', 20) // ** transition - circle size pulses over 1000ms .attr('r', 0) .transition() .delay(function(d, i) { return i * 100 }) // circles appear one by one - i * 100ms .duration(500) .attr('r', 20) .transition() .duration(500) .attr("r", 20) // .style('fill', 'pink') .style('stroke', 'black') .style('stroke-width', '1px'); // ** create labels teamG .append('text') .style('text-anchor', 'middle') // how the label aligns compared to the position you give it .attr('y', 30) // .style('font-size', '10px') .text(function(d) { return d.team; }) // -------------------------------------------------------------------- // ** add buttons to filter / adjust the chart // \ creating buttons dynamically (like this) is scalable for different datasets var dataKeys = d3.keys(incomingData[0]).filter(function(el) { // d3.keys returns the names of the attr of an object as an array ( labels are [0] - team, region, win, loss ... 
) return el != "team" && el != "region"; // we want buttons for everything except team and region }); d3.select("#controls") .selectAll("button.teams") // select all buttons with the class 'team' // \ dataKeys consists of an array of attr names, so the d corresponds to one of those names and makes a good button title .data(dataKeys) // numerical data (all data except "team" and "region"(are both strings)) .enter() .append('button') .on("click", buttonClick) // gives onclick behaviour to each button. with a wrapper that gives access to the data that was bound to it when it was created // .attr("onclick", "console.log('click')" // alternative for on('click') to access HTML mouse events - notice " " at console.log // \ There’s a D3-specific reason to use the .on function: it sends the bound data to the function automatically and in the same format as the anonymous inline functions we’ve been using to set style and attribute. .html(function(d) { return d; }); // shows text on buttons - dataKeys = incomingData[0] /* // ** add interactivity to button click // \ We can create buttons based on the attributes of the data and dynamically measure the data based on the attribute bound to the button. 
function buttonClick(datapoint) { // fires on button click - bound data is automatically sent as first argument var maxValue = d3.max(incomingData, function(d) { return parseFloat(d[datapoint]); // click on button sends datapoint }); var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]); // * resize radius of circles from each team per category teamG .select("circle") .transition() .duration(1000) .attr('r', function(p) { return radiusScale(p[datapoint]); }); } */ // -------------------------------------------------------------------- /* // ** add interactivity on mouseover teamG.on("mouseover", highlightRegion); function highlightRegion(d) { teamG // = d3.selectAll("g.overallG") .select("circle") .style('fill', function(p) { // changed to p because d already defined return p.region == d.region ? "red" : "gray"; // circle turns red if you mouse over (if d in selection = element you moused over, turn red) }); } // ** add interactivity on mouseout teamG.on('mouseout', function() { teamG .select("circle") .style("fill", "pink"); }); */ // -------------------------------------------------------------------- // \ access dom element with 'this' (only in inline function) or '.node()' // \ useful cause you can use js functionality (ex: clone, measure path length) & re-append a child element d3.select("circle").each(function(d,i) { // select one circle so first team console.log(this); // this: <circle r="3.6.."></circle> }); d3.select("circle").node() // <circle r="3.6.."></circle> // -------------------------------------------------------------------- /* // ** add interactivity on mouseover teamG.on('mouseover', highlightRegion2); function highlightRegion2(d,i) { d3.select(this) // this = <circle> .select("text") .classed("active", true) .attr('y', 60); // move text down by 60 px d3.selectAll("g.overallG") .select('circle') .each(function(p,i) { p.region == d.region ? 
d3.select(this).classed("active", true) : // increase label font-size - css: circle.active d3.select(this).classed("inactive", true); }) } // ** add interactivity on mouseout teamG.on("mouseout", unHighlight) function unHighlight() { // mouse event is attached to <g> so if you mouse over circle or text it will trigger // \ you can disable like this: // teamG.select("text").style("pointer-events","none"); teamG .select("circle") .attr("class", ""); // remove active class teamG .select("text") .classed("highlight", false) .classed("active", false) // remove active class .attr("y", 30); }; // */ // -------------------------------------------------------------------- // *** use color // /* // ** add interactivity on mouseover // \ use css when possible, d3 functions are inline (ex: dynmic colors and transparency) teamG.on('mouseover', highlightRegion2); function highlightRegion2(d,i) { // \ colors in rgb get muddy, unless you break the color ramp in multiple stops teamColor = d3.rgb('pink') // or: d3.rgb(255,0,0); ('#ff0000'), ("rgb(255,0,0)") d3.select(this) .select('text') .classed('highlight', true) .attr('y,10') teamG // = d3.selectAll('g.overallG') .select('circle') .style('fill', function(p) { return p.region == d.region? 
teamColor.darker(.75) : teamColor.brighter(.5) // .darken() & .brighten() }) this.parentElement.appendChild(this); } // ** add interactivity on buttonClick function buttonClick(datapoint) { var maxValue = d3.max(incomingData, function(d) { return parseFloat(d[datapoint]); }); var radiusScale = d3.scale.linear().domain([0,maxValue]).range([2,20]); // var ybRamp = d3.scale.linear().domain([0, maxValue]).range(['yellow', 'blue']); // \ use interpolate to use any other scale than rgb - hsl, hcl // var ybRamp = d3.scale.linear().interpolate(d3.interpolateHsl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue green var ybRamp = d3.scale.linear().interpolate(d3.interpolateHcl).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue pink orange - use d3.hsl() when you darken pink (avoid muddying) // var ybRamp = d3.scale.linear().interpolate(d3.interpolateLab).domain([0,maxValue]).range(['yellow', 'blue']); // result: blue purple beige var tenColorScale = d3.scale.category10(["UEFA", "CONMEBOL", "CAF", "AFC"]); // \ colorbrewer: designed for qualitive data separated into categories: use quantize! 
- so we need to sort the numerical data into ranges var colorQuantize = d3.scale.quantize().domain([0,maxValue]).range(colorbrewer.Reds[3]); // sorts data into [3] categories according to data value teamG .select("circle") .transition() .duration(1000) .attr('r', function(p) { return radiusScale(p[datapoint]); }) // .style('fill', function(p) { // return ybRamp(p[datapoint]) // adds color for data values (magnitude) // }); // .style('fill', function(p) { // return tenColorScale(p.region) // adds color per region // }) .style('fill', function(p) { return colorQuantize(p[datapoint]); // each category has a different shade of red }) } // */ // *** use images // \ you can resize images on buttonClick - does not work well with raster img (= png, jpg etc) teamG .insert('image', 'text') // use insert() (not append()) to insert the images before the text elements, keeps the labels from being drawn behind the added images .attr('xlink:href', function(d) { return "img/" + d.team + ".png"; }) .attr('width', '45px') // you need to set width & height for svg images to show! 
.attr('height', '20px') .attr('x', '-22') // - 1/2 x value to center image .attr('y', '40'); // - 1/2 y value to center image = -10 // */ // -------------------------------------------------------------------- // *** modal with stats per team - uses modal.html // ** use d3.text() with .html() d3.text('modal.html', function(data) { d3.select('body') .append('div') // creates a new div .attr('id', 'modal') // with id as in main.css .html(data); // and fills it with html content from modal.html }); teamG.on('click', teamClick); function teamClick(d) { //selects and updates td.data as you click on a team d3.selectAll('td.data') // td with class data from modal.html .data(d3.values(d)) .html(function(p) { return p }); } // -------------------------------------------------------------------- // *** pregenerated .svg // * add with 'x-link:href' teamG // add svg molecule image to each team .insert('image', 'text') .attr('x-link:href', 'img/molecule.svg') .attr('width', '60') .attr('height', '60') .attr('x', '-30') .attr('y', '-80'); // ** use d3.html() so you can do more manipulation on HTML nodes with: // d3.html('img/football.svg', function(data) { // console.log(data); // contains <svg> > <g> > <path>, but we only want <p> // }) // ----- when you don't add svg to data d3.html('img/football.svg', loadSVG1); function loadSVG1(svgData) { //
/ ----- when you add svg to data d3.html('img/football.svg', loadSVG2); function loadSVG2(svgData) { // \ drawback1: can't use insert() so you need to put images in right order // \ drawback2: added with cloneNode() so they have no data bound to them > see next * d3.selectAll('g').each(function() { // each statement for each <g> var gParent = this; // = <g> node console.log(this); d3.select(svgData).selectAll('path').each(function() { // each statement for each <path> gParent.appendChild(this.cloneNode(true)) // clone the paths and append to each <g> (each team) }); }) recolorFootballs(); } function recolorFootballs() { // d3.selectAll('path') // .attr('transform', 'translate(-15, -15) scale(0.3)') // make the football smaller // .style('fill', '#d206a4') // .style('stroke-width', '2px') // .style('stroke', 'white') // */ // * (you can rebind: first select <g> .each() and then bind to <path> with .datum()= teamG.each(function(d) { d3.select(this) .selectAll('path') .datum(d) // 1 (singular for) data() - when you're binding 1 piece of data to an element) }); var tenColorScale = d3.scale.category10(["UEFA", "CONMEBOL", "CAF", "AFC"]); d3.selectAll('path') .attr('transform', 'translate(-15, -15) scale(0.3)') .style('fill', function (p) { return tenColorScale(p.region); // !! NOT DEFINED ERROR?? }) .style('stroke', 'white') .style('stroke-width', '2px'); } }
load svg into the fragment // \ .empty() checks if selection has elements inside it, fires true after we moved the paths out of the fragments into main svg // \ .empty() with while statement lets us move all path elements into the SVG canvas out of the fragment while (!d3.select(svgData).selectAll('path').empty()) { d3.select('svg').node().appendChild( // use .node() to access dom elements d3.select(svgData).select('path').node()); } d3.selectAll('path').attr('transform', 'translate(50,50)'); // move 50x and 50y from left corner (0,0) } /
identifier_body
System.py
""" Author: Nathanael Bayard Module Name: System Description: type representing linear systems, and functions pertaining to them, like Gauss's method of resolution """ from List import zipWith, firstIndex, isolateItem, unzipWith, subtractLists, filterOutIx from Bool import forall, isZero, isNotZero from Maybe import Maybe, Just, Nothing from Matrix import (idMatrix, nullVector, firstNonNullEachLine, negate, unitVector, familyFromMatrix, columnAt, isNullVector, ncols) # ==== General Description of the algorithm: # # the function that is called first is # systemSolution : [Num n] Matrix n . Vector n -> Maybe (Solution n) # with # Solution n = (Family n, Vector n) # # the first argument of `systemSolution` is the matrix `A` in # the equation `AX = Y`, the second argument is the vector `Y`. # # the first element of the type `Solution n` represents the basis # of the kernel of A, or is an empty list if the kernel is reduced to # the null vector. # the second element of `Solution n` is of course the particular # solution of the system found. # # if the system can't be solved, because at some point during # the algorithm, we ended up with an equation of `0 = x`, with # `x != 0`, then the result of `systemSolution` will be Nothing. # systemSolution : [Num n] Matrix n . Vector n -> Maybe (Solution n) def systemSolution(matrix, rightSide): maybeSystem = maybeSystemInit(matrix, rightSide).maybeDo(echelonized, 0) if maybeSystem == Nothing: # the system has no solution return Nothing else: system = maybeSystem.justValue() if len(system.pivotalLines) == 0: # extreme case: no pivot was found # during the echelonizing (which means the leftside # matrix A is null) and yet its result # is not Nothing, so the system does admit some # solution, so we conclude Y = 0 too, # and the kernel is thereafter the whole domain # of the linear application that could be # associated with A (if A has p columns, # that would canonically be R^p). 
# A particular solution can be any vector at all, # like the vector null. p = system.leftSideWidth solution = (idMatrix(p), nullVector(p)) return Just(solution) else: pivotalLines = keepPivotalLines(system) return Just(extractSolution(normalized(pivotalLines))) # ==== next step: # `systemSolution` calls # maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n) # # this function fuses/zips the elements of the rightside vector to the end # of each line of the left matrix. # then it calls the constructor `maybeSystem` with parameters # `pivotalLines = []` and `nonPivotalLines` as the previous result of # the gluing of both sides of the equation. # example: if the equation of the system is # 0 1 2 | x 9 # 3 4 5 | y = 9 # 6 7 8 | z 9 # then `maybeSystemInit would call: # maybeSystem([], [ [0,1,2,9], [3,4,5,9], [6,7,8,9] ]) # maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n) def maybeSystemInit(matrix, rightSide): def fuseToEnd(line, rightValue): return line + [rightValue] fusedLines = zipWith(fuseToEnd, matrix, rightSide) return maybeSystem([], fusedLines) # ==== next step: # `maybeSystem` is a 'smart' constructor that returns Nothing if # any line in either of its parameters is of the form [0,0,...,0,x] # with x != 0, which would correspond to an equation 0 = x != 0` # which would make the result of `systemSolution` be Nothing automatically, # thanks to the magic of the type `Maybe`! # # the constructor `maybeSystem` will be called at each step of the # algorithm, ensuring that if at any point, the system is found unsolvable, # no further operation will be performed. # maybeSystem : List (Rows n) . 
List (Rows n) -> Maybe (System n) def maybeSystem(pivotalLines, nonPivotalLines): if forall(pivotalLines + nonPivotalLines, isValidLine): return Maybe(value = System(pivotalLines, nonPivotalLines)) else: return Nothing # returns True if and only if the list # is not a series of zeroes ended with one # last non-zero value, as it would amount to # an equation of the form 0 = x != 0 # # isValidLine : List n -> Bool def isValidLine(line): if len(line) <= 1: error(isValidLine, "input list is too short to be part of a `System n`") leftSide = line[:-1] rightSide = line[-1] if forall(leftSide, isZero): return isZero(rightSide) else: return True # (you'll notice i grew tired of mentioning the ever-present # type class [Num n] of the parameter `n`...) # ==== small interlude to introduce the type/class `System n`: # class representing a system in the process of being solved. # mostly just contains two attributes, `pivotalLines` and # `nonPivotalLines`, each one being a list of vectors/lines. # # we'll search new pivots in the nonPivotalLines list, and # everytime we find a new pivot in a column, we'll move the corresponding # line to the group of the "pivotalLines". class System(): # System : List (Row n) . List (Row n) -> System n def __init__(self, pivotalLines, nonPivotalLines): self.pivotalLines = pivotalLines self.nonPivotalLines = nonPivotalLines allLines = pivotalLines + nonPivotalLines if len(allLines) == 0: error(System.__init__, "wrong input (two empty lists) " + "for System constructor") self.leftSideWidth = len(allLines[0]) - 1 # number of columns of the leftside matrix of the equation. # -1 because the last element of each line is # part of the right side (the vector Y in AX = Y) # this value will be used to avoid trying to find # a pivot in the right side column vector # ==== next step of the algorithm: # # back in `systemSolution`, with `Maybe` a valid system. If it is so, # the method `maybeDo` will call # echelonized : System n . 
Index -> Maybe (System n) # with this valid system as its first argument, and an additional parameter # `colIndex = 0`. # this function returns either `Just` an echelonized system, # or Nothing if at some point we encounter `0 = x != 0`. # # this function is recursive (indirectly). colIndex represents an index of # the column of the leftside matrix `A` in which we'll try # to find a pivot. the recursion will thus go through each column index # between 0 and `ncols(A)`. # echelonized : System n . Index -> Maybe (System n) def echelonized(system, colIndex): #print "DBG" #printMatrix(system.pivotalLines) #printMatrix(system.nonPivotalLines) maybePivotalLineIndex = findPivot(system, colIndex) if maybePivotalLineIndex == Nothing: # pivot not found => this column is filled with zeroes # on the non pivotal lines, so we do nothing maybeSystem = Just(system) else: pivotalLineIndex = maybePivotalLineIndex.justValue() maybeSystem = usePivot(system, pivotalLineIndex, colIndex) if maybeSystem == Nothing: return Nothing else: newSystem = maybeSystem.justValue() if colIndex >= newSystem.leftSideWidth - 1: # we reached the end of recursion, having # walked through all the columns of the leftside # matrix of the equation return Just(newSystem) else: # we repeat the process for the next column return echelonized(newSystem, colIndex + 1) # the previous function starts by calling # findPivot : System n . Index -> Maybe Index # which `Maybe` returns the index of the first non-pivotal line # (that is the line which wasn't used previously for the pivot # of another column) which # contains a non-null element at the index column `colIndex`. # returns Nothing if the whole column at that index is null, # or if there aren't any non pivotal lines remaining.
# to isolate the line into which the pivot was found from the aforementioned list. # findPivot : System n . Index -> Maybe Index def findPivot(system, colIndex): if len(system.nonPivotalLines) == 0: return Nothing col = columnAt(colIndex, system.nonPivotalLines) maybeLineIndex = firstIndex(isNotZero, col) #print "findPivot", x, colIndex return maybeLineIndex # ==== next step: # back in `echelonized`: if the index of the line of the pivot # given by `findPivot` is Nothing, we do nothing with the system # else, we call `usePivot` with the index of the soon-to-be pivotal line # usePivot : System n . Index . Index -> Maybe (System n) # # in usePivot: we start by isolating the new pivotal lines # from the rest of the still-not-yet pivotal lines. # then we recuperate the value of the pivot, using the # index of the column we're operating over. # # we create a function which will be used over all the lines # in `system`, both the pivotal ones and the non pivotal ones, # except the one that was just isolated, the one that contains # the pivot. # # the operation consists in creating zeroes everywhere in the column # except for the pivot. the core of the process is in the function # `modifiedLine` (i know, the name is not very appropriate... it's # the best i found though). # usePivot : System n . Index . 
Index -> Maybe (System n) def usePivot(system, pivotalLineIndex, colIndex): (pivotalLine, nonPivotalLines) = \ isolateItem(system.nonPivotalLines, pivotalLineIndex) pivot = pivotalLine[colIndex] #print "pivot", pivot def forEachLine(line): val = line[colIndex] coeff = val / pivot # that way, val - coeff * pivot == 0 return modifiedLine(line, pivotalLine, coeff) newPivotalLines = map(forEachLine, system.pivotalLines) + [pivotalLine] newNonPivotalLines = map(forEachLine, nonPivotalLines) return maybeSystem(newPivotalLines, newNonPivotalLines) # the function `modifiedLine` is straightforward: # subtract each value of `line` by the multiplication # of an appropriate coefficient with each value of # `otherLine`. # the appropriate coefficient is calculated so # that the element at the column of the current pivot # in the `line` become zero (cf `forEachLine` in usePivot) # modifiedLine : List n . List n . n -> List n def modifiedLine(line, otherLine, coeff): def f(val, otherVal): return val - otherVal * coeff return zipWith(f, line, otherLine) # ==== next step # back in usePivot: # as previously mentioned, we used forEachLine over both # the pivotal and non pivotal lines, except the currently # pivotal line (the one one which we found the pivot we're # actually using). # # from then on, we `Maybe` build a new system, not forgetting # to stash the latest pivotal lines with all the old pivotal lines # # maybeSystem will thereafter test that no line is invalid (0 = x != 0). # ==== next step # back to echelonized: we get back the result of usePivot. if Nothing, # we return Nothing and therefore break the recursion. if not, # we check if we're at the last column of the matrix A in AX = Y, and # if so, we return the result of usePivot and break the recursion. # if not, we call `echelonized` recursively again with the justValue() of # the result of `usePivot`, and an incremented column index. 
# i deem the recursion acceptable even in Python because # nobody will ever use this program to solve a system # of several hundreds of variables, will they? # ==== next step: # we exited `echelonized`, so we're back in `systemSolution`, with either # Nothing (in which case we directly return Nothing) # or with `Just` an echelonized system. # in which case: we have to take care of an extreme, special case: # when both sides of the equation are null (a null matrix and vector). # cf the body of `systemSolution` for more details. # # from now on we'll assume the leftside is not a null matrix, # and therefore we at least found one pivot. # we call keepPivotalLines over the echelonized system. # keepPivotalLines : System n -> List (Row n) # # this function is extremely short and nearly useless, but is standing alone # for the sake of clarity. its purpose is to mark the moment when # we throw away the remaining nonPivotalLines, because they're # necessarily just full of zeroes (otherwise they'd either be invalid, # or would imply the Gauss Algorithm implemented here utterly failed somehow # along the way). # this, of course, should never happen. # the result of keepPivotalLines is thus `system.pivotalLines : List (Row n)` # keepPivotalLines : System n -> List (Row n) def keepPivotalLines(system): if not forall(system.nonPivotalLines, isNullVector): error(keepPivotalLines, "somehow after successfully echelonizing, " + "one non pivotal line is not full of zeroes") return system.pivotalLines # ==== next step: # once keepPivotalLines has been called, it's the turn of # normalized : List (Row n) -> List (Row n) # whose job is to normalize each pivotal line, aka to # multiply each pivotal line by a coefficient # so that each pivot (which is also the first non-zero value # on each line) take the value of 1. 
# normalized : List (Row n) -> List (Row n) def normalized(pivotalLines): return map(normalizedLine, pivotalLines) # normalized delegates all the work to map and to: # normalizedLine : Row n -> Row n # which replaces each value in the input # with itself divided by the pivot, which # is always the first non-zero value # encountered in the list. # normalizedLine : Row n -> Row n def normalizedLine(line): maybePivotIx = firstIndex(isNotZero, line) if maybePivotIx == Nothing: # the line is full of zeroes: # should never happen error(normalizedLine, "the line is full of zeroes") pivot = line[maybePivotIx.justValue()] if pivot != 1: return map(lambda val: val / pivot, line) else: return line # ==== next step: # now's the time to extract the solution from # the normalized pivotal lines. # extractSolution : List (Row n) -> Solution n # let us be reminded that: # Solution n = (Family n, Vector n) # that is, a solution is the couple composed of # the kernel basis, or [] if the kernel is {0} # and of the particular solution to the system. # # it's the trickiest part of the algorithm. # # we start by ungluing the leftside and rightside of # each line, to recuperate a left matrix and right vector, # using `splitSides` below. # # we obtain `p = ncols(leftMatrix)`, # then we get the column indices of each pivot in each pivotal line # (that will be the first non null value of each line). # we use it to get the column indices which *don't* contain # any pivot, with the help of `substractLists`. # # we then negate all the elements of the left matrix. # it also negates the (normalized) pivots but we don't # care because they'll soon be thrown away anyway. # # for each non pivotal column index: # we build a unit vector with a `1` at that # column index, with `p` coordinates. # we then insert that vector inside the left Matrix # and we of course insert a corresponding 0 in the # right side vector. 
# # this finally done, we only need to get the columns # of the left side matrix, minus the columns of the # pivots, and we get the basis of the kernel. # # the particular solution is just the right side vector. # # we thereafter return the tuple of both of them together, # and the algorithm is done. # extractSolution : List (Row n) -> Solution n def extractSolution(lines): (leftMatrix, rightVector) = unzipWith(splitSides, lines) p = ncols(leftMatrix) maybePivotalColIs = firstNonNullEachLine(leftMatrix) if maybePivotalColIs == Nothing: error(extractSolution, "one pivotal line is full of zeroes") pivotalColIs = maybePivotalColIs.justValue() nonPivotalColIs = subtractLists(range(p), pivotalColIs) leftMatrix = negate(leftMatrix) for index in nonPivotalColIs: uv = unitVector(p, index) leftMatrix.insert(index, uv) rightVector.insert(index, 0) #print "filled" #printMatrix(leftMatrix) kernelBasis = filterOutIx(pivotalColIs, familyFromMatrix(leftMatrix)) particularSolution = rightVector return (kernelBasis, particularSolution) def splitSides(line): leftSide = line[:-1] rightSide = line[-1] return (leftSide, rightSide)
# that line index depends on the number of lines in system.nonPivotalLines, # but that's not a problem because we'll use that index only
random_line_split
System.py
""" Author: Nathanael Bayard Module Name: System Description: type representing linear systems, and functions pertaining to them, like Gauss's method of resolution """ from List import zipWith, firstIndex, isolateItem, unzipWith, subtractLists, filterOutIx from Bool import forall, isZero, isNotZero from Maybe import Maybe, Just, Nothing from Matrix import (idMatrix, nullVector, firstNonNullEachLine, negate, unitVector, familyFromMatrix, columnAt, isNullVector, ncols) # ==== General Description of the algorithm: # # the function that is called first is # systemSolution : [Num n] Matrix n . Vector n -> Maybe (Solution n) # with # Solution n = (Family n, Vector n) # # the first argument of `systemSolution` is the matrix `A` in # the equation `AX = Y`, the second argument is the vector `Y`. # # the first element of the type `Solution n` represents the basis # of the kernel of A, or is an empty list if the kernel is reduced to # the null vector. # the second element of `Solution n` is of course the particular # solution of the system found. # # if the system can't be solved, because at some point during # the algorithm, we ended up with an equation of `0 = x`, with # `x != 0`, then the result of `systemSolution` will be Nothing. # systemSolution : [Num n] Matrix n . Vector n -> Maybe (Solution n) def systemSolution(matrix, rightSide): maybeSystem = maybeSystemInit(matrix, rightSide).maybeDo(echelonized, 0) if maybeSystem == Nothing: # the system has no solution return Nothing else: system = maybeSystem.justValue() if len(system.pivotalLines) == 0: # extreme case: no pivot was found # during the echelonizing (which means the leftside # matrix A is null) and yet its result # is not Nothing, so the system does admit some # solution, so we conclude Y = 0 too, # and the kernel is thereafter the whole domain # of the linear application that could be # associated with A (if A has p columns, # that would canonically be R^p). 
# A particular solution can be any vector at all, # like the vector null. p = system.leftSideWidth solution = (idMatrix(p), nullVector(p)) return Just(solution) else: pivotalLines = keepPivotalLines(system) return Just(extractSolution(normalized(pivotalLines))) # ==== next step: # `systemSolution` calls # maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n) # # this function fuses/zips the elements of the rightside vector to the end # of each line of the left matrix. # then it calls the constructor `maybeSystem` with parameters # `pivotalLines = []` and `nonPivotalLines` as the previous result of # the gluing of both sides of the equation. # example: if the equation of the system is # 0 1 2 | x 9 # 3 4 5 | y = 9 # 6 7 8 | z 9 # then `maybeSystemInit would call: # maybeSystem([], [ [0,1,2,9], [3,4,5,9], [6,7,8,9] ]) # maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n) def maybeSystemInit(matrix, rightSide): def fuseToEnd(line, rightValue): return line + [rightValue] fusedLines = zipWith(fuseToEnd, matrix, rightSide) return maybeSystem([], fusedLines) # ==== next step: # `maybeSystem` is a 'smart' constructor that returns Nothing if # any line in either of its parameters is of the form [0,0,...,0,x] # with x != 0, which would correspond to an equation 0 = x != 0` # which would make the result of `systemSolution` be Nothing automatically, # thanks to the magic of the type `Maybe`! # # the constructor `maybeSystem` will be called at each step of the # algorithm, ensuring that if at any point, the system is found unsolvable, # no further operation will be performed. # maybeSystem : List (Rows n) . 
List (Rows n) -> Maybe (System n) def maybeSystem(pivotalLines, nonPivotalLines): if forall(pivotalLines + nonPivotalLines, isValidLine): return Maybe(value = System(pivotalLines, nonPivotalLines)) else: return Nothing # returns True if and only if the list # is not a series of zeroes ended with one # last non-zero value, as it would amount to # an equation of the form 0 = x != 0 # # isValidLine : List n -> Bool def isValidLine(line): if len(line) <= 1: error(isValidLine, "input list is too short to be part of a `System n`") leftSide = line[:-1] rightSide = line[-1] if forall(leftSide, isZero): return isZero(rightSide) else: return True # (you'll notice i grew tired of mentioning the ever-present # type class [Num n] of the parameter `n`...) # ==== small interlude to introduce the type/class `System n`: # class representing a system in the process of being solved. # mostly just contains two attributes, `pivotalLines` and # `nonPivotalLines`, each one being a list of vectors/lines. # # we'll search new pivots in the nonPivotalLines list, and # everytime we find a new pivot in a column, we'll move the corresponding # line to the group of the "pivotalLines". class System(): # System : List (Row n) . List (Row n) -> System n def __init__(self, pivotalLines, nonPivotalLines): self.pivotalLines = pivotalLines self.nonPivotalLines = nonPivotalLines allLines = pivotalLines + nonPivotalLines if len(allLines) == 0: error(System.__init__, "wrong input (two empty lists) " + "for System constructor") self.leftSideWidth = len(allLines[0]) - 1 # number of columns of the leftside matrix of the equation. # -1 because the last element of each line is # part of the right side (the vector Y in AX = Y) # this value will be used to avoid trying to find # a pivot in the right side column vector # ==== next step of the algorithm: # # back in `systemSolution`, with `Maybe` a valid system. If it is so, # the method `maybeDo` will call # echelonized : System n . 
Index -> Maybe (System n) # with this valid system as its first argument, and an additional parameter # `colIndex = 0`. # this function returns either `Just` an echelonized system, # or Nothing if at some point we encounter `0 = x != 0`. # # this function is recursive (indirectly). colIndex represents an index of # the column of the leftside matrix `A` in which we'll try # to find a pivot. the recursion will thus go through each column index # between 0 and `ncols(A)`. # echelonized : System n . Index -> Maybe (System n) def echelonized(system, colIndex): #print "DBG" #printMatrix(system.pivotalLines) #printMatrix(system.nonPivotalLines) maybePivotalLineIndex = findPivot(system, colIndex) if maybePivotalLineIndex == Nothing: # pivot not found => this column is filled with zeroes # on the non pivotal lines, so we do nothing maybeSystem = Just(system) else: pivotalLineIndex = maybePivotalLineIndex.justValue() maybeSystem = usePivot(system, pivotalLineIndex, colIndex) if maybeSystem == Nothing: return Nothing else: newSystem = maybeSystem.justValue() if colIndex >= newSystem.leftSideWidth - 1: # we reached the end of recursion, having # walked through all the columns of the leftside # matrix of the equation return Just(newSystem) else: # we repeat the process for the next column return echelonized(newSystem, colIndex + 1) # the previous function starts by calling # findPivot : System n . Index -> Maybe Index # which `Maybe` returns the index of the first non-pivotal line # (that is the line which wasn't used previously for the pivot # of another column) which # contains a non-null element at the index column `colIndex`. # returns Nothing if the whole column at that index is null, # or if there aren't any non pivotal lines remaining. # that line index depends on the number of lines in system.nonPivotalLines, # but that's not a problem because we'll use that index only # to isolate the line into which the pivot was found from the aforementioned list. 
# findPivot : System n . Index -> Maybe Index def findPivot(system, colIndex): if len(system.nonPivotalLines) == 0: return Nothing col = columnAt(colIndex, system.nonPivotalLines) maybeLineIndex = firstIndex(isNotZero, col) #print "findPivot", x, colIndex return maybeLineIndex # ==== next step: # back in `echelonized`: if the index of the line of the pivot # given by `findPivot` is Nothing, we do nothing with the system # else, we call `usePivot` with the index of the soon-to-be pivotal line # usePivot : System n . Index . Index -> Maybe (System n) # # in usePivot: we start by isolating the new pivotal lines # from the rest of the still-not-yet pivotal lines. # then we recuperate the value of the pivot, using the # index of the column we're operating over. # # we create a function which will be used over all the lines # in `system`, both the pivotal ones and the non pivotal ones, # except the one that was just isolated, the one that contains # the pivot. # # the operation consists in creating zeroes everywhere in the column # except for the pivot. the core of the process is in the function # `modifiedLine` (i know, the name is not very appropriate... it's # the best i found though). # usePivot : System n . Index . Index -> Maybe (System n) def usePivot(system, pivotalLineIndex, colIndex): (pivotalLine, nonPivotalLines) = \ isolateItem(system.nonPivotalLines, pivotalLineIndex) pivot = pivotalLine[colIndex] #print "pivot", pivot def forEachLine(line): val = line[colIndex] coeff = val / pivot # that way, val - coeff * pivot == 0 return modifiedLine(line, pivotalLine, coeff) newPivotalLines = map(forEachLine, system.pivotalLines) + [pivotalLine] newNonPivotalLines = map(forEachLine, nonPivotalLines) return maybeSystem(newPivotalLines, newNonPivotalLines) # the function `modifiedLine` is straightforward: # subtract each value of `line` by the multiplication # of an appropriate coefficient with each value of # `otherLine`. 
# the appropriate coefficient is calculated so # that the element at the column of the current pivot # in the `line` become zero (cf `forEachLine` in usePivot) # modifiedLine : List n . List n . n -> List n def modifiedLine(line, otherLine, coeff): def f(val, otherVal): return val - otherVal * coeff return zipWith(f, line, otherLine) # ==== next step # back in usePivot: # as previously mentioned, we used forEachLine over both # the pivotal and non pivotal lines, except the currently # pivotal line (the one one which we found the pivot we're # actually using). # # from then on, we `Maybe` build a new system, not forgetting # to stash the latest pivotal lines with all the old pivotal lines # # maybeSystem will thereafter test that no line is invalid (0 = x != 0). # ==== next step # back to echelonized: we get back the result of usePivot. if Nothing, # we return Nothing and therefore break the recursion. if not, # we check if we're at the last column of the matrix A in AX = Y, and # if so, we return the result of usePivot and break the recursion. # if not, we call `echelonized` recursively again with the justValue() of # the result of `usePivot`, and an incremented column index. # i deem the recursion acceptable even in Python because # nobody will ever use this program to solve a system # of several hundreds of variables, will they? # ==== next step: # we exited `echelonized`, so we're back in `systemSolution`, with either # Nothing (in which case we directly return Nothing) # or with `Just` an echelonized system. # in which case: we have to take care of an extreme, special case: # when both sides of the equation are null (a null matrix and vector). # cf the body of `systemSolution` for more details. # # from now on we'll assume the leftside is not a null matrix, # and therefore we at least found one pivot. # we call keepPivotalLines over the echelonized system. 
# keepPivotalLines : System n -> List (Row n) # # this function is extremely short and nearly useless, but is standing alone # for the sake of clarity. its purpose is to mark the moment when # we throw away the remaining nonPivotalLines, because they're # necessarily just full of zeroes (otherwise they'd either be invalid, # or would imply the Gauss Algorithm implemented here utterly failed somehow # along the way). # this, of course, should never happen. # the result of keepPivotalLines is thus `system.pivotalLines : List (Row n)` # keepPivotalLines : System n -> List (Row n) def keepPivotalLines(system): if not forall(system.nonPivotalLines, isNullVector): error(keepPivotalLines, "somehow after successfully echelonizing, " + "one non pivotal line is not full of zeroes") return system.pivotalLines # ==== next step: # once keepPivotalLines has been called, it's the turn of # normalized : List (Row n) -> List (Row n) # whose job is to normalize each pivotal line, aka to # multiply each pivotal line by a coefficient # so that each pivot (which is also the first non-zero value # on each line) take the value of 1. # normalized : List (Row n) -> List (Row n) def normalized(pivotalLines):
# normalized delegates all the work to map and to: # normalizedLine : Row n -> Row n # which replaces each value in the input # with itself divided by the pivot, which # is always the first non-zero value # encountered in the list. # normalizedLine : Row n -> Row n def normalizedLine(line): maybePivotIx = firstIndex(isNotZero, line) if maybePivotIx == Nothing: # the line is full of zeroes: # should never happen error(normalizedLine, "the line is full of zeroes") pivot = line[maybePivotIx.justValue()] if pivot != 1: return map(lambda val: val / pivot, line) else: return line # ==== next step: # now's the time to extract the solution from # the normalized pivotal lines. # extractSolution : List (Row n) -> Solution n # let us be reminded that: # Solution n = (Family n, Vector n) # that is, a solution is the couple composed of # the kernel basis, or [] if the kernel is {0} # and of the particular solution to the system. # # it's the trickiest part of the algorithm. # # we start by ungluing the leftside and rightside of # each line, to recuperate a left matrix and right vector, # using `splitSides` below. # # we obtain `p = ncols(leftMatrix)`, # then we get the column indices of each pivot in each pivotal line # (that will be the first non null value of each line). # we use it to get the column indices which *don't* contain # any pivot, with the help of `substractLists`. # # we then negate all the elements of the left matrix. # it also negates the (normalized) pivots but we don't # care because they'll soon be thrown away anyway. # # for each non pivotal column index: # we build a unit vector with a `1` at that # column index, with `p` coordinates. # we then insert that vector inside the left Matrix # and we of course insert a corresponding 0 in the # right side vector. # # this finally done, we only need to get the columns # of the left side matrix, minus the columns of the # pivots, and we get the basis of the kernel. 
# # the particular solution is just the right side vector. # # we thereafter return the tuple of both of them together, # and the algorithm is done. # extractSolution : List (Row n) -> Solution n def extractSolution(lines): (leftMatrix, rightVector) = unzipWith(splitSides, lines) p = ncols(leftMatrix) maybePivotalColIs = firstNonNullEachLine(leftMatrix) if maybePivotalColIs == Nothing: error(extractSolution, "one pivotal line is full of zeroes") pivotalColIs = maybePivotalColIs.justValue() nonPivotalColIs = subtractLists(range(p), pivotalColIs) leftMatrix = negate(leftMatrix) for index in nonPivotalColIs: uv = unitVector(p, index) leftMatrix.insert(index, uv) rightVector.insert(index, 0) #print "filled" #printMatrix(leftMatrix) kernelBasis = filterOutIx(pivotalColIs, familyFromMatrix(leftMatrix)) particularSolution = rightVector return (kernelBasis, particularSolution) def splitSides(line): leftSide = line[:-1] rightSide = line[-1] return (leftSide, rightSide)
return map(normalizedLine, pivotalLines)
identifier_body
System.py
""" Author: Nathanael Bayard Module Name: System Description: type representing linear systems, and functions pertaining to them, like Gauss's method of resolution """ from List import zipWith, firstIndex, isolateItem, unzipWith, subtractLists, filterOutIx from Bool import forall, isZero, isNotZero from Maybe import Maybe, Just, Nothing from Matrix import (idMatrix, nullVector, firstNonNullEachLine, negate, unitVector, familyFromMatrix, columnAt, isNullVector, ncols) # ==== General Description of the algorithm: # # the function that is called first is # systemSolution : [Num n] Matrix n . Vector n -> Maybe (Solution n) # with # Solution n = (Family n, Vector n) # # the first argument of `systemSolution` is the matrix `A` in # the equation `AX = Y`, the second argument is the vector `Y`. # # the first element of the type `Solution n` represents the basis # of the kernel of A, or is an empty list if the kernel is reduced to # the null vector. # the second element of `Solution n` is of course the particular # solution of the system found. # # if the system can't be solved, because at some point during # the algorithm, we ended up with an equation of `0 = x`, with # `x != 0`, then the result of `systemSolution` will be Nothing. # systemSolution : [Num n] Matrix n . Vector n -> Maybe (Solution n) def systemSolution(matrix, rightSide): maybeSystem = maybeSystemInit(matrix, rightSide).maybeDo(echelonized, 0) if maybeSystem == Nothing: # the system has no solution return Nothing else: system = maybeSystem.justValue() if len(system.pivotalLines) == 0: # extreme case: no pivot was found # during the echelonizing (which means the leftside # matrix A is null) and yet its result # is not Nothing, so the system does admit some # solution, so we conclude Y = 0 too, # and the kernel is thereafter the whole domain # of the linear application that could be # associated with A (if A has p columns, # that would canonically be R^p). 
# A particular solution can be any vector at all, # like the vector null. p = system.leftSideWidth solution = (idMatrix(p), nullVector(p)) return Just(solution) else: pivotalLines = keepPivotalLines(system) return Just(extractSolution(normalized(pivotalLines))) # ==== next step: # `systemSolution` calls # maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n) # # this function fuses/zips the elements of the rightside vector to the end # of each line of the left matrix. # then it calls the constructor `maybeSystem` with parameters # `pivotalLines = []` and `nonPivotalLines` as the previous result of # the gluing of both sides of the equation. # example: if the equation of the system is # 0 1 2 | x 9 # 3 4 5 | y = 9 # 6 7 8 | z 9 # then `maybeSystemInit would call: # maybeSystem([], [ [0,1,2,9], [3,4,5,9], [6,7,8,9] ]) # maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n) def maybeSystemInit(matrix, rightSide): def fuseToEnd(line, rightValue): return line + [rightValue] fusedLines = zipWith(fuseToEnd, matrix, rightSide) return maybeSystem([], fusedLines) # ==== next step: # `maybeSystem` is a 'smart' constructor that returns Nothing if # any line in either of its parameters is of the form [0,0,...,0,x] # with x != 0, which would correspond to an equation 0 = x != 0` # which would make the result of `systemSolution` be Nothing automatically, # thanks to the magic of the type `Maybe`! # # the constructor `maybeSystem` will be called at each step of the # algorithm, ensuring that if at any point, the system is found unsolvable, # no further operation will be performed. # maybeSystem : List (Rows n) . 
List (Rows n) -> Maybe (System n) def maybeSystem(pivotalLines, nonPivotalLines): if forall(pivotalLines + nonPivotalLines, isValidLine): return Maybe(value = System(pivotalLines, nonPivotalLines)) else: return Nothing # returns True if and only if the list # is not a series of zeroes ended with one # last non-zero value, as it would amount to # an equation of the form 0 = x != 0 # # isValidLine : List n -> Bool def isValidLine(line): if len(line) <= 1: error(isValidLine, "input list is too short to be part of a `System n`") leftSide = line[:-1] rightSide = line[-1] if forall(leftSide, isZero): return isZero(rightSide) else: return True # (you'll notice i grew tired of mentioning the ever-present # type class [Num n] of the parameter `n`...) # ==== small interlude to introduce the type/class `System n`: # class representing a system in the process of being solved. # mostly just contains two attributes, `pivotalLines` and # `nonPivotalLines`, each one being a list of vectors/lines. # # we'll search new pivots in the nonPivotalLines list, and # everytime we find a new pivot in a column, we'll move the corresponding # line to the group of the "pivotalLines". class System(): # System : List (Row n) . List (Row n) -> System n def __init__(self, pivotalLines, nonPivotalLines): self.pivotalLines = pivotalLines self.nonPivotalLines = nonPivotalLines allLines = pivotalLines + nonPivotalLines if len(allLines) == 0: error(System.__init__, "wrong input (two empty lists) " + "for System constructor") self.leftSideWidth = len(allLines[0]) - 1 # number of columns of the leftside matrix of the equation. # -1 because the last element of each line is # part of the right side (the vector Y in AX = Y) # this value will be used to avoid trying to find # a pivot in the right side column vector # ==== next step of the algorithm: # # back in `systemSolution`, with `Maybe` a valid system. If it is so, # the method `maybeDo` will call # echelonized : System n . 
Index -> Maybe (System n) # with this valid system as its first argument, and an additional parameter # `colIndex = 0`. # this function returns either `Just` an echelonized system, # or Nothing if at some point we encounter `0 = x != 0`. # # this function is recursive (indirectly). colIndex represents an index of # the column of the leftside matrix `A` in which we'll try # to find a pivot. the recursion will thus go through each column index # between 0 and `ncols(A)`. # echelonized : System n . Index -> Maybe (System n) def echelonized(system, colIndex): #print "DBG" #printMatrix(system.pivotalLines) #printMatrix(system.nonPivotalLines) maybePivotalLineIndex = findPivot(system, colIndex) if maybePivotalLineIndex == Nothing: # pivot not found => this column is filled with zeroes # on the non pivotal lines, so we do nothing maybeSystem = Just(system) else: pivotalLineIndex = maybePivotalLineIndex.justValue() maybeSystem = usePivot(system, pivotalLineIndex, colIndex) if maybeSystem == Nothing: return Nothing else: newSystem = maybeSystem.justValue() if colIndex >= newSystem.leftSideWidth - 1: # we reached the end of recursion, having # walked through all the columns of the leftside # matrix of the equation return Just(newSystem) else: # we repeat the process for the next column
# the previous function starts by calling # findPivot : System n . Index -> Maybe Index # which `Maybe` returns the index of the first non-pivotal line # (that is the line which wasn't used previously for the pivot # of another column) which # contains a non-null element at the index column `colIndex`. # returns Nothing if the whole column at that index is null, # or if there aren't any non pivotal lines remaining. # that line index depends on the number of lines in system.nonPivotalLines, # but that's not a problem because we'll use that index only # to isolate the line into which the pivot was found from the aforementioned list. # findPivot : System n . Index -> Maybe Index def findPivot(system, colIndex): if len(system.nonPivotalLines) == 0: return Nothing col = columnAt(colIndex, system.nonPivotalLines) maybeLineIndex = firstIndex(isNotZero, col) #print "findPivot", x, colIndex return maybeLineIndex # ==== next step: # back in `echelonized`: if the index of the line of the pivot # given by `findPivot` is Nothing, we do nothing with the system # else, we call `usePivot` with the index of the soon-to-be pivotal line # usePivot : System n . Index . Index -> Maybe (System n) # # in usePivot: we start by isolating the new pivotal lines # from the rest of the still-not-yet pivotal lines. # then we recuperate the value of the pivot, using the # index of the column we're operating over. # # we create a function which will be used over all the lines # in `system`, both the pivotal ones and the non pivotal ones, # except the one that was just isolated, the one that contains # the pivot. # # the operation consists in creating zeroes everywhere in the column # except for the pivot. the core of the process is in the function # `modifiedLine` (i know, the name is not very appropriate... it's # the best i found though). # usePivot : System n . Index . 
Index -> Maybe (System n) def usePivot(system, pivotalLineIndex, colIndex): (pivotalLine, nonPivotalLines) = \ isolateItem(system.nonPivotalLines, pivotalLineIndex) pivot = pivotalLine[colIndex] #print "pivot", pivot def forEachLine(line): val = line[colIndex] coeff = val / pivot # that way, val - coeff * pivot == 0 return modifiedLine(line, pivotalLine, coeff) newPivotalLines = map(forEachLine, system.pivotalLines) + [pivotalLine] newNonPivotalLines = map(forEachLine, nonPivotalLines) return maybeSystem(newPivotalLines, newNonPivotalLines) # the function `modifiedLine` is straightforward: # subtract each value of `line` by the multiplication # of an appropriate coefficient with each value of # `otherLine`. # the appropriate coefficient is calculated so # that the element at the column of the current pivot # in the `line` become zero (cf `forEachLine` in usePivot) # modifiedLine : List n . List n . n -> List n def modifiedLine(line, otherLine, coeff): def f(val, otherVal): return val - otherVal * coeff return zipWith(f, line, otherLine) # ==== next step # back in usePivot: # as previously mentioned, we used forEachLine over both # the pivotal and non pivotal lines, except the currently # pivotal line (the one one which we found the pivot we're # actually using). # # from then on, we `Maybe` build a new system, not forgetting # to stash the latest pivotal lines with all the old pivotal lines # # maybeSystem will thereafter test that no line is invalid (0 = x != 0). # ==== next step # back to echelonized: we get back the result of usePivot. if Nothing, # we return Nothing and therefore break the recursion. if not, # we check if we're at the last column of the matrix A in AX = Y, and # if so, we return the result of usePivot and break the recursion. # if not, we call `echelonized` recursively again with the justValue() of # the result of `usePivot`, and an incremented column index. 
# i deem the recursion acceptable even in Python because # nobody will ever use this program to solve a system # of several hundreds of variables, will they? # ==== next step: # we exited `echelonized`, so we're back in `systemSolution`, with either # Nothing (in which case we directly return Nothing) # or with `Just` an echelonized system. # in which case: we have to take care of an extreme, special case: # when both sides of the equation are null (a null matrix and vector). # cf the body of `systemSolution` for more details. # # from now on we'll assume the leftside is not a null matrix, # and therefore we at least found one pivot. # we call keepPivotalLines over the echelonized system. # keepPivotalLines : System n -> List (Row n) # # this function is extremely short and nearly useless, but is standing alone # for the sake of clarity. its purpose is to mark the moment when # we throw away the remaining nonPivotalLines, because they're # necessarily just full of zeroes (otherwise they'd either be invalid, # or would imply the Gauss Algorithm implemented here utterly failed somehow # along the way). # this, of course, should never happen. # the result of keepPivotalLines is thus `system.pivotalLines : List (Row n)` # keepPivotalLines : System n -> List (Row n) def keepPivotalLines(system): if not forall(system.nonPivotalLines, isNullVector): error(keepPivotalLines, "somehow after successfully echelonizing, " + "one non pivotal line is not full of zeroes") return system.pivotalLines # ==== next step: # once keepPivotalLines has been called, it's the turn of # normalized : List (Row n) -> List (Row n) # whose job is to normalize each pivotal line, aka to # multiply each pivotal line by a coefficient # so that each pivot (which is also the first non-zero value # on each line) take the value of 1. 
# normalized : List (Row n) -> List (Row n) def normalized(pivotalLines): return map(normalizedLine, pivotalLines) # normalized delegates all the work to map and to: # normalizedLine : Row n -> Row n # which replaces each value in the input # with itself divided by the pivot, which # is always the first non-zero value # encountered in the list. # normalizedLine : Row n -> Row n def normalizedLine(line): maybePivotIx = firstIndex(isNotZero, line) if maybePivotIx == Nothing: # the line is full of zeroes: # should never happen error(normalizedLine, "the line is full of zeroes") pivot = line[maybePivotIx.justValue()] if pivot != 1: return map(lambda val: val / pivot, line) else: return line # ==== next step: # now's the time to extract the solution from # the normalized pivotal lines. # extractSolution : List (Row n) -> Solution n # let us be reminded that: # Solution n = (Family n, Vector n) # that is, a solution is the couple composed of # the kernel basis, or [] if the kernel is {0} # and of the particular solution to the system. # # it's the trickiest part of the algorithm. # # we start by ungluing the leftside and rightside of # each line, to recuperate a left matrix and right vector, # using `splitSides` below. # # we obtain `p = ncols(leftMatrix)`, # then we get the column indices of each pivot in each pivotal line # (that will be the first non null value of each line). # we use it to get the column indices which *don't* contain # any pivot, with the help of `substractLists`. # # we then negate all the elements of the left matrix. # it also negates the (normalized) pivots but we don't # care because they'll soon be thrown away anyway. # # for each non pivotal column index: # we build a unit vector with a `1` at that # column index, with `p` coordinates. # we then insert that vector inside the left Matrix # and we of course insert a corresponding 0 in the # right side vector. 
# # this finally done, we only need to get the columns # of the left side matrix, minus the columns of the # pivots, and we get the basis of the kernel. # # the particular solution is just the right side vector. # # we thereafter return the tuple of both of them together, # and the algorithm is done. # extractSolution : List (Row n) -> Solution n def extractSolution(lines): (leftMatrix, rightVector) = unzipWith(splitSides, lines) p = ncols(leftMatrix) maybePivotalColIs = firstNonNullEachLine(leftMatrix) if maybePivotalColIs == Nothing: error(extractSolution, "one pivotal line is full of zeroes") pivotalColIs = maybePivotalColIs.justValue() nonPivotalColIs = subtractLists(range(p), pivotalColIs) leftMatrix = negate(leftMatrix) for index in nonPivotalColIs: uv = unitVector(p, index) leftMatrix.insert(index, uv) rightVector.insert(index, 0) #print "filled" #printMatrix(leftMatrix) kernelBasis = filterOutIx(pivotalColIs, familyFromMatrix(leftMatrix)) particularSolution = rightVector return (kernelBasis, particularSolution) def splitSides(line): leftSide = line[:-1] rightSide = line[-1] return (leftSide, rightSide)
return echelonized(newSystem, colIndex + 1)
conditional_block
System.py
""" Author: Nathanael Bayard Module Name: System Description: type representing linear systems, and functions pertaining to them, like Gauss's method of resolution """ from List import zipWith, firstIndex, isolateItem, unzipWith, subtractLists, filterOutIx from Bool import forall, isZero, isNotZero from Maybe import Maybe, Just, Nothing from Matrix import (idMatrix, nullVector, firstNonNullEachLine, negate, unitVector, familyFromMatrix, columnAt, isNullVector, ncols) # ==== General Description of the algorithm: # # the function that is called first is # systemSolution : [Num n] Matrix n . Vector n -> Maybe (Solution n) # with # Solution n = (Family n, Vector n) # # the first argument of `systemSolution` is the matrix `A` in # the equation `AX = Y`, the second argument is the vector `Y`. # # the first element of the type `Solution n` represents the basis # of the kernel of A, or is an empty list if the kernel is reduced to # the null vector. # the second element of `Solution n` is of course the particular # solution of the system found. # # if the system can't be solved, because at some point during # the algorithm, we ended up with an equation of `0 = x`, with # `x != 0`, then the result of `systemSolution` will be Nothing. # systemSolution : [Num n] Matrix n . Vector n -> Maybe (Solution n) def systemSolution(matrix, rightSide): maybeSystem = maybeSystemInit(matrix, rightSide).maybeDo(echelonized, 0) if maybeSystem == Nothing: # the system has no solution return Nothing else: system = maybeSystem.justValue() if len(system.pivotalLines) == 0: # extreme case: no pivot was found # during the echelonizing (which means the leftside # matrix A is null) and yet its result # is not Nothing, so the system does admit some # solution, so we conclude Y = 0 too, # and the kernel is thereafter the whole domain # of the linear application that could be # associated with A (if A has p columns, # that would canonically be R^p). 
# A particular solution can be any vector at all, # like the vector null. p = system.leftSideWidth solution = (idMatrix(p), nullVector(p)) return Just(solution) else: pivotalLines = keepPivotalLines(system) return Just(extractSolution(normalized(pivotalLines))) # ==== next step: # `systemSolution` calls # maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n) # # this function fuses/zips the elements of the rightside vector to the end # of each line of the left matrix. # then it calls the constructor `maybeSystem` with parameters # `pivotalLines = []` and `nonPivotalLines` as the previous result of # the gluing of both sides of the equation. # example: if the equation of the system is # 0 1 2 | x 9 # 3 4 5 | y = 9 # 6 7 8 | z 9 # then `maybeSystemInit would call: # maybeSystem([], [ [0,1,2,9], [3,4,5,9], [6,7,8,9] ]) # maybeSystemInit : [Num n] Matrix n . Vector n -> Maybe (System n) def maybeSystemInit(matrix, rightSide): def fuseToEnd(line, rightValue): return line + [rightValue] fusedLines = zipWith(fuseToEnd, matrix, rightSide) return maybeSystem([], fusedLines) # ==== next step: # `maybeSystem` is a 'smart' constructor that returns Nothing if # any line in either of its parameters is of the form [0,0,...,0,x] # with x != 0, which would correspond to an equation 0 = x != 0` # which would make the result of `systemSolution` be Nothing automatically, # thanks to the magic of the type `Maybe`! # # the constructor `maybeSystem` will be called at each step of the # algorithm, ensuring that if at any point, the system is found unsolvable, # no further operation will be performed. # maybeSystem : List (Rows n) . List (Rows n) -> Maybe (System n) def
(pivotalLines, nonPivotalLines): if forall(pivotalLines + nonPivotalLines, isValidLine): return Maybe(value = System(pivotalLines, nonPivotalLines)) else: return Nothing # returns True if and only if the list # is not a series of zeroes ended with one # last non-zero value, as it would amount to # an equation of the form 0 = x != 0 # # isValidLine : List n -> Bool def isValidLine(line): if len(line) <= 1: error(isValidLine, "input list is too short to be part of a `System n`") leftSide = line[:-1] rightSide = line[-1] if forall(leftSide, isZero): return isZero(rightSide) else: return True # (you'll notice i grew tired of mentioning the ever-present # type class [Num n] of the parameter `n`...) # ==== small interlude to introduce the type/class `System n`: # class representing a system in the process of being solved. # mostly just contains two attributes, `pivotalLines` and # `nonPivotalLines`, each one being a list of vectors/lines. # # we'll search new pivots in the nonPivotalLines list, and # everytime we find a new pivot in a column, we'll move the corresponding # line to the group of the "pivotalLines". class System(): # System : List (Row n) . List (Row n) -> System n def __init__(self, pivotalLines, nonPivotalLines): self.pivotalLines = pivotalLines self.nonPivotalLines = nonPivotalLines allLines = pivotalLines + nonPivotalLines if len(allLines) == 0: error(System.__init__, "wrong input (two empty lists) " + "for System constructor") self.leftSideWidth = len(allLines[0]) - 1 # number of columns of the leftside matrix of the equation. # -1 because the last element of each line is # part of the right side (the vector Y in AX = Y) # this value will be used to avoid trying to find # a pivot in the right side column vector # ==== next step of the algorithm: # # back in `systemSolution`, with `Maybe` a valid system. If it is so, # the method `maybeDo` will call # echelonized : System n . 
Index -> Maybe (System n) # with this valid system as its first argument, and an additional parameter # `colIndex = 0`. # this function returns either `Just` an echelonized system, # or Nothing if at some point we encounter `0 = x != 0`. # # this function is recursive (indirectly). colIndex represents an index of # the column of the leftside matrix `A` in which we'll try # to find a pivot. the recursion will thus go through each column index # between 0 and `ncols(A)`. # echelonized : System n . Index -> Maybe (System n) def echelonized(system, colIndex): #print "DBG" #printMatrix(system.pivotalLines) #printMatrix(system.nonPivotalLines) maybePivotalLineIndex = findPivot(system, colIndex) if maybePivotalLineIndex == Nothing: # pivot not found => this column is filled with zeroes # on the non pivotal lines, so we do nothing maybeSystem = Just(system) else: pivotalLineIndex = maybePivotalLineIndex.justValue() maybeSystem = usePivot(system, pivotalLineIndex, colIndex) if maybeSystem == Nothing: return Nothing else: newSystem = maybeSystem.justValue() if colIndex >= newSystem.leftSideWidth - 1: # we reached the end of recursion, having # walked through all the columns of the leftside # matrix of the equation return Just(newSystem) else: # we repeat the process for the next column return echelonized(newSystem, colIndex + 1) # the previous function starts by calling # findPivot : System n . Index -> Maybe Index # which `Maybe` returns the index of the first non-pivotal line # (that is the line which wasn't used previously for the pivot # of another column) which # contains a non-null element at the index column `colIndex`. # returns Nothing if the whole column at that index is null, # or if there aren't any non pivotal lines remaining. # that line index depends on the number of lines in system.nonPivotalLines, # but that's not a problem because we'll use that index only # to isolate the line into which the pivot was found from the aforementioned list. 
# findPivot : System n . Index -> Maybe Index def findPivot(system, colIndex): if len(system.nonPivotalLines) == 0: return Nothing col = columnAt(colIndex, system.nonPivotalLines) maybeLineIndex = firstIndex(isNotZero, col) #print "findPivot", x, colIndex return maybeLineIndex # ==== next step: # back in `echelonized`: if the index of the line of the pivot # given by `findPivot` is Nothing, we do nothing with the system # else, we call `usePivot` with the index of the soon-to-be pivotal line # usePivot : System n . Index . Index -> Maybe (System n) # # in usePivot: we start by isolating the new pivotal lines # from the rest of the still-not-yet pivotal lines. # then we recuperate the value of the pivot, using the # index of the column we're operating over. # # we create a function which will be used over all the lines # in `system`, both the pivotal ones and the non pivotal ones, # except the one that was just isolated, the one that contains # the pivot. # # the operation consists in creating zeroes everywhere in the column # except for the pivot. the core of the process is in the function # `modifiedLine` (i know, the name is not very appropriate... it's # the best i found though). # usePivot : System n . Index . Index -> Maybe (System n) def usePivot(system, pivotalLineIndex, colIndex): (pivotalLine, nonPivotalLines) = \ isolateItem(system.nonPivotalLines, pivotalLineIndex) pivot = pivotalLine[colIndex] #print "pivot", pivot def forEachLine(line): val = line[colIndex] coeff = val / pivot # that way, val - coeff * pivot == 0 return modifiedLine(line, pivotalLine, coeff) newPivotalLines = map(forEachLine, system.pivotalLines) + [pivotalLine] newNonPivotalLines = map(forEachLine, nonPivotalLines) return maybeSystem(newPivotalLines, newNonPivotalLines) # the function `modifiedLine` is straightforward: # subtract each value of `line` by the multiplication # of an appropriate coefficient with each value of # `otherLine`. 
# the appropriate coefficient is calculated so # that the element at the column of the current pivot # in the `line` become zero (cf `forEachLine` in usePivot) # modifiedLine : List n . List n . n -> List n def modifiedLine(line, otherLine, coeff): def f(val, otherVal): return val - otherVal * coeff return zipWith(f, line, otherLine) # ==== next step # back in usePivot: # as previously mentioned, we used forEachLine over both # the pivotal and non pivotal lines, except the currently # pivotal line (the one one which we found the pivot we're # actually using). # # from then on, we `Maybe` build a new system, not forgetting # to stash the latest pivotal lines with all the old pivotal lines # # maybeSystem will thereafter test that no line is invalid (0 = x != 0). # ==== next step # back to echelonized: we get back the result of usePivot. if Nothing, # we return Nothing and therefore break the recursion. if not, # we check if we're at the last column of the matrix A in AX = Y, and # if so, we return the result of usePivot and break the recursion. # if not, we call `echelonized` recursively again with the justValue() of # the result of `usePivot`, and an incremented column index. # i deem the recursion acceptable even in Python because # nobody will ever use this program to solve a system # of several hundreds of variables, will they? # ==== next step: # we exited `echelonized`, so we're back in `systemSolution`, with either # Nothing (in which case we directly return Nothing) # or with `Just` an echelonized system. # in which case: we have to take care of an extreme, special case: # when both sides of the equation are null (a null matrix and vector). # cf the body of `systemSolution` for more details. # # from now on we'll assume the leftside is not a null matrix, # and therefore we at least found one pivot. # we call keepPivotalLines over the echelonized system. 
# keepPivotalLines : System n -> List (Row n) # # this function is extremely short and nearly useless, but is standing alone # for the sake of clarity. its purpose is to mark the moment when # we throw away the remaining nonPivotalLines, because they're # necessarily just full of zeroes (otherwise they'd either be invalid, # or would imply the Gauss Algorithm implemented here utterly failed somehow # along the way). # this, of course, should never happen. # the result of keepPivotalLines is thus `system.pivotalLines : List (Row n)` # keepPivotalLines : System n -> List (Row n) def keepPivotalLines(system): if not forall(system.nonPivotalLines, isNullVector): error(keepPivotalLines, "somehow after successfully echelonizing, " + "one non pivotal line is not full of zeroes") return system.pivotalLines # ==== next step: # once keepPivotalLines has been called, it's the turn of # normalized : List (Row n) -> List (Row n) # whose job is to normalize each pivotal line, aka to # multiply each pivotal line by a coefficient # so that each pivot (which is also the first non-zero value # on each line) take the value of 1. # normalized : List (Row n) -> List (Row n) def normalized(pivotalLines): return map(normalizedLine, pivotalLines) # normalized delegates all the work to map and to: # normalizedLine : Row n -> Row n # which replaces each value in the input # with itself divided by the pivot, which # is always the first non-zero value # encountered in the list. # normalizedLine : Row n -> Row n def normalizedLine(line): maybePivotIx = firstIndex(isNotZero, line) if maybePivotIx == Nothing: # the line is full of zeroes: # should never happen error(normalizedLine, "the line is full of zeroes") pivot = line[maybePivotIx.justValue()] if pivot != 1: return map(lambda val: val / pivot, line) else: return line # ==== next step: # now's the time to extract the solution from # the normalized pivotal lines. 
# extractSolution : List (Row n) -> Solution n # let us be reminded that: # Solution n = (Family n, Vector n) # that is, a solution is the couple composed of # the kernel basis, or [] if the kernel is {0} # and of the particular solution to the system. # # it's the trickiest part of the algorithm. # # we start by ungluing the leftside and rightside of # each line, to recuperate a left matrix and right vector, # using `splitSides` below. # # we obtain `p = ncols(leftMatrix)`, # then we get the column indices of each pivot in each pivotal line # (that will be the first non null value of each line). # we use it to get the column indices which *don't* contain # any pivot, with the help of `substractLists`. # # we then negate all the elements of the left matrix. # it also negates the (normalized) pivots but we don't # care because they'll soon be thrown away anyway. # # for each non pivotal column index: # we build a unit vector with a `1` at that # column index, with `p` coordinates. # we then insert that vector inside the left Matrix # and we of course insert a corresponding 0 in the # right side vector. # # this finally done, we only need to get the columns # of the left side matrix, minus the columns of the # pivots, and we get the basis of the kernel. # # the particular solution is just the right side vector. # # we thereafter return the tuple of both of them together, # and the algorithm is done. 
# extractSolution : List (Row n) -> Solution n def extractSolution(lines): (leftMatrix, rightVector) = unzipWith(splitSides, lines) p = ncols(leftMatrix) maybePivotalColIs = firstNonNullEachLine(leftMatrix) if maybePivotalColIs == Nothing: error(extractSolution, "one pivotal line is full of zeroes") pivotalColIs = maybePivotalColIs.justValue() nonPivotalColIs = subtractLists(range(p), pivotalColIs) leftMatrix = negate(leftMatrix) for index in nonPivotalColIs: uv = unitVector(p, index) leftMatrix.insert(index, uv) rightVector.insert(index, 0) #print "filled" #printMatrix(leftMatrix) kernelBasis = filterOutIx(pivotalColIs, familyFromMatrix(leftMatrix)) particularSolution = rightVector return (kernelBasis, particularSolution) def splitSides(line): leftSide = line[:-1] rightSide = line[-1] return (leftSide, rightSide)
maybeSystem
identifier_name
ecoliver.py
import numpy as np import scipy as sp from scipy import stats from scipy import linalg import scipy.ndimage as ndimage from datetime import date def find_nearest(array, value): idx=(np.abs(array-value)).argmin() return array[idx], idx def findxy(x, y, loc): ''' Find (i,j) coordinates of loc = (x0,y0) in 2D irregularly spaced coordinate matrices (2D numpy arrays) x and y. ''' # Dimensions Y, X = x.shape # Make matrix of indices iijj = np.meshgrid(range(X), range(Y)) ii = iijj[0].flatten() jj = iijj[1].flatten() # Calculate distance-squared dist2 = (x-loc[0])**2 + (y-loc[1])**2 k = np.argmin(dist2) return ii[k], jj[k] def nanmean(array, axis=None): return np.mean(np.ma.masked_array(array, np.isnan(array)), axis) def nanvar(array, axis=None): return np.var(np.ma.masked_array(array, np.isnan(array)), axis) def nanskew(array, axis=None): # only woks for 1D data return stats.skew(array[np.logical_not(np.isnan(array))]) def nanmax(array, axis=None): maxes = np.max(np.ma.masked_array(array, np.isnan(array)), axis) data = maxes.data mask = maxes.mask return data[np.logical_not(mask)] def nonans(array): ''' Return input array [1D numpy array] with all nan values removed ''' return array[~np.isnan(array)] def nozeros(array): ''' Return input array [1D numpy array] with all zeros removed ''' return array[~(array==0)] def latlon2km(lon1, lat1, lon2, lat2): EARTH_RADIUS = 6378.1 c = np.sin(np.radians(lat1)) * np.sin(np.radians(lat2)) + np.cos(np.radians(lon1-lon2)) * np.cos(np.radians(lat1)) * np.cos(np.radians(lat2)) d = EARTH_RADIUS * np.arccos(c) return d def latlonArea(lon1, lat1, lon2, lat2): ''' Surface area (in km2^) of a lat/lon "rectangle" included between specified longitudes and latitudes ''' EARTH_RADIUS = 6378.1 return (EARTH_RADIUS**2) * np.abs(lon1*np.pi/180. - lon2*np.pi/180.) * np.abs(np.sin(lat1*np.pi/180.) 
- np.sin(lat2*np.pi/180.)) def dxdy(lon, lat): ''' Takes M+1 length lat and N+1 length lon vectors and returns MxN 2D arrays of distances across cells in x and y directions ''' X = len(lon)-1 Y = len(lat)-1 dx = np.zeros((Y,X)) dy = np.zeros((Y,X)) for j in range(dx.shape[0]): for i in range(dx.shape[1]): dx[j,i] = 1e3 * latlon2km(lon[i+1], lat[j], lon[i], lat[j]) dy[j,i] = 1e3 * latlon2km(lon[i], lat[j+1], lon[i], lat[j]) return dx, dy def gradient(field, dx, dy): ''' Performs the gradient of input field given dx and dy fields (in metres) ''' field_y, field_x = np.gradient(field) field_x = field_x / dx field_y = field_y / dy return field_x, field_y def acf(x): result = np.correlate(x, x, mode = 'full') maxcorr = np.argmax(result) result = result / result[maxcorr] # <=== normalization return result[result.size/2:] def ccf(x, y): ''' Cross-Correlation Function +ve lags mean x leads y -ve lags mean x lags y ''' x = (x-np.mean(x))/(np.std(x)*len(x)) y = (y-np.mean(y))/np.std(y) ccf = np.correlate(x, y, mode='full') lags = np.arange(len(ccf)) - (len(x)-1) return lags, ccf def ttest_serialcorr(x, y): ''' Calculates the t-test for the means of two samples under an assumption of serial correlation, following the technique of Zwiers and von Storch (Journal of Climate, 1995) ''' # Valid (non-Nan) data, and return NaN if insufficient valid data validx = ~np.isnan(x) validy = ~np.isnan(y) if (validx.sum() <= 1) + (validy.sum() <= 1): return np.nan, np.nan else: # Sample lengths nx = len(x[validx]) ny = len(y[validy]) # Autocorrelation Function (pad NaN values for an approximation) rhox = acf(pad(x - np.nanmean(x))) rhoy = acf(pad(y - np.nanmean(y))) # Equivalent sample lengths nx = nx / (1 + ((1-np.arange(1, int(nx))/nx)*rhox[validx][:-1]).sum()) ny = ny / (1 + ((1-np.arange(1, int(ny))/ny)*rhoy[validy][:-1]).sum()) #if (nx < 30) or (ny < 30): # print 'Effective sample size(s) are less than 30: distribution of t statistics will deviate significantly from the t-distribution' # 
Sample standard deviations sx = np.sqrt(x[validx].var()) sy = np.sqrt(y[validy].var()) s = np.sqrt(sx**2/nx + sy**2/ny) # t-statistic t = (np.nanmean(x) - np.nanmean(y))/s # Degrees of freedom df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1)) # p-value p = 1 - stats.t.cdf(t, df) return t, p def pad(data, maxPadLength=False): ''' Linearly interpolate over missing data (NaNs) in a time series. Inputs: data Time series [1D numpy array] maxPadLength Specifies the maximum length over which to interpolate, i.e., any consecutive blocks of NaNs with length greater than maxPadLength will be left as NaN. Set as an integer. maxPadLength=False (default) interpolates over all NaNs. Written by Eric Oliver, Institue for Marine and Antarctic Studies, University of Tasmania, Jun 2015 ''' data_padded = data.copy() if len(data) == np.isnan(data).sum(): return np.nan*data_padded else: bad_indexes = np.isnan(data) good_indexes = np.logical_not(bad_indexes) good_data = data[good_indexes] interpolated = np.interp(bad_indexes.nonzero()[0], good_indexes.nonzero()[0], good_data) data_padded[bad_indexes] = interpolated if maxPadLength: blocks, n_blocks = ndimage.label(np.isnan(data)) for bl in range(1, n_blocks+1): if (blocks==bl).sum() > maxPadLength: data_padded[blocks==bl] = np.nan return data_padded def runavg_periodic(ts, w): ''' Perform running average of ts (1D numpy array) using uniform window of width w (w must be odd). Assumes periodicity of ts. ''' N = len(ts) ts = np.append(ts, np.append(ts, ts)) ts_smooth = np.convolve(ts, np.ones(w)/w, mode='same') ts = ts_smooth[N:2*N] return ts def runavg(ts, w, mode='same'): ''' Perform running average of ts (1D numpy array) using uniform window of width w (w must be odd). Pads with NaNs outside of valid range. 
Option 'mode' specifies if output should be defined over ''' if mode == 'same': ts_smooth = np.convolve(ts, np.ones(w)/w, mode=mode) elif mode == 'valid': ts_smooth = np.append(np.append(np.nan*np.ones((w-1)/2), np.convolve(ts, np.ones(w)/w, mode=mode)), np.nan*np.ones((w-1)/2)) return ts_smooth def timevector(date_start, date_end): ''' Generated daily time vector, along with year, month, day, day-of-year, and full date information, given start and and date. Format is a 3-element list so that a start date of 3 May 2005 is specified date_start = [2005,5,3] Note that day-of year (doy) is [0 to 59, 61 to 366] for non-leap years and [0 to 366] for leap years. returns: t, dates, T, year, month, day, doy ''' # Time vector t = np.arange(date(date_start[0],date_start[1],date_start[2]).toordinal(),date(date_end[0],date_end[1],date_end[2]).toordinal()+1) T = len(t) # Date list dates = [date.fromordinal(tt.astype(int)) for tt in t] # Vectors for year, month, day-of-month year = np.zeros((T)) month = np.zeros((T)) day = np.zeros((T)) for tt in range(T): year[tt] = date.fromordinal(t[tt]).year month[tt] = date.fromordinal(t[tt]).month day[tt] = date.fromordinal(t[tt]).day year = year.astype(int) month = month.astype(int) day = day.astype(int) # Leap-year baseline for defining day-of-year values year_leapYear = 2012 # This year was a leap-year and therefore doy in range of 1 to 366 t_leapYear = np.arange(date(year_leapYear, 1, 1).toordinal(),date(year_leapYear, 12, 31).toordinal()+1) dates_leapYear = [date.fromordinal(tt.astype(int)) for tt in t_leapYear] month_leapYear = np.zeros((len(t_leapYear))) day_leapYear = np.zeros((len(t_leapYear))) doy_leapYear = np.zeros((len(t_leapYear))) for tt in range(len(t_leapYear)): month_leapYear[tt] = date.fromordinal(t_leapYear[tt]).month day_leapYear[tt] = date.fromordinal(t_leapYear[tt]).day doy_leapYear[tt] = t_leapYear[tt] - date(date.fromordinal(t_leapYear[tt]).year,1,1).toordinal() + 1 # Calculate day-of-year values doy = np.zeros((T)) 
for tt in range(T): doy[tt] = doy_leapYear[(month_leapYear == month[tt]) * (day_leapYear == day[tt])] doy = doy.astype(int) return t, dates, T, year, month, day, doy def spatial_filter(field, res, cut_lon, cut_lat): ''' Performs a spatial filter, removing all features with wavelenth scales larger than cut_lon in longitude and cut_lat in latitude from field. Field has spatial resolution of res and land identified by np.nan's ''' field_filt = np.zeros(field.shape) # see Chelton et al, Prog. Ocean., 2011 for explanation of factor of 1/5 sig_lon = (cut_lon/5.) / res sig_lat = (cut_lat/5.) / res land = np.isnan(field) field[land] = nanmean(field) field_filt = field - ndimage.gaussian_filter(field, [sig_lat, sig_lon]) field_filt[land] = np.nan return field_filt def trend(x, y, alpha=0.05): ''' Calculates the trend of y given the linear independent variable x. Outputs the mean, trend, and alpha-level (e.g., 0.05 for 95%) confidence limit on the trend. returns mean, trend, dtrend_95 ''' valid = ~np.isnan(y) if valid.sum() <= 1:
else: X = np.array([np.ones(len(x)), x-x.mean()]) beta = linalg.lstsq(X[:,valid].T, y[valid])[0] yhat = np.sum(beta*X.T, axis=1) t_stat = stats.t.isf(alpha/2, len(x[valid])-2) s = np.sqrt(np.sum((y[valid] - yhat[valid])**2) / (len(x[valid])-2)) Sxx = np.sum(X[1,valid]**2) - (np.sum(X[1,valid])**2)/len(x[valid]) # np.var(X, axis=1)[1] return beta[0], beta[1], t_stat * s / np.sqrt(Sxx) def trend_TheilSen(x, y, alpha=0.05): ''' Calculates the trend of y given the linear independent variable x. Outputs the mean, trend, and alpha-level (e.g., 0.05 for 95%) confidence limit on the trend. Estimate of the trend uses a Theil-Sen estimator. returns mean, trend, dtrend_95 ''' # Construct matrix of predictors, first column is all ones to estimate the mean, # second column is the time vector, equal to zero at mid-point. X = x-x.mean() # # Predictand (MHW property of interest) valid = ~np.isnan(y) # non-NaN indices # # Perform linear regression over valid indices if np.sum(~np.isnan(y)) > 0: # If at least one non-NaN value slope, y0, beta_lr, beta_up = stats.mstats.theilslopes(y[valid], X[valid], alpha=1-alpha) beta = np.array([y0, slope]) else: beta_lr, beta_up = [np.nan, np.nan] beta = [np.nan, np.nan] # return beta[0], beta[1], [beta_lr, beta_up] def acf(x): result = np.correlate(x, x, mode = 'full') maxcorr = np.argmax(result) result = result / result[maxcorr] # <=== normalization return result[result.size/2:] def ttest_unequalvar(x, y): ''' Calculates the t-test for the means of two samples under an assumption of no serial correlation (but different variances), following the technique of Zwiers and von Storch (Journal of Climate, 1995) ''' # Sample lengths nx = len(x) ny = len(y) # Sample standard deviations sx = np.sqrt(x.var()) sy = np.sqrt(y.var()) s = np.sqrt(sx**2/nx + sy**2/ny) # t-statistic t = np.abs(x.mean() - y.mean())/s # Degrees of freedom df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1)) # p-value p = 1 - stats.t.cdf(t, df) return t, 
p def pattern_correlation(x1, x2, centred=True): ''' Calculates the pattern correlation of x1 and x2. Assumes and x1 and x2 are 2D numpy arrays. Can handle missing values, even if missing values are distributed differently in x1 and x2. By default calculated the centred pattern correlation (centred =True) in which the spatial means of x1 and x2 are removed prior to calculation. Can calculated uncentred pattern correlation (centred=False) in which these means are not removed. . Written by Eric Oliver, IMAS/UTAS, Nov 2015 ''' # Flatten 2D arrays and find shared valid (non-nan) indices X1 = x1.flatten() X2 = x2.flatten() valid = ~(np.isnan(X1) + np.isnan(X2)) # Create Nx2 array of valid data X = np.zeros((valid.sum(), 2)) X[:,0] = X1[valid] X[:,1] = X2[valid] # Centre data if desired if centred: X[:,0] = X[:,0] - np.mean(X[:,0]) X[:,1] = X[:,1] - np.mean(X[:,1]) # # Calculate pattern correlation pcorr = np.corrcoef(X.T)[0,1] return pcorr def polyArea(x, y): ''' Area of a simple polygon, defined by vertices (x,y) in the plane. Assumes x and y and numpy arrays of the same length. Algorithm is based on the Shoelace Formula https://en.wikipedia.org/wiki/Shoelace_formula http://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates ''' return 0.5*np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1))) def point_inside_polygon(x, y, poly): ''' Determine if a point is inside a given polygon or not Polygon is a list of (x,y) pairs. http://www.ariel.com.au/a/python-point-int-poly.html ''' n = len(poly) inside =False # p1x, p1y = poly[0] for i in range(n+1): p2x, p2y = poly[i % n] if y > min(p1y, p2y): if y <= max(p1y, p2y): if x <= max(p1x, p2x): if p1y != p2y: xinters = (y - p1y)*(p2x - p1x)/(p2y - p1y)+p1x if p1x == p2x or x <= xinters: inside = not inside p1x, p1y = p2x, p2y # return inside def pAgree(k, n, p=0.5): ''' Returns the significance level of agreement of k results across n datasets, based on a binomial distribution. 
Assumes a "fair coin toss" (p=0.5) but this can be optionally changed. For example, if k=9 out of n=10 datasets show a trend of the same sign, this returns the significance level for such a result, assuming independence of the datasets (ha!) and a probability of a trend with that sign of p=0.5. ''' return stats.binom.pmf(np.arange(k,n+1), n, p).sum() def weighted_quantile(values, quantiles, sample_weight=None, values_sorted=False, old_style=False): """ Very close to numpy.percentile, but supports weights. NOTE: quantiles should be in [0, 1]! :param values: numpy.array with data :param quantiles: array-like with many quantiles needed :param sample_weight: array-like of the same length as `array` :param values_sorted: bool, if True, then will avoid sorting of initial array :param old_style: if True, will correct output to be consistent with numpy.percentile. :return: numpy.array with computed quantiles. Authored by Alleo, Apr 16 '15 at 14:22 Source: https://stackoverflow.com/questions/21844024/weighted-percentile-using-numpy """ values = np.array(values) quantiles = np.array(quantiles) if sample_weight is None: sample_weight = np.ones(len(values)) sample_weight = np.array(sample_weight) assert np.all(quantiles >= 0) and np.all(quantiles <= 1), 'quantiles should be in [0, 1]' if not values_sorted: sorter = np.argsort(values) values = values[sorter] sample_weight = sample_weight[sorter] weighted_quantiles = np.cumsum(sample_weight) - 0.5 * sample_weight if old_style: # To be convenient with numpy.percentile weighted_quantiles -= weighted_quantiles[0] weighted_quantiles /= weighted_quantiles[-1] else: weighted_quantiles /= np.sum(sample_weight) return np.interp(quantiles, weighted_quantiles, values) def gaussianKernelSum(X, Y, Z, std): ''' Estimates the 2D probability distribution of scattered data Z on a regular grid (X, Y). Does so by summing Gaussian pdfs centred on each location Z with variance std[0] in the X dimension and STD[1] in the Y dimention. 
X, Y Matrices of coordinates, in np.meshgrid format Z (2 x N) array of data points, where N is the number of points. The first index of dimension zero corresponds to X, while the second index corresponds to Y. std Standard deviation of Gaussians summed to generate pdf. List of length 2. ''' # Initialize pdf to zero pdf = np.zeros(X.shape) # Add to it a gaussian centred on each of the data points, with the appropriate variance for i in range(Z.shape[1]): pdf += (1./(2*np.pi*std[0]*std[1]))*np.exp(-0.5*(X - Z[0,i])**2/std[0]**2 - 0.5*(Y - Z[1,i])**2/std[1]**2) # return pdf
return np.nan, np.nan, np.nan
conditional_block
ecoliver.py
import numpy as np import scipy as sp from scipy import stats from scipy import linalg import scipy.ndimage as ndimage from datetime import date def find_nearest(array, value): idx=(np.abs(array-value)).argmin() return array[idx], idx def findxy(x, y, loc): ''' Find (i,j) coordinates of loc = (x0,y0) in 2D irregularly spaced coordinate matrices (2D numpy arrays) x and y. ''' # Dimensions Y, X = x.shape # Make matrix of indices iijj = np.meshgrid(range(X), range(Y)) ii = iijj[0].flatten() jj = iijj[1].flatten() # Calculate distance-squared dist2 = (x-loc[0])**2 + (y-loc[1])**2 k = np.argmin(dist2) return ii[k], jj[k] def nanmean(array, axis=None): return np.mean(np.ma.masked_array(array, np.isnan(array)), axis) def nanvar(array, axis=None): return np.var(np.ma.masked_array(array, np.isnan(array)), axis) def nanskew(array, axis=None): # only woks for 1D data return stats.skew(array[np.logical_not(np.isnan(array))]) def nanmax(array, axis=None): maxes = np.max(np.ma.masked_array(array, np.isnan(array)), axis) data = maxes.data mask = maxes.mask return data[np.logical_not(mask)] def nonans(array): ''' Return input array [1D numpy array] with all nan values removed ''' return array[~np.isnan(array)] def nozeros(array): ''' Return input array [1D numpy array] with all zeros removed ''' return array[~(array==0)] def latlon2km(lon1, lat1, lon2, lat2): EARTH_RADIUS = 6378.1 c = np.sin(np.radians(lat1)) * np.sin(np.radians(lat2)) + np.cos(np.radians(lon1-lon2)) * np.cos(np.radians(lat1)) * np.cos(np.radians(lat2)) d = EARTH_RADIUS * np.arccos(c) return d def latlonArea(lon1, lat1, lon2, lat2): ''' Surface area (in km2^) of a lat/lon "rectangle" included between specified longitudes and latitudes ''' EARTH_RADIUS = 6378.1 return (EARTH_RADIUS**2) * np.abs(lon1*np.pi/180. - lon2*np.pi/180.) * np.abs(np.sin(lat1*np.pi/180.) 
- np.sin(lat2*np.pi/180.)) def dxdy(lon, lat): ''' Takes M+1 length lat and N+1 length lon vectors and returns MxN 2D arrays of distances across cells in x and y directions ''' X = len(lon)-1 Y = len(lat)-1 dx = np.zeros((Y,X)) dy = np.zeros((Y,X)) for j in range(dx.shape[0]): for i in range(dx.shape[1]): dx[j,i] = 1e3 * latlon2km(lon[i+1], lat[j], lon[i], lat[j]) dy[j,i] = 1e3 * latlon2km(lon[i], lat[j+1], lon[i], lat[j]) return dx, dy def gradient(field, dx, dy): ''' Performs the gradient of input field given dx and dy fields (in metres) ''' field_y, field_x = np.gradient(field) field_x = field_x / dx field_y = field_y / dy return field_x, field_y def acf(x): result = np.correlate(x, x, mode = 'full') maxcorr = np.argmax(result) result = result / result[maxcorr] # <=== normalization return result[result.size/2:] def ccf(x, y): ''' Cross-Correlation Function +ve lags mean x leads y -ve lags mean x lags y ''' x = (x-np.mean(x))/(np.std(x)*len(x)) y = (y-np.mean(y))/np.std(y) ccf = np.correlate(x, y, mode='full') lags = np.arange(len(ccf)) - (len(x)-1) return lags, ccf def ttest_serialcorr(x, y): ''' Calculates the t-test for the means of two samples under an assumption of serial correlation, following the technique of Zwiers and von Storch (Journal of Climate, 1995) ''' # Valid (non-Nan) data, and return NaN if insufficient valid data validx = ~np.isnan(x) validy = ~np.isnan(y) if (validx.sum() <= 1) + (validy.sum() <= 1): return np.nan, np.nan else: # Sample lengths nx = len(x[validx]) ny = len(y[validy]) # Autocorrelation Function (pad NaN values for an approximation) rhox = acf(pad(x - np.nanmean(x))) rhoy = acf(pad(y - np.nanmean(y))) # Equivalent sample lengths nx = nx / (1 + ((1-np.arange(1, int(nx))/nx)*rhox[validx][:-1]).sum()) ny = ny / (1 + ((1-np.arange(1, int(ny))/ny)*rhoy[validy][:-1]).sum()) #if (nx < 30) or (ny < 30): # print 'Effective sample size(s) are less than 30: distribution of t statistics will deviate significantly from the t-distribution' # 
Sample standard deviations sx = np.sqrt(x[validx].var())
# t-statistic t = (np.nanmean(x) - np.nanmean(y))/s # Degrees of freedom df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1)) # p-value p = 1 - stats.t.cdf(t, df) return t, p def pad(data, maxPadLength=False): ''' Linearly interpolate over missing data (NaNs) in a time series. Inputs: data Time series [1D numpy array] maxPadLength Specifies the maximum length over which to interpolate, i.e., any consecutive blocks of NaNs with length greater than maxPadLength will be left as NaN. Set as an integer. maxPadLength=False (default) interpolates over all NaNs. Written by Eric Oliver, Institue for Marine and Antarctic Studies, University of Tasmania, Jun 2015 ''' data_padded = data.copy() if len(data) == np.isnan(data).sum(): return np.nan*data_padded else: bad_indexes = np.isnan(data) good_indexes = np.logical_not(bad_indexes) good_data = data[good_indexes] interpolated = np.interp(bad_indexes.nonzero()[0], good_indexes.nonzero()[0], good_data) data_padded[bad_indexes] = interpolated if maxPadLength: blocks, n_blocks = ndimage.label(np.isnan(data)) for bl in range(1, n_blocks+1): if (blocks==bl).sum() > maxPadLength: data_padded[blocks==bl] = np.nan return data_padded def runavg_periodic(ts, w): ''' Perform running average of ts (1D numpy array) using uniform window of width w (w must be odd). Assumes periodicity of ts. ''' N = len(ts) ts = np.append(ts, np.append(ts, ts)) ts_smooth = np.convolve(ts, np.ones(w)/w, mode='same') ts = ts_smooth[N:2*N] return ts def runavg(ts, w, mode='same'): ''' Perform running average of ts (1D numpy array) using uniform window of width w (w must be odd). Pads with NaNs outside of valid range. 
Option 'mode' specifies if output should be defined over ''' if mode == 'same': ts_smooth = np.convolve(ts, np.ones(w)/w, mode=mode) elif mode == 'valid': ts_smooth = np.append(np.append(np.nan*np.ones((w-1)/2), np.convolve(ts, np.ones(w)/w, mode=mode)), np.nan*np.ones((w-1)/2)) return ts_smooth def timevector(date_start, date_end): ''' Generated daily time vector, along with year, month, day, day-of-year, and full date information, given start and and date. Format is a 3-element list so that a start date of 3 May 2005 is specified date_start = [2005,5,3] Note that day-of year (doy) is [0 to 59, 61 to 366] for non-leap years and [0 to 366] for leap years. returns: t, dates, T, year, month, day, doy ''' # Time vector t = np.arange(date(date_start[0],date_start[1],date_start[2]).toordinal(),date(date_end[0],date_end[1],date_end[2]).toordinal()+1) T = len(t) # Date list dates = [date.fromordinal(tt.astype(int)) for tt in t] # Vectors for year, month, day-of-month year = np.zeros((T)) month = np.zeros((T)) day = np.zeros((T)) for tt in range(T): year[tt] = date.fromordinal(t[tt]).year month[tt] = date.fromordinal(t[tt]).month day[tt] = date.fromordinal(t[tt]).day year = year.astype(int) month = month.astype(int) day = day.astype(int) # Leap-year baseline for defining day-of-year values year_leapYear = 2012 # This year was a leap-year and therefore doy in range of 1 to 366 t_leapYear = np.arange(date(year_leapYear, 1, 1).toordinal(),date(year_leapYear, 12, 31).toordinal()+1) dates_leapYear = [date.fromordinal(tt.astype(int)) for tt in t_leapYear] month_leapYear = np.zeros((len(t_leapYear))) day_leapYear = np.zeros((len(t_leapYear))) doy_leapYear = np.zeros((len(t_leapYear))) for tt in range(len(t_leapYear)): month_leapYear[tt] = date.fromordinal(t_leapYear[tt]).month day_leapYear[tt] = date.fromordinal(t_leapYear[tt]).day doy_leapYear[tt] = t_leapYear[tt] - date(date.fromordinal(t_leapYear[tt]).year,1,1).toordinal() + 1 # Calculate day-of-year values doy = np.zeros((T)) 
for tt in range(T): doy[tt] = doy_leapYear[(month_leapYear == month[tt]) * (day_leapYear == day[tt])] doy = doy.astype(int) return t, dates, T, year, month, day, doy def spatial_filter(field, res, cut_lon, cut_lat): ''' Performs a spatial filter, removing all features with wavelenth scales larger than cut_lon in longitude and cut_lat in latitude from field. Field has spatial resolution of res and land identified by np.nan's ''' field_filt = np.zeros(field.shape) # see Chelton et al, Prog. Ocean., 2011 for explanation of factor of 1/5 sig_lon = (cut_lon/5.) / res sig_lat = (cut_lat/5.) / res land = np.isnan(field) field[land] = nanmean(field) field_filt = field - ndimage.gaussian_filter(field, [sig_lat, sig_lon]) field_filt[land] = np.nan return field_filt def trend(x, y, alpha=0.05): ''' Calculates the trend of y given the linear independent variable x. Outputs the mean, trend, and alpha-level (e.g., 0.05 for 95%) confidence limit on the trend. returns mean, trend, dtrend_95 ''' valid = ~np.isnan(y) if valid.sum() <= 1: return np.nan, np.nan, np.nan else: X = np.array([np.ones(len(x)), x-x.mean()]) beta = linalg.lstsq(X[:,valid].T, y[valid])[0] yhat = np.sum(beta*X.T, axis=1) t_stat = stats.t.isf(alpha/2, len(x[valid])-2) s = np.sqrt(np.sum((y[valid] - yhat[valid])**2) / (len(x[valid])-2)) Sxx = np.sum(X[1,valid]**2) - (np.sum(X[1,valid])**2)/len(x[valid]) # np.var(X, axis=1)[1] return beta[0], beta[1], t_stat * s / np.sqrt(Sxx) def trend_TheilSen(x, y, alpha=0.05): ''' Calculates the trend of y given the linear independent variable x. Outputs the mean, trend, and alpha-level (e.g., 0.05 for 95%) confidence limit on the trend. Estimate of the trend uses a Theil-Sen estimator. returns mean, trend, dtrend_95 ''' # Construct matrix of predictors, first column is all ones to estimate the mean, # second column is the time vector, equal to zero at mid-point. 
X = x-x.mean() # # Predictand (MHW property of interest) valid = ~np.isnan(y) # non-NaN indices # # Perform linear regression over valid indices if np.sum(~np.isnan(y)) > 0: # If at least one non-NaN value slope, y0, beta_lr, beta_up = stats.mstats.theilslopes(y[valid], X[valid], alpha=1-alpha) beta = np.array([y0, slope]) else: beta_lr, beta_up = [np.nan, np.nan] beta = [np.nan, np.nan] # return beta[0], beta[1], [beta_lr, beta_up] def acf(x): result = np.correlate(x, x, mode = 'full') maxcorr = np.argmax(result) result = result / result[maxcorr] # <=== normalization return result[result.size/2:] def ttest_unequalvar(x, y): ''' Calculates the t-test for the means of two samples under an assumption of no serial correlation (but different variances), following the technique of Zwiers and von Storch (Journal of Climate, 1995) ''' # Sample lengths nx = len(x) ny = len(y) # Sample standard deviations sx = np.sqrt(x.var()) sy = np.sqrt(y.var()) s = np.sqrt(sx**2/nx + sy**2/ny) # t-statistic t = np.abs(x.mean() - y.mean())/s # Degrees of freedom df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1)) # p-value p = 1 - stats.t.cdf(t, df) return t, p def pattern_correlation(x1, x2, centred=True): ''' Calculates the pattern correlation of x1 and x2. Assumes and x1 and x2 are 2D numpy arrays. Can handle missing values, even if missing values are distributed differently in x1 and x2. By default calculated the centred pattern correlation (centred =True) in which the spatial means of x1 and x2 are removed prior to calculation. Can calculated uncentred pattern correlation (centred=False) in which these means are not removed. . 
Written by Eric Oliver, IMAS/UTAS, Nov 2015 ''' # Flatten 2D arrays and find shared valid (non-nan) indices X1 = x1.flatten() X2 = x2.flatten() valid = ~(np.isnan(X1) + np.isnan(X2)) # Create Nx2 array of valid data X = np.zeros((valid.sum(), 2)) X[:,0] = X1[valid] X[:,1] = X2[valid] # Centre data if desired if centred: X[:,0] = X[:,0] - np.mean(X[:,0]) X[:,1] = X[:,1] - np.mean(X[:,1]) # # Calculate pattern correlation pcorr = np.corrcoef(X.T)[0,1] return pcorr def polyArea(x, y): ''' Area of a simple polygon, defined by vertices (x,y) in the plane. Assumes x and y and numpy arrays of the same length. Algorithm is based on the Shoelace Formula https://en.wikipedia.org/wiki/Shoelace_formula http://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates ''' return 0.5*np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1))) def point_inside_polygon(x, y, poly): ''' Determine if a point is inside a given polygon or not Polygon is a list of (x,y) pairs. http://www.ariel.com.au/a/python-point-int-poly.html ''' n = len(poly) inside =False # p1x, p1y = poly[0] for i in range(n+1): p2x, p2y = poly[i % n] if y > min(p1y, p2y): if y <= max(p1y, p2y): if x <= max(p1x, p2x): if p1y != p2y: xinters = (y - p1y)*(p2x - p1x)/(p2y - p1y)+p1x if p1x == p2x or x <= xinters: inside = not inside p1x, p1y = p2x, p2y # return inside def pAgree(k, n, p=0.5): ''' Returns the significance level of agreement of k results across n datasets, based on a binomial distribution. Assumes a "fair coin toss" (p=0.5) but this can be optionally changed. For example, if k=9 out of n=10 datasets show a trend of the same sign, this returns the significance level for such a result, assuming independence of the datasets (ha!) and a probability of a trend with that sign of p=0.5. 
''' return stats.binom.pmf(np.arange(k,n+1), n, p).sum() def weighted_quantile(values, quantiles, sample_weight=None, values_sorted=False, old_style=False): """ Very close to numpy.percentile, but supports weights. NOTE: quantiles should be in [0, 1]! :param values: numpy.array with data :param quantiles: array-like with many quantiles needed :param sample_weight: array-like of the same length as `array` :param values_sorted: bool, if True, then will avoid sorting of initial array :param old_style: if True, will correct output to be consistent with numpy.percentile. :return: numpy.array with computed quantiles. Authored by Alleo, Apr 16 '15 at 14:22 Source: https://stackoverflow.com/questions/21844024/weighted-percentile-using-numpy """ values = np.array(values) quantiles = np.array(quantiles) if sample_weight is None: sample_weight = np.ones(len(values)) sample_weight = np.array(sample_weight) assert np.all(quantiles >= 0) and np.all(quantiles <= 1), 'quantiles should be in [0, 1]' if not values_sorted: sorter = np.argsort(values) values = values[sorter] sample_weight = sample_weight[sorter] weighted_quantiles = np.cumsum(sample_weight) - 0.5 * sample_weight if old_style: # To be convenient with numpy.percentile weighted_quantiles -= weighted_quantiles[0] weighted_quantiles /= weighted_quantiles[-1] else: weighted_quantiles /= np.sum(sample_weight) return np.interp(quantiles, weighted_quantiles, values) def gaussianKernelSum(X, Y, Z, std): ''' Estimates the 2D probability distribution of scattered data Z on a regular grid (X, Y). Does so by summing Gaussian pdfs centred on each location Z with variance std[0] in the X dimension and STD[1] in the Y dimention. X, Y Matrices of coordinates, in np.meshgrid format Z (2 x N) array of data points, where N is the number of points. The first index of dimension zero corresponds to X, while the second index corresponds to Y. std Standard deviation of Gaussians summed to generate pdf. List of length 2. 
''' # Initialize pdf to zero pdf = np.zeros(X.shape) # Add to it a gaussian centred on each of the data points, with the appropriate variance for i in range(Z.shape[1]): pdf += (1./(2*np.pi*std[0]*std[1]))*np.exp(-0.5*(X - Z[0,i])**2/std[0]**2 - 0.5*(Y - Z[1,i])**2/std[1]**2) # return pdf
sy = np.sqrt(y[validy].var()) s = np.sqrt(sx**2/nx + sy**2/ny)
random_line_split
ecoliver.py
import numpy as np import scipy as sp from scipy import stats from scipy import linalg import scipy.ndimage as ndimage from datetime import date def find_nearest(array, value): idx=(np.abs(array-value)).argmin() return array[idx], idx def findxy(x, y, loc): ''' Find (i,j) coordinates of loc = (x0,y0) in 2D irregularly spaced coordinate matrices (2D numpy arrays) x and y. ''' # Dimensions Y, X = x.shape # Make matrix of indices iijj = np.meshgrid(range(X), range(Y)) ii = iijj[0].flatten() jj = iijj[1].flatten() # Calculate distance-squared dist2 = (x-loc[0])**2 + (y-loc[1])**2 k = np.argmin(dist2) return ii[k], jj[k] def nanmean(array, axis=None): return np.mean(np.ma.masked_array(array, np.isnan(array)), axis) def nanvar(array, axis=None): return np.var(np.ma.masked_array(array, np.isnan(array)), axis) def nanskew(array, axis=None): # only woks for 1D data return stats.skew(array[np.logical_not(np.isnan(array))]) def nanmax(array, axis=None): maxes = np.max(np.ma.masked_array(array, np.isnan(array)), axis) data = maxes.data mask = maxes.mask return data[np.logical_not(mask)] def nonans(array): ''' Return input array [1D numpy array] with all nan values removed ''' return array[~np.isnan(array)] def nozeros(array): ''' Return input array [1D numpy array] with all zeros removed ''' return array[~(array==0)] def latlon2km(lon1, lat1, lon2, lat2): EARTH_RADIUS = 6378.1 c = np.sin(np.radians(lat1)) * np.sin(np.radians(lat2)) + np.cos(np.radians(lon1-lon2)) * np.cos(np.radians(lat1)) * np.cos(np.radians(lat2)) d = EARTH_RADIUS * np.arccos(c) return d def latlonArea(lon1, lat1, lon2, lat2): ''' Surface area (in km2^) of a lat/lon "rectangle" included between specified longitudes and latitudes ''' EARTH_RADIUS = 6378.1 return (EARTH_RADIUS**2) * np.abs(lon1*np.pi/180. - lon2*np.pi/180.) * np.abs(np.sin(lat1*np.pi/180.) 
- np.sin(lat2*np.pi/180.)) def dxdy(lon, lat): ''' Takes M+1 length lat and N+1 length lon vectors and returns MxN 2D arrays of distances across cells in x and y directions ''' X = len(lon)-1 Y = len(lat)-1 dx = np.zeros((Y,X)) dy = np.zeros((Y,X)) for j in range(dx.shape[0]): for i in range(dx.shape[1]): dx[j,i] = 1e3 * latlon2km(lon[i+1], lat[j], lon[i], lat[j]) dy[j,i] = 1e3 * latlon2km(lon[i], lat[j+1], lon[i], lat[j]) return dx, dy def gradient(field, dx, dy): ''' Performs the gradient of input field given dx and dy fields (in metres) ''' field_y, field_x = np.gradient(field) field_x = field_x / dx field_y = field_y / dy return field_x, field_y def acf(x): result = np.correlate(x, x, mode = 'full') maxcorr = np.argmax(result) result = result / result[maxcorr] # <=== normalization return result[result.size/2:] def ccf(x, y): ''' Cross-Correlation Function +ve lags mean x leads y -ve lags mean x lags y ''' x = (x-np.mean(x))/(np.std(x)*len(x)) y = (y-np.mean(y))/np.std(y) ccf = np.correlate(x, y, mode='full') lags = np.arange(len(ccf)) - (len(x)-1) return lags, ccf def
(x, y): ''' Calculates the t-test for the means of two samples under an assumption of serial correlation, following the technique of Zwiers and von Storch (Journal of Climate, 1995) ''' # Valid (non-Nan) data, and return NaN if insufficient valid data validx = ~np.isnan(x) validy = ~np.isnan(y) if (validx.sum() <= 1) + (validy.sum() <= 1): return np.nan, np.nan else: # Sample lengths nx = len(x[validx]) ny = len(y[validy]) # Autocorrelation Function (pad NaN values for an approximation) rhox = acf(pad(x - np.nanmean(x))) rhoy = acf(pad(y - np.nanmean(y))) # Equivalent sample lengths nx = nx / (1 + ((1-np.arange(1, int(nx))/nx)*rhox[validx][:-1]).sum()) ny = ny / (1 + ((1-np.arange(1, int(ny))/ny)*rhoy[validy][:-1]).sum()) #if (nx < 30) or (ny < 30): # print 'Effective sample size(s) are less than 30: distribution of t statistics will deviate significantly from the t-distribution' # Sample standard deviations sx = np.sqrt(x[validx].var()) sy = np.sqrt(y[validy].var()) s = np.sqrt(sx**2/nx + sy**2/ny) # t-statistic t = (np.nanmean(x) - np.nanmean(y))/s # Degrees of freedom df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1)) # p-value p = 1 - stats.t.cdf(t, df) return t, p def pad(data, maxPadLength=False): ''' Linearly interpolate over missing data (NaNs) in a time series. Inputs: data Time series [1D numpy array] maxPadLength Specifies the maximum length over which to interpolate, i.e., any consecutive blocks of NaNs with length greater than maxPadLength will be left as NaN. Set as an integer. maxPadLength=False (default) interpolates over all NaNs. 
Written by Eric Oliver, Institue for Marine and Antarctic Studies, University of Tasmania, Jun 2015 ''' data_padded = data.copy() if len(data) == np.isnan(data).sum(): return np.nan*data_padded else: bad_indexes = np.isnan(data) good_indexes = np.logical_not(bad_indexes) good_data = data[good_indexes] interpolated = np.interp(bad_indexes.nonzero()[0], good_indexes.nonzero()[0], good_data) data_padded[bad_indexes] = interpolated if maxPadLength: blocks, n_blocks = ndimage.label(np.isnan(data)) for bl in range(1, n_blocks+1): if (blocks==bl).sum() > maxPadLength: data_padded[blocks==bl] = np.nan return data_padded def runavg_periodic(ts, w): ''' Perform running average of ts (1D numpy array) using uniform window of width w (w must be odd). Assumes periodicity of ts. ''' N = len(ts) ts = np.append(ts, np.append(ts, ts)) ts_smooth = np.convolve(ts, np.ones(w)/w, mode='same') ts = ts_smooth[N:2*N] return ts def runavg(ts, w, mode='same'): ''' Perform running average of ts (1D numpy array) using uniform window of width w (w must be odd). Pads with NaNs outside of valid range. Option 'mode' specifies if output should be defined over ''' if mode == 'same': ts_smooth = np.convolve(ts, np.ones(w)/w, mode=mode) elif mode == 'valid': ts_smooth = np.append(np.append(np.nan*np.ones((w-1)/2), np.convolve(ts, np.ones(w)/w, mode=mode)), np.nan*np.ones((w-1)/2)) return ts_smooth def timevector(date_start, date_end): ''' Generated daily time vector, along with year, month, day, day-of-year, and full date information, given start and and date. Format is a 3-element list so that a start date of 3 May 2005 is specified date_start = [2005,5,3] Note that day-of year (doy) is [0 to 59, 61 to 366] for non-leap years and [0 to 366] for leap years. 
returns: t, dates, T, year, month, day, doy ''' # Time vector t = np.arange(date(date_start[0],date_start[1],date_start[2]).toordinal(),date(date_end[0],date_end[1],date_end[2]).toordinal()+1) T = len(t) # Date list dates = [date.fromordinal(tt.astype(int)) for tt in t] # Vectors for year, month, day-of-month year = np.zeros((T)) month = np.zeros((T)) day = np.zeros((T)) for tt in range(T): year[tt] = date.fromordinal(t[tt]).year month[tt] = date.fromordinal(t[tt]).month day[tt] = date.fromordinal(t[tt]).day year = year.astype(int) month = month.astype(int) day = day.astype(int) # Leap-year baseline for defining day-of-year values year_leapYear = 2012 # This year was a leap-year and therefore doy in range of 1 to 366 t_leapYear = np.arange(date(year_leapYear, 1, 1).toordinal(),date(year_leapYear, 12, 31).toordinal()+1) dates_leapYear = [date.fromordinal(tt.astype(int)) for tt in t_leapYear] month_leapYear = np.zeros((len(t_leapYear))) day_leapYear = np.zeros((len(t_leapYear))) doy_leapYear = np.zeros((len(t_leapYear))) for tt in range(len(t_leapYear)): month_leapYear[tt] = date.fromordinal(t_leapYear[tt]).month day_leapYear[tt] = date.fromordinal(t_leapYear[tt]).day doy_leapYear[tt] = t_leapYear[tt] - date(date.fromordinal(t_leapYear[tt]).year,1,1).toordinal() + 1 # Calculate day-of-year values doy = np.zeros((T)) for tt in range(T): doy[tt] = doy_leapYear[(month_leapYear == month[tt]) * (day_leapYear == day[tt])] doy = doy.astype(int) return t, dates, T, year, month, day, doy def spatial_filter(field, res, cut_lon, cut_lat): ''' Performs a spatial filter, removing all features with wavelenth scales larger than cut_lon in longitude and cut_lat in latitude from field. Field has spatial resolution of res and land identified by np.nan's ''' field_filt = np.zeros(field.shape) # see Chelton et al, Prog. Ocean., 2011 for explanation of factor of 1/5 sig_lon = (cut_lon/5.) / res sig_lat = (cut_lat/5.) 
/ res land = np.isnan(field) field[land] = nanmean(field) field_filt = field - ndimage.gaussian_filter(field, [sig_lat, sig_lon]) field_filt[land] = np.nan return field_filt def trend(x, y, alpha=0.05): ''' Calculates the trend of y given the linear independent variable x. Outputs the mean, trend, and alpha-level (e.g., 0.05 for 95%) confidence limit on the trend. returns mean, trend, dtrend_95 ''' valid = ~np.isnan(y) if valid.sum() <= 1: return np.nan, np.nan, np.nan else: X = np.array([np.ones(len(x)), x-x.mean()]) beta = linalg.lstsq(X[:,valid].T, y[valid])[0] yhat = np.sum(beta*X.T, axis=1) t_stat = stats.t.isf(alpha/2, len(x[valid])-2) s = np.sqrt(np.sum((y[valid] - yhat[valid])**2) / (len(x[valid])-2)) Sxx = np.sum(X[1,valid]**2) - (np.sum(X[1,valid])**2)/len(x[valid]) # np.var(X, axis=1)[1] return beta[0], beta[1], t_stat * s / np.sqrt(Sxx) def trend_TheilSen(x, y, alpha=0.05): ''' Calculates the trend of y given the linear independent variable x. Outputs the mean, trend, and alpha-level (e.g., 0.05 for 95%) confidence limit on the trend. Estimate of the trend uses a Theil-Sen estimator. returns mean, trend, dtrend_95 ''' # Construct matrix of predictors, first column is all ones to estimate the mean, # second column is the time vector, equal to zero at mid-point. 
X = x-x.mean() # # Predictand (MHW property of interest) valid = ~np.isnan(y) # non-NaN indices # # Perform linear regression over valid indices if np.sum(~np.isnan(y)) > 0: # If at least one non-NaN value slope, y0, beta_lr, beta_up = stats.mstats.theilslopes(y[valid], X[valid], alpha=1-alpha) beta = np.array([y0, slope]) else: beta_lr, beta_up = [np.nan, np.nan] beta = [np.nan, np.nan] # return beta[0], beta[1], [beta_lr, beta_up] def acf(x): result = np.correlate(x, x, mode = 'full') maxcorr = np.argmax(result) result = result / result[maxcorr] # <=== normalization return result[result.size/2:] def ttest_unequalvar(x, y): ''' Calculates the t-test for the means of two samples under an assumption of no serial correlation (but different variances), following the technique of Zwiers and von Storch (Journal of Climate, 1995) ''' # Sample lengths nx = len(x) ny = len(y) # Sample standard deviations sx = np.sqrt(x.var()) sy = np.sqrt(y.var()) s = np.sqrt(sx**2/nx + sy**2/ny) # t-statistic t = np.abs(x.mean() - y.mean())/s # Degrees of freedom df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1)) # p-value p = 1 - stats.t.cdf(t, df) return t, p def pattern_correlation(x1, x2, centred=True): ''' Calculates the pattern correlation of x1 and x2. Assumes and x1 and x2 are 2D numpy arrays. Can handle missing values, even if missing values are distributed differently in x1 and x2. By default calculated the centred pattern correlation (centred =True) in which the spatial means of x1 and x2 are removed prior to calculation. Can calculated uncentred pattern correlation (centred=False) in which these means are not removed. . 
Written by Eric Oliver, IMAS/UTAS, Nov 2015 ''' # Flatten 2D arrays and find shared valid (non-nan) indices X1 = x1.flatten() X2 = x2.flatten() valid = ~(np.isnan(X1) + np.isnan(X2)) # Create Nx2 array of valid data X = np.zeros((valid.sum(), 2)) X[:,0] = X1[valid] X[:,1] = X2[valid] # Centre data if desired if centred: X[:,0] = X[:,0] - np.mean(X[:,0]) X[:,1] = X[:,1] - np.mean(X[:,1]) # # Calculate pattern correlation pcorr = np.corrcoef(X.T)[0,1] return pcorr def polyArea(x, y): ''' Area of a simple polygon, defined by vertices (x,y) in the plane. Assumes x and y and numpy arrays of the same length. Algorithm is based on the Shoelace Formula https://en.wikipedia.org/wiki/Shoelace_formula http://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates ''' return 0.5*np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1))) def point_inside_polygon(x, y, poly): ''' Determine if a point is inside a given polygon or not Polygon is a list of (x,y) pairs. http://www.ariel.com.au/a/python-point-int-poly.html ''' n = len(poly) inside =False # p1x, p1y = poly[0] for i in range(n+1): p2x, p2y = poly[i % n] if y > min(p1y, p2y): if y <= max(p1y, p2y): if x <= max(p1x, p2x): if p1y != p2y: xinters = (y - p1y)*(p2x - p1x)/(p2y - p1y)+p1x if p1x == p2x or x <= xinters: inside = not inside p1x, p1y = p2x, p2y # return inside def pAgree(k, n, p=0.5): ''' Returns the significance level of agreement of k results across n datasets, based on a binomial distribution. Assumes a "fair coin toss" (p=0.5) but this can be optionally changed. For example, if k=9 out of n=10 datasets show a trend of the same sign, this returns the significance level for such a result, assuming independence of the datasets (ha!) and a probability of a trend with that sign of p=0.5. 
''' return stats.binom.pmf(np.arange(k,n+1), n, p).sum() def weighted_quantile(values, quantiles, sample_weight=None, values_sorted=False, old_style=False): """ Very close to numpy.percentile, but supports weights. NOTE: quantiles should be in [0, 1]! :param values: numpy.array with data :param quantiles: array-like with many quantiles needed :param sample_weight: array-like of the same length as `array` :param values_sorted: bool, if True, then will avoid sorting of initial array :param old_style: if True, will correct output to be consistent with numpy.percentile. :return: numpy.array with computed quantiles. Authored by Alleo, Apr 16 '15 at 14:22 Source: https://stackoverflow.com/questions/21844024/weighted-percentile-using-numpy """ values = np.array(values) quantiles = np.array(quantiles) if sample_weight is None: sample_weight = np.ones(len(values)) sample_weight = np.array(sample_weight) assert np.all(quantiles >= 0) and np.all(quantiles <= 1), 'quantiles should be in [0, 1]' if not values_sorted: sorter = np.argsort(values) values = values[sorter] sample_weight = sample_weight[sorter] weighted_quantiles = np.cumsum(sample_weight) - 0.5 * sample_weight if old_style: # To be convenient with numpy.percentile weighted_quantiles -= weighted_quantiles[0] weighted_quantiles /= weighted_quantiles[-1] else: weighted_quantiles /= np.sum(sample_weight) return np.interp(quantiles, weighted_quantiles, values) def gaussianKernelSum(X, Y, Z, std): ''' Estimates the 2D probability distribution of scattered data Z on a regular grid (X, Y). Does so by summing Gaussian pdfs centred on each location Z with variance std[0] in the X dimension and STD[1] in the Y dimention. X, Y Matrices of coordinates, in np.meshgrid format Z (2 x N) array of data points, where N is the number of points. The first index of dimension zero corresponds to X, while the second index corresponds to Y. std Standard deviation of Gaussians summed to generate pdf. List of length 2. 
''' # Initialize pdf to zero pdf = np.zeros(X.shape) # Add to it a gaussian centred on each of the data points, with the appropriate variance for i in range(Z.shape[1]): pdf += (1./(2*np.pi*std[0]*std[1]))*np.exp(-0.5*(X - Z[0,i])**2/std[0]**2 - 0.5*(Y - Z[1,i])**2/std[1]**2) # return pdf
ttest_serialcorr
identifier_name
ecoliver.py
import numpy as np import scipy as sp from scipy import stats from scipy import linalg import scipy.ndimage as ndimage from datetime import date def find_nearest(array, value): idx=(np.abs(array-value)).argmin() return array[idx], idx def findxy(x, y, loc): ''' Find (i,j) coordinates of loc = (x0,y0) in 2D irregularly spaced coordinate matrices (2D numpy arrays) x and y. ''' # Dimensions Y, X = x.shape # Make matrix of indices iijj = np.meshgrid(range(X), range(Y)) ii = iijj[0].flatten() jj = iijj[1].flatten() # Calculate distance-squared dist2 = (x-loc[0])**2 + (y-loc[1])**2 k = np.argmin(dist2) return ii[k], jj[k] def nanmean(array, axis=None): return np.mean(np.ma.masked_array(array, np.isnan(array)), axis) def nanvar(array, axis=None): return np.var(np.ma.masked_array(array, np.isnan(array)), axis) def nanskew(array, axis=None): # only woks for 1D data return stats.skew(array[np.logical_not(np.isnan(array))]) def nanmax(array, axis=None): maxes = np.max(np.ma.masked_array(array, np.isnan(array)), axis) data = maxes.data mask = maxes.mask return data[np.logical_not(mask)] def nonans(array): ''' Return input array [1D numpy array] with all nan values removed ''' return array[~np.isnan(array)] def nozeros(array): ''' Return input array [1D numpy array] with all zeros removed ''' return array[~(array==0)] def latlon2km(lon1, lat1, lon2, lat2): EARTH_RADIUS = 6378.1 c = np.sin(np.radians(lat1)) * np.sin(np.radians(lat2)) + np.cos(np.radians(lon1-lon2)) * np.cos(np.radians(lat1)) * np.cos(np.radians(lat2)) d = EARTH_RADIUS * np.arccos(c) return d def latlonArea(lon1, lat1, lon2, lat2): ''' Surface area (in km2^) of a lat/lon "rectangle" included between specified longitudes and latitudes ''' EARTH_RADIUS = 6378.1 return (EARTH_RADIUS**2) * np.abs(lon1*np.pi/180. - lon2*np.pi/180.) * np.abs(np.sin(lat1*np.pi/180.) 
- np.sin(lat2*np.pi/180.)) def dxdy(lon, lat): ''' Takes M+1 length lat and N+1 length lon vectors and returns MxN 2D arrays of distances across cells in x and y directions ''' X = len(lon)-1 Y = len(lat)-1 dx = np.zeros((Y,X)) dy = np.zeros((Y,X)) for j in range(dx.shape[0]): for i in range(dx.shape[1]): dx[j,i] = 1e3 * latlon2km(lon[i+1], lat[j], lon[i], lat[j]) dy[j,i] = 1e3 * latlon2km(lon[i], lat[j+1], lon[i], lat[j]) return dx, dy def gradient(field, dx, dy): ''' Performs the gradient of input field given dx and dy fields (in metres) ''' field_y, field_x = np.gradient(field) field_x = field_x / dx field_y = field_y / dy return field_x, field_y def acf(x): result = np.correlate(x, x, mode = 'full') maxcorr = np.argmax(result) result = result / result[maxcorr] # <=== normalization return result[result.size/2:] def ccf(x, y): ''' Cross-Correlation Function +ve lags mean x leads y -ve lags mean x lags y ''' x = (x-np.mean(x))/(np.std(x)*len(x)) y = (y-np.mean(y))/np.std(y) ccf = np.correlate(x, y, mode='full') lags = np.arange(len(ccf)) - (len(x)-1) return lags, ccf def ttest_serialcorr(x, y): ''' Calculates the t-test for the means of two samples under an assumption of serial correlation, following the technique of Zwiers and von Storch (Journal of Climate, 1995) ''' # Valid (non-Nan) data, and return NaN if insufficient valid data validx = ~np.isnan(x) validy = ~np.isnan(y) if (validx.sum() <= 1) + (validy.sum() <= 1): return np.nan, np.nan else: # Sample lengths nx = len(x[validx]) ny = len(y[validy]) # Autocorrelation Function (pad NaN values for an approximation) rhox = acf(pad(x - np.nanmean(x))) rhoy = acf(pad(y - np.nanmean(y))) # Equivalent sample lengths nx = nx / (1 + ((1-np.arange(1, int(nx))/nx)*rhox[validx][:-1]).sum()) ny = ny / (1 + ((1-np.arange(1, int(ny))/ny)*rhoy[validy][:-1]).sum()) #if (nx < 30) or (ny < 30): # print 'Effective sample size(s) are less than 30: distribution of t statistics will deviate significantly from the t-distribution' # 
Sample standard deviations sx = np.sqrt(x[validx].var()) sy = np.sqrt(y[validy].var()) s = np.sqrt(sx**2/nx + sy**2/ny) # t-statistic t = (np.nanmean(x) - np.nanmean(y))/s # Degrees of freedom df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1)) # p-value p = 1 - stats.t.cdf(t, df) return t, p def pad(data, maxPadLength=False): ''' Linearly interpolate over missing data (NaNs) in a time series. Inputs: data Time series [1D numpy array] maxPadLength Specifies the maximum length over which to interpolate, i.e., any consecutive blocks of NaNs with length greater than maxPadLength will be left as NaN. Set as an integer. maxPadLength=False (default) interpolates over all NaNs. Written by Eric Oliver, Institue for Marine and Antarctic Studies, University of Tasmania, Jun 2015 ''' data_padded = data.copy() if len(data) == np.isnan(data).sum(): return np.nan*data_padded else: bad_indexes = np.isnan(data) good_indexes = np.logical_not(bad_indexes) good_data = data[good_indexes] interpolated = np.interp(bad_indexes.nonzero()[0], good_indexes.nonzero()[0], good_data) data_padded[bad_indexes] = interpolated if maxPadLength: blocks, n_blocks = ndimage.label(np.isnan(data)) for bl in range(1, n_blocks+1): if (blocks==bl).sum() > maxPadLength: data_padded[blocks==bl] = np.nan return data_padded def runavg_periodic(ts, w): ''' Perform running average of ts (1D numpy array) using uniform window of width w (w must be odd). Assumes periodicity of ts. ''' N = len(ts) ts = np.append(ts, np.append(ts, ts)) ts_smooth = np.convolve(ts, np.ones(w)/w, mode='same') ts = ts_smooth[N:2*N] return ts def runavg(ts, w, mode='same'): ''' Perform running average of ts (1D numpy array) using uniform window of width w (w must be odd). Pads with NaNs outside of valid range. 
Option 'mode' specifies if output should be defined over ''' if mode == 'same': ts_smooth = np.convolve(ts, np.ones(w)/w, mode=mode) elif mode == 'valid': ts_smooth = np.append(np.append(np.nan*np.ones((w-1)/2), np.convolve(ts, np.ones(w)/w, mode=mode)), np.nan*np.ones((w-1)/2)) return ts_smooth def timevector(date_start, date_end): ''' Generated daily time vector, along with year, month, day, day-of-year, and full date information, given start and and date. Format is a 3-element list so that a start date of 3 May 2005 is specified date_start = [2005,5,3] Note that day-of year (doy) is [0 to 59, 61 to 366] for non-leap years and [0 to 366] for leap years. returns: t, dates, T, year, month, day, doy ''' # Time vector t = np.arange(date(date_start[0],date_start[1],date_start[2]).toordinal(),date(date_end[0],date_end[1],date_end[2]).toordinal()+1) T = len(t) # Date list dates = [date.fromordinal(tt.astype(int)) for tt in t] # Vectors for year, month, day-of-month year = np.zeros((T)) month = np.zeros((T)) day = np.zeros((T)) for tt in range(T): year[tt] = date.fromordinal(t[tt]).year month[tt] = date.fromordinal(t[tt]).month day[tt] = date.fromordinal(t[tt]).day year = year.astype(int) month = month.astype(int) day = day.astype(int) # Leap-year baseline for defining day-of-year values year_leapYear = 2012 # This year was a leap-year and therefore doy in range of 1 to 366 t_leapYear = np.arange(date(year_leapYear, 1, 1).toordinal(),date(year_leapYear, 12, 31).toordinal()+1) dates_leapYear = [date.fromordinal(tt.astype(int)) for tt in t_leapYear] month_leapYear = np.zeros((len(t_leapYear))) day_leapYear = np.zeros((len(t_leapYear))) doy_leapYear = np.zeros((len(t_leapYear))) for tt in range(len(t_leapYear)): month_leapYear[tt] = date.fromordinal(t_leapYear[tt]).month day_leapYear[tt] = date.fromordinal(t_leapYear[tt]).day doy_leapYear[tt] = t_leapYear[tt] - date(date.fromordinal(t_leapYear[tt]).year,1,1).toordinal() + 1 # Calculate day-of-year values doy = np.zeros((T)) 
for tt in range(T): doy[tt] = doy_leapYear[(month_leapYear == month[tt]) * (day_leapYear == day[tt])] doy = doy.astype(int) return t, dates, T, year, month, day, doy def spatial_filter(field, res, cut_lon, cut_lat): ''' Performs a spatial filter, removing all features with wavelenth scales larger than cut_lon in longitude and cut_lat in latitude from field. Field has spatial resolution of res and land identified by np.nan's ''' field_filt = np.zeros(field.shape) # see Chelton et al, Prog. Ocean., 2011 for explanation of factor of 1/5 sig_lon = (cut_lon/5.) / res sig_lat = (cut_lat/5.) / res land = np.isnan(field) field[land] = nanmean(field) field_filt = field - ndimage.gaussian_filter(field, [sig_lat, sig_lon]) field_filt[land] = np.nan return field_filt def trend(x, y, alpha=0.05):
def trend_TheilSen(x, y, alpha=0.05): ''' Calculates the trend of y given the linear independent variable x. Outputs the mean, trend, and alpha-level (e.g., 0.05 for 95%) confidence limit on the trend. Estimate of the trend uses a Theil-Sen estimator. returns mean, trend, dtrend_95 ''' # Construct matrix of predictors, first column is all ones to estimate the mean, # second column is the time vector, equal to zero at mid-point. X = x-x.mean() # # Predictand (MHW property of interest) valid = ~np.isnan(y) # non-NaN indices # # Perform linear regression over valid indices if np.sum(~np.isnan(y)) > 0: # If at least one non-NaN value slope, y0, beta_lr, beta_up = stats.mstats.theilslopes(y[valid], X[valid], alpha=1-alpha) beta = np.array([y0, slope]) else: beta_lr, beta_up = [np.nan, np.nan] beta = [np.nan, np.nan] # return beta[0], beta[1], [beta_lr, beta_up] def acf(x): result = np.correlate(x, x, mode = 'full') maxcorr = np.argmax(result) result = result / result[maxcorr] # <=== normalization return result[result.size/2:] def ttest_unequalvar(x, y): ''' Calculates the t-test for the means of two samples under an assumption of no serial correlation (but different variances), following the technique of Zwiers and von Storch (Journal of Climate, 1995) ''' # Sample lengths nx = len(x) ny = len(y) # Sample standard deviations sx = np.sqrt(x.var()) sy = np.sqrt(y.var()) s = np.sqrt(sx**2/nx + sy**2/ny) # t-statistic t = np.abs(x.mean() - y.mean())/s # Degrees of freedom df = (sx**2/nx + sy**2/ny)**2 / ((sx**2/nx)**2/(nx-1) + (sy**2/ny)**2/(ny-1)) # p-value p = 1 - stats.t.cdf(t, df) return t, p def pattern_correlation(x1, x2, centred=True): ''' Calculates the pattern correlation of x1 and x2. Assumes and x1 and x2 are 2D numpy arrays. Can handle missing values, even if missing values are distributed differently in x1 and x2. By default calculated the centred pattern correlation (centred =True) in which the spatial means of x1 and x2 are removed prior to calculation. 
Can calculated uncentred pattern correlation (centred=False) in which these means are not removed. . Written by Eric Oliver, IMAS/UTAS, Nov 2015 ''' # Flatten 2D arrays and find shared valid (non-nan) indices X1 = x1.flatten() X2 = x2.flatten() valid = ~(np.isnan(X1) + np.isnan(X2)) # Create Nx2 array of valid data X = np.zeros((valid.sum(), 2)) X[:,0] = X1[valid] X[:,1] = X2[valid] # Centre data if desired if centred: X[:,0] = X[:,0] - np.mean(X[:,0]) X[:,1] = X[:,1] - np.mean(X[:,1]) # # Calculate pattern correlation pcorr = np.corrcoef(X.T)[0,1] return pcorr def polyArea(x, y): ''' Area of a simple polygon, defined by vertices (x,y) in the plane. Assumes x and y and numpy arrays of the same length. Algorithm is based on the Shoelace Formula https://en.wikipedia.org/wiki/Shoelace_formula http://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates ''' return 0.5*np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1))) def point_inside_polygon(x, y, poly): ''' Determine if a point is inside a given polygon or not Polygon is a list of (x,y) pairs. http://www.ariel.com.au/a/python-point-int-poly.html ''' n = len(poly) inside =False # p1x, p1y = poly[0] for i in range(n+1): p2x, p2y = poly[i % n] if y > min(p1y, p2y): if y <= max(p1y, p2y): if x <= max(p1x, p2x): if p1y != p2y: xinters = (y - p1y)*(p2x - p1x)/(p2y - p1y)+p1x if p1x == p2x or x <= xinters: inside = not inside p1x, p1y = p2x, p2y # return inside def pAgree(k, n, p=0.5): ''' Returns the significance level of agreement of k results across n datasets, based on a binomial distribution. Assumes a "fair coin toss" (p=0.5) but this can be optionally changed. For example, if k=9 out of n=10 datasets show a trend of the same sign, this returns the significance level for such a result, assuming independence of the datasets (ha!) and a probability of a trend with that sign of p=0.5. 
''' return stats.binom.pmf(np.arange(k,n+1), n, p).sum() def weighted_quantile(values, quantiles, sample_weight=None, values_sorted=False, old_style=False): """ Very close to numpy.percentile, but supports weights. NOTE: quantiles should be in [0, 1]! :param values: numpy.array with data :param quantiles: array-like with many quantiles needed :param sample_weight: array-like of the same length as `array` :param values_sorted: bool, if True, then will avoid sorting of initial array :param old_style: if True, will correct output to be consistent with numpy.percentile. :return: numpy.array with computed quantiles. Authored by Alleo, Apr 16 '15 at 14:22 Source: https://stackoverflow.com/questions/21844024/weighted-percentile-using-numpy """ values = np.array(values) quantiles = np.array(quantiles) if sample_weight is None: sample_weight = np.ones(len(values)) sample_weight = np.array(sample_weight) assert np.all(quantiles >= 0) and np.all(quantiles <= 1), 'quantiles should be in [0, 1]' if not values_sorted: sorter = np.argsort(values) values = values[sorter] sample_weight = sample_weight[sorter] weighted_quantiles = np.cumsum(sample_weight) - 0.5 * sample_weight if old_style: # To be convenient with numpy.percentile weighted_quantiles -= weighted_quantiles[0] weighted_quantiles /= weighted_quantiles[-1] else: weighted_quantiles /= np.sum(sample_weight) return np.interp(quantiles, weighted_quantiles, values) def gaussianKernelSum(X, Y, Z, std): ''' Estimates the 2D probability distribution of scattered data Z on a regular grid (X, Y). Does so by summing Gaussian pdfs centred on each location Z with variance std[0] in the X dimension and STD[1] in the Y dimention. X, Y Matrices of coordinates, in np.meshgrid format Z (2 x N) array of data points, where N is the number of points. The first index of dimension zero corresponds to X, while the second index corresponds to Y. std Standard deviation of Gaussians summed to generate pdf. List of length 2. 
''' # Initialize pdf to zero pdf = np.zeros(X.shape) # Add to it a gaussian centred on each of the data points, with the appropriate variance for i in range(Z.shape[1]): pdf += (1./(2*np.pi*std[0]*std[1]))*np.exp(-0.5*(X - Z[0,i])**2/std[0]**2 - 0.5*(Y - Z[1,i])**2/std[1]**2) # return pdf
''' Calculates the trend of y given the linear independent variable x. Outputs the mean, trend, and alpha-level (e.g., 0.05 for 95%) confidence limit on the trend. returns mean, trend, dtrend_95 ''' valid = ~np.isnan(y) if valid.sum() <= 1: return np.nan, np.nan, np.nan else: X = np.array([np.ones(len(x)), x-x.mean()]) beta = linalg.lstsq(X[:,valid].T, y[valid])[0] yhat = np.sum(beta*X.T, axis=1) t_stat = stats.t.isf(alpha/2, len(x[valid])-2) s = np.sqrt(np.sum((y[valid] - yhat[valid])**2) / (len(x[valid])-2)) Sxx = np.sum(X[1,valid]**2) - (np.sum(X[1,valid])**2)/len(x[valid]) # np.var(X, axis=1)[1] return beta[0], beta[1], t_stat * s / np.sqrt(Sxx)
identifier_body
window.rs
use crate::prelude::*; use crossterm::style::{Attribute, Print, Styler}; use crossterm::{cursor, terminal, ExecutableCommand, QueueableCommand}; use std::io::{stdout, Write}; const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); pub struct Window { started_at: StartedAt, lines: u16, cols: u16, options: Arc<Mutex<Options>>, alltime_stats: Arc<Mutex<Stats>>, ring_buffer: Arc<Mutex<RingBuffer>>, } impl Window { pub fn new( options: Arc<Mutex<Options>>, alltime_stats: Arc<Mutex<Stats>>, ring_buffer: Arc<Mutex<RingBuffer>>, ) -> Self { let (cols, lines) = crossterm::terminal::size().unwrap(); let now = std::time::Instant::now(); Window { started_at: StartedAt(now), lines, cols, options, alltime_stats, ring_buffer, } } pub fn run(&mut self) -> Result<(), Error> { // temporary lock on options to get interval let options = self.options.lock().unwrap(); let interval = options.interval; drop(options); // support f64 seconds by multiplying then using from_millis let interval = std::time::Duration::from_millis((interval * 1000.0) as u64); crossterm::terminal::enable_raw_mode()?; // stdout().execute(crossterm::event::EnableMouseCapture)? stdout().execute(cursor::Hide)?; stdout().execute(terminal::EnterAlternateScreen)?; stdout().execute(terminal::SetTitle("apachetop"))?; loop { self.redraw()?; if crossterm::event::poll(interval)? && !self.handle_event()? { break; } } crossterm::terminal::disable_raw_mode()?; stdout().execute(terminal::LeaveAlternateScreen)?; stdout().execute(cursor::Show)?; // stdout().execute(crossterm::event::DisableMouseCapture)?; Ok(()) } fn redraw(&mut self) -> Result<(), Error> { let mut stdout = stdout(); stdout .queue(terminal::Clear(terminal::ClearType::All))? .queue(cursor::MoveTo(0, 0))? .queue(Print(format!("apachetop {}", CARGO_PKG_VERSION)))? .queue(cursor::MoveTo(self.cols / 2, 0))? .queue(Print(self.started_at.to_string()))? .queue(cursor::MoveTo(self.cols - 8 as u16, 0))? 
.queue(Print(chrono::Local::now().format("%H:%M:%S").to_string()))?; { let alltime_stats = self.alltime_stats.lock().unwrap(); let elapsed = self.started_at.elapsed().as_secs() as f64; stdout .queue(cursor::MoveTo(0, 1))? .queue(Print(self.primary_stats_line( &alltime_stats, elapsed, true, )))? .queue(cursor::MoveTo(0, 2))? .queue(Print(self.per_code_line(&alltime_stats)))?; } // mutex on alltime_stats { let mut ring_buffer = self.ring_buffer.lock().unwrap(); // TODO: better in another thread, not at display time? ring_buffer.cleanup()?; let elapsed = match ring_buffer.first() { Some(f) => { let first = chrono::DateTime::<chrono::Utc>::from(f.time); (chrono::Utc::now() - first).num_seconds() as f64 } None => 1.0, // avoid divide by zero later }; stdout .queue(cursor::MoveTo(0, 3))? .queue(Print(self.primary_stats_line( &ring_buffer.stats, elapsed, false, )))? .queue(cursor::MoveTo(0, 4))? .queue(Print(self.per_code_line(&ring_buffer.stats)))?; { let options = self.options.lock().unwrap(); stdout.queue(cursor::MoveTo(0, 6))?.queue(Print( format!( "{:width$}", &format!( " REQS REQS/S SIZE SZ/S {}", options.group.to_string() ), width = self.cols as usize ) .negative(), ))?; } // read lock on options if let Some(grouped) = &ring_buffer.grouped { use lazysort::SortedBy; // convert HashMap<GroupKey, RingBuffer> to Vec<(GroupKey, RingBuffer)>, // sort it by the RingBuffers, then lazy-sort the first n lines for display. for (key, ring_buffer) in grouped .iter() .filter(|(_, v)| !v.buffer.is_empty()) // filter out empty buffers to save work .collect::<Vec<(&GroupKey, &RingBuffer)>>() .iter() .sorted_by(|a, b| b.1.cmp(&a.1)) // see impl Ord for RingBuffer .take((self.lines - 7/* lines used for header */) as usize) { stdout .queue(cursor::MoveToNextLine(1))? .queue(Print(self.table_line(key, ring_buffer, elapsed)))?; } } } // mutex on ring_buffer stdout.flush()?; Ok(()) } fn
(&mut self) -> Result<bool, Error> { use crossterm::event::Event::{Key, Mouse, Resize}; use crossterm::event::KeyCode::Char; use crossterm::event::{KeyEvent, KeyModifiers}; match crossterm::event::read()? { Key(KeyEvent { code: Char('q'), .. }) | Key(KeyEvent { modifiers: KeyModifiers::CONTROL, code: Char('c'), }) => return Ok(false), Key(KeyEvent { code: Char('o'), .. }) => { self.toggle_sort(); } Key(KeyEvent { code: Char('g'), .. }) => { self.toggle_group(); } Key(event) => info!("{:?}", event), Mouse(event) => info!("{:?}", event), Resize(cols, lines) => { self.lines = lines; self.cols = cols; } } Ok(true) } fn toggle_sort(&self) { self.options.lock().unwrap().toggle_sort(); } fn toggle_group(&self) { let mut o = self.options.lock().unwrap(); let group_by = o.toggle_group(); drop(o); self.ring_buffer.lock().unwrap().regroup(group_by); } fn table_line(&self, key: &GroupKey, rr: &RingBuffer, elapsed: f64) -> String { let reqs = rr.stats.global.requests as f64; format!( "{reqs:6} {reqs_per_sec:6.2} {hb:>6} {hb_per_sec:>6} {key:width$}", width = (self.cols - 30) as usize, reqs = reqs, reqs_per_sec = reqs / elapsed, hb = Self::humansize(rr.stats.global.bytes as f64), hb_per_sec = Self::humansize(rr.stats.global.bytes as f64 / elapsed), key = key ) } // All: 638924 reqs ( 182.65/sec) 3433539K ( 981.6K/sec) ( 5.4K/req) fn primary_stats_line(&self, stats: &Stats, elapsed: f64, alltime: bool) -> String { let reqs_non_zero = std::cmp::max(stats.global.requests, 1) as f64; let reqs = stats.global.requests as f64; let header = if alltime { "All:" } else { "R:" }; format!( "{header:5} {bold}{reqs:>space$}{reset} ({reqs_per_sec:6.2}/sec) {bold}{hb:>space$}{reset} ({hb_per_sec}/sec) {hb_per_req}/req", bold = Attribute::Bold, reset = Attribute::Reset, space = ((self.cols - 50) / 2) as usize, header = header, reqs = reqs, reqs_per_sec = reqs / elapsed, hb = Self::humansize(stats.global.bytes as f64), hb_per_sec = Self::humansize(stats.global.bytes as f64 / elapsed), hb_per_req 
= Self::humansize((stats.global.bytes as f64) / reqs_non_zero) ) } // 2xx: 455415 (71.3%) 3xx: 175745 (27.5%) 4xx: 7746 ( 1.2%) 5xx: 10 ( 0.0%) fn per_code_line(&self, stats: &Stats) -> String { let stats_2 = &stats.by_status_code[2]; let stats_3 = &stats.by_status_code[3]; let stats_4 = &stats.by_status_code[4]; let stats_5 = &stats.by_status_code[5]; // closure to reduce some duplication for some munging below let c = |rb_stats: &crate::stats::Counters| -> (f64, usize) { // avoid divide by zero if there's no requests yet let pct = if stats.global.requests > 0 { 100.0 * (rb_stats.requests as f64 / stats.global.requests as f64) } else { 0.0 }; // intelligent dp detection: eg 2.34%, 10.5%, 100% let dp = if (pct - 100.0).abs() < f64::EPSILON { 0 } else if pct < 10.0 { 2 } else { 1 }; (pct, dp) }; let (code_2_pct, code_2_dp) = c(stats_2); let (code_3_pct, code_3_dp) = c(stats_3); let (code_4_pct, code_4_dp) = c(stats_4); let (code_5_pct, code_5_dp) = c(stats_5); format!( "2xx: {code_2:space$} ({code_2_pct:4.code_2_dp$}%) 3xx: {code_3:space$} ({code_3_pct:4.code_3_dp$}%) 4xx: {code_4:space$} ({code_4_pct:4.code_4_dp$}%) 5xx: {code_5:space$} ({code_5_pct:4.code_5_dp$}%)", space = ((self.cols - 55) / 4) as usize, code_2 = stats_2.requests, code_2_dp = code_2_dp, code_2_pct = code_2_pct, code_3 = stats_3.requests, code_3_dp = code_3_dp, code_3_pct = code_3_pct, code_4 = stats_4.requests, code_4_dp = code_4_dp, code_4_pct = code_4_pct, code_5 = stats_5.requests, code_5_dp = code_5_dp, code_5_pct = code_5_pct, ) } fn humansize(bytes: f64) -> String { if bytes > 1073741824.0 { format!("{:6.2}G", (bytes / 1073741824.0)) } else if bytes > 1048576.0 { format!("{:6.2}M", (bytes / 1048576.00)) } else if bytes > 1024.0 { format!("{:6.2}K", (bytes / 1024.0)) } else { format!("{:6.0}B", bytes) } } } struct StartedAt(std::time::Instant); impl StartedAt { fn elapsed(&self) -> std::time::Duration { self.0.elapsed() } } impl std::fmt::Display for StartedAt { fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { let duration = self.0.elapsed().as_secs(); let hours = duration / 3600; let minutes = duration % 3600 / 60; let seconds = duration % 60; write!(f, "runtime: ")?; if hours > 0 { write!(f, "{}h ", hours)?; } if hours > 0 || minutes > 0 { write!(f, "{}m ", minutes)?; } write!(f, "{}s", seconds) } }
handle_event
identifier_name
window.rs
use crate::prelude::*; use crossterm::style::{Attribute, Print, Styler}; use crossterm::{cursor, terminal, ExecutableCommand, QueueableCommand}; use std::io::{stdout, Write}; const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); pub struct Window { started_at: StartedAt, lines: u16, cols: u16, options: Arc<Mutex<Options>>, alltime_stats: Arc<Mutex<Stats>>, ring_buffer: Arc<Mutex<RingBuffer>>, } impl Window { pub fn new( options: Arc<Mutex<Options>>, alltime_stats: Arc<Mutex<Stats>>, ring_buffer: Arc<Mutex<RingBuffer>>, ) -> Self { let (cols, lines) = crossterm::terminal::size().unwrap(); let now = std::time::Instant::now(); Window { started_at: StartedAt(now), lines, cols, options, alltime_stats, ring_buffer, } } pub fn run(&mut self) -> Result<(), Error> { // temporary lock on options to get interval let options = self.options.lock().unwrap(); let interval = options.interval; drop(options); // support f64 seconds by multiplying then using from_millis let interval = std::time::Duration::from_millis((interval * 1000.0) as u64); crossterm::terminal::enable_raw_mode()?; // stdout().execute(crossterm::event::EnableMouseCapture)? stdout().execute(cursor::Hide)?; stdout().execute(terminal::EnterAlternateScreen)?; stdout().execute(terminal::SetTitle("apachetop"))?; loop { self.redraw()?; if crossterm::event::poll(interval)? && !self.handle_event()? { break; } } crossterm::terminal::disable_raw_mode()?; stdout().execute(terminal::LeaveAlternateScreen)?; stdout().execute(cursor::Show)?; // stdout().execute(crossterm::event::DisableMouseCapture)?; Ok(()) } fn redraw(&mut self) -> Result<(), Error>
fn handle_event(&mut self) -> Result<bool, Error> { use crossterm::event::Event::{Key, Mouse, Resize}; use crossterm::event::KeyCode::Char; use crossterm::event::{KeyEvent, KeyModifiers}; match crossterm::event::read()? { Key(KeyEvent { code: Char('q'), .. }) | Key(KeyEvent { modifiers: KeyModifiers::CONTROL, code: Char('c'), }) => return Ok(false), Key(KeyEvent { code: Char('o'), .. }) => { self.toggle_sort(); } Key(KeyEvent { code: Char('g'), .. }) => { self.toggle_group(); } Key(event) => info!("{:?}", event), Mouse(event) => info!("{:?}", event), Resize(cols, lines) => { self.lines = lines; self.cols = cols; } } Ok(true) } fn toggle_sort(&self) { self.options.lock().unwrap().toggle_sort(); } fn toggle_group(&self) { let mut o = self.options.lock().unwrap(); let group_by = o.toggle_group(); drop(o); self.ring_buffer.lock().unwrap().regroup(group_by); } fn table_line(&self, key: &GroupKey, rr: &RingBuffer, elapsed: f64) -> String { let reqs = rr.stats.global.requests as f64; format!( "{reqs:6} {reqs_per_sec:6.2} {hb:>6} {hb_per_sec:>6} {key:width$}", width = (self.cols - 30) as usize, reqs = reqs, reqs_per_sec = reqs / elapsed, hb = Self::humansize(rr.stats.global.bytes as f64), hb_per_sec = Self::humansize(rr.stats.global.bytes as f64 / elapsed), key = key ) } // All: 638924 reqs ( 182.65/sec) 3433539K ( 981.6K/sec) ( 5.4K/req) fn primary_stats_line(&self, stats: &Stats, elapsed: f64, alltime: bool) -> String { let reqs_non_zero = std::cmp::max(stats.global.requests, 1) as f64; let reqs = stats.global.requests as f64; let header = if alltime { "All:" } else { "R:" }; format!( "{header:5} {bold}{reqs:>space$}{reset} ({reqs_per_sec:6.2}/sec) {bold}{hb:>space$}{reset} ({hb_per_sec}/sec) {hb_per_req}/req", bold = Attribute::Bold, reset = Attribute::Reset, space = ((self.cols - 50) / 2) as usize, header = header, reqs = reqs, reqs_per_sec = reqs / elapsed, hb = Self::humansize(stats.global.bytes as f64), hb_per_sec = Self::humansize(stats.global.bytes as f64 / 
elapsed), hb_per_req = Self::humansize((stats.global.bytes as f64) / reqs_non_zero) ) } // 2xx: 455415 (71.3%) 3xx: 175745 (27.5%) 4xx: 7746 ( 1.2%) 5xx: 10 ( 0.0%) fn per_code_line(&self, stats: &Stats) -> String { let stats_2 = &stats.by_status_code[2]; let stats_3 = &stats.by_status_code[3]; let stats_4 = &stats.by_status_code[4]; let stats_5 = &stats.by_status_code[5]; // closure to reduce some duplication for some munging below let c = |rb_stats: &crate::stats::Counters| -> (f64, usize) { // avoid divide by zero if there's no requests yet let pct = if stats.global.requests > 0 { 100.0 * (rb_stats.requests as f64 / stats.global.requests as f64) } else { 0.0 }; // intelligent dp detection: eg 2.34%, 10.5%, 100% let dp = if (pct - 100.0).abs() < f64::EPSILON { 0 } else if pct < 10.0 { 2 } else { 1 }; (pct, dp) }; let (code_2_pct, code_2_dp) = c(stats_2); let (code_3_pct, code_3_dp) = c(stats_3); let (code_4_pct, code_4_dp) = c(stats_4); let (code_5_pct, code_5_dp) = c(stats_5); format!( "2xx: {code_2:space$} ({code_2_pct:4.code_2_dp$}%) 3xx: {code_3:space$} ({code_3_pct:4.code_3_dp$}%) 4xx: {code_4:space$} ({code_4_pct:4.code_4_dp$}%) 5xx: {code_5:space$} ({code_5_pct:4.code_5_dp$}%)", space = ((self.cols - 55) / 4) as usize, code_2 = stats_2.requests, code_2_dp = code_2_dp, code_2_pct = code_2_pct, code_3 = stats_3.requests, code_3_dp = code_3_dp, code_3_pct = code_3_pct, code_4 = stats_4.requests, code_4_dp = code_4_dp, code_4_pct = code_4_pct, code_5 = stats_5.requests, code_5_dp = code_5_dp, code_5_pct = code_5_pct, ) } fn humansize(bytes: f64) -> String { if bytes > 1073741824.0 { format!("{:6.2}G", (bytes / 1073741824.0)) } else if bytes > 1048576.0 { format!("{:6.2}M", (bytes / 1048576.00)) } else if bytes > 1024.0 { format!("{:6.2}K", (bytes / 1024.0)) } else { format!("{:6.0}B", bytes) } } } struct StartedAt(std::time::Instant); impl StartedAt { fn elapsed(&self) -> std::time::Duration { self.0.elapsed() } } impl std::fmt::Display for StartedAt { fn 
fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let duration = self.0.elapsed().as_secs(); let hours = duration / 3600; let minutes = duration % 3600 / 60; let seconds = duration % 60; write!(f, "runtime: ")?; if hours > 0 { write!(f, "{}h ", hours)?; } if hours > 0 || minutes > 0 { write!(f, "{}m ", minutes)?; } write!(f, "{}s", seconds) } }
{ let mut stdout = stdout(); stdout .queue(terminal::Clear(terminal::ClearType::All))? .queue(cursor::MoveTo(0, 0))? .queue(Print(format!("apachetop {}", CARGO_PKG_VERSION)))? .queue(cursor::MoveTo(self.cols / 2, 0))? .queue(Print(self.started_at.to_string()))? .queue(cursor::MoveTo(self.cols - 8 as u16, 0))? .queue(Print(chrono::Local::now().format("%H:%M:%S").to_string()))?; { let alltime_stats = self.alltime_stats.lock().unwrap(); let elapsed = self.started_at.elapsed().as_secs() as f64; stdout .queue(cursor::MoveTo(0, 1))? .queue(Print(self.primary_stats_line( &alltime_stats, elapsed, true, )))? .queue(cursor::MoveTo(0, 2))? .queue(Print(self.per_code_line(&alltime_stats)))?; } // mutex on alltime_stats { let mut ring_buffer = self.ring_buffer.lock().unwrap(); // TODO: better in another thread, not at display time? ring_buffer.cleanup()?; let elapsed = match ring_buffer.first() { Some(f) => { let first = chrono::DateTime::<chrono::Utc>::from(f.time); (chrono::Utc::now() - first).num_seconds() as f64 } None => 1.0, // avoid divide by zero later }; stdout .queue(cursor::MoveTo(0, 3))? .queue(Print(self.primary_stats_line( &ring_buffer.stats, elapsed, false, )))? .queue(cursor::MoveTo(0, 4))? .queue(Print(self.per_code_line(&ring_buffer.stats)))?; { let options = self.options.lock().unwrap(); stdout.queue(cursor::MoveTo(0, 6))?.queue(Print( format!( "{:width$}", &format!( " REQS REQS/S SIZE SZ/S {}", options.group.to_string() ), width = self.cols as usize ) .negative(), ))?; } // read lock on options if let Some(grouped) = &ring_buffer.grouped { use lazysort::SortedBy; // convert HashMap<GroupKey, RingBuffer> to Vec<(GroupKey, RingBuffer)>, // sort it by the RingBuffers, then lazy-sort the first n lines for display. 
for (key, ring_buffer) in grouped .iter() .filter(|(_, v)| !v.buffer.is_empty()) // filter out empty buffers to save work .collect::<Vec<(&GroupKey, &RingBuffer)>>() .iter() .sorted_by(|a, b| b.1.cmp(&a.1)) // see impl Ord for RingBuffer .take((self.lines - 7/* lines used for header */) as usize) { stdout .queue(cursor::MoveToNextLine(1))? .queue(Print(self.table_line(key, ring_buffer, elapsed)))?; } } } // mutex on ring_buffer stdout.flush()?; Ok(()) }
identifier_body
window.rs
use crate::prelude::*; use crossterm::style::{Attribute, Print, Styler}; use crossterm::{cursor, terminal, ExecutableCommand, QueueableCommand}; use std::io::{stdout, Write}; const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); pub struct Window { started_at: StartedAt, lines: u16, cols: u16, options: Arc<Mutex<Options>>, alltime_stats: Arc<Mutex<Stats>>, ring_buffer: Arc<Mutex<RingBuffer>>, } impl Window { pub fn new( options: Arc<Mutex<Options>>, alltime_stats: Arc<Mutex<Stats>>, ring_buffer: Arc<Mutex<RingBuffer>>, ) -> Self { let (cols, lines) = crossterm::terminal::size().unwrap(); let now = std::time::Instant::now(); Window { started_at: StartedAt(now), lines, cols, options, alltime_stats, ring_buffer, } } pub fn run(&mut self) -> Result<(), Error> { // temporary lock on options to get interval let options = self.options.lock().unwrap(); let interval = options.interval; drop(options); // support f64 seconds by multiplying then using from_millis let interval = std::time::Duration::from_millis((interval * 1000.0) as u64); crossterm::terminal::enable_raw_mode()?; // stdout().execute(crossterm::event::EnableMouseCapture)? stdout().execute(cursor::Hide)?; stdout().execute(terminal::EnterAlternateScreen)?; stdout().execute(terminal::SetTitle("apachetop"))?; loop { self.redraw()?; if crossterm::event::poll(interval)? && !self.handle_event()? { break; } } crossterm::terminal::disable_raw_mode()?; stdout().execute(terminal::LeaveAlternateScreen)?; stdout().execute(cursor::Show)?; // stdout().execute(crossterm::event::DisableMouseCapture)?; Ok(()) } fn redraw(&mut self) -> Result<(), Error> { let mut stdout = stdout(); stdout .queue(terminal::Clear(terminal::ClearType::All))? .queue(cursor::MoveTo(0, 0))? .queue(Print(format!("apachetop {}", CARGO_PKG_VERSION)))? .queue(cursor::MoveTo(self.cols / 2, 0))? .queue(Print(self.started_at.to_string()))? .queue(cursor::MoveTo(self.cols - 8 as u16, 0))? 
.queue(Print(chrono::Local::now().format("%H:%M:%S").to_string()))?; { let alltime_stats = self.alltime_stats.lock().unwrap(); let elapsed = self.started_at.elapsed().as_secs() as f64; stdout .queue(cursor::MoveTo(0, 1))? .queue(Print(self.primary_stats_line( &alltime_stats, elapsed, true, )))? .queue(cursor::MoveTo(0, 2))? .queue(Print(self.per_code_line(&alltime_stats)))?; } // mutex on alltime_stats { let mut ring_buffer = self.ring_buffer.lock().unwrap(); // TODO: better in another thread, not at display time? ring_buffer.cleanup()?; let elapsed = match ring_buffer.first() { Some(f) => { let first = chrono::DateTime::<chrono::Utc>::from(f.time); (chrono::Utc::now() - first).num_seconds() as f64 } None => 1.0, // avoid divide by zero later }; stdout .queue(cursor::MoveTo(0, 3))? .queue(Print(self.primary_stats_line( &ring_buffer.stats, elapsed, false, )))? .queue(cursor::MoveTo(0, 4))? .queue(Print(self.per_code_line(&ring_buffer.stats)))?; { let options = self.options.lock().unwrap(); stdout.queue(cursor::MoveTo(0, 6))?.queue(Print( format!( "{:width$}", &format!( " REQS REQS/S SIZE SZ/S {}", options.group.to_string() ), width = self.cols as usize ) .negative(), ))?; } // read lock on options if let Some(grouped) = &ring_buffer.grouped { use lazysort::SortedBy; // convert HashMap<GroupKey, RingBuffer> to Vec<(GroupKey, RingBuffer)>, // sort it by the RingBuffers, then lazy-sort the first n lines for display. for (key, ring_buffer) in grouped .iter() .filter(|(_, v)| !v.buffer.is_empty()) // filter out empty buffers to save work .collect::<Vec<(&GroupKey, &RingBuffer)>>() .iter() .sorted_by(|a, b| b.1.cmp(&a.1)) // see impl Ord for RingBuffer .take((self.lines - 7/* lines used for header */) as usize) { stdout .queue(cursor::MoveToNextLine(1))? 
.queue(Print(self.table_line(key, ring_buffer, elapsed)))?; } } } // mutex on ring_buffer stdout.flush()?; Ok(()) } fn handle_event(&mut self) -> Result<bool, Error> { use crossterm::event::Event::{Key, Mouse, Resize}; use crossterm::event::KeyCode::Char; use crossterm::event::{KeyEvent, KeyModifiers}; match crossterm::event::read()? { Key(KeyEvent { code: Char('q'), .. }) | Key(KeyEvent { modifiers: KeyModifiers::CONTROL, code: Char('c'), }) => return Ok(false), Key(KeyEvent { code: Char('o'), .. }) => { self.toggle_sort(); } Key(KeyEvent { code: Char('g'), .. }) => { self.toggle_group(); } Key(event) => info!("{:?}", event), Mouse(event) => info!("{:?}", event), Resize(cols, lines) => { self.lines = lines; self.cols = cols; } } Ok(true) } fn toggle_sort(&self) { self.options.lock().unwrap().toggle_sort(); } fn toggle_group(&self) { let mut o = self.options.lock().unwrap(); let group_by = o.toggle_group(); drop(o); self.ring_buffer.lock().unwrap().regroup(group_by); } fn table_line(&self, key: &GroupKey, rr: &RingBuffer, elapsed: f64) -> String { let reqs = rr.stats.global.requests as f64; format!( "{reqs:6} {reqs_per_sec:6.2} {hb:>6} {hb_per_sec:>6} {key:width$}", width = (self.cols - 30) as usize, reqs = reqs, reqs_per_sec = reqs / elapsed, hb = Self::humansize(rr.stats.global.bytes as f64), hb_per_sec = Self::humansize(rr.stats.global.bytes as f64 / elapsed), key = key ) } // All: 638924 reqs ( 182.65/sec) 3433539K ( 981.6K/sec) ( 5.4K/req) fn primary_stats_line(&self, stats: &Stats, elapsed: f64, alltime: bool) -> String { let reqs_non_zero = std::cmp::max(stats.global.requests, 1) as f64; let reqs = stats.global.requests as f64; let header = if alltime { "All:" } else { "R:" }; format!( "{header:5} {bold}{reqs:>space$}{reset} ({reqs_per_sec:6.2}/sec) {bold}{hb:>space$}{reset} ({hb_per_sec}/sec) {hb_per_req}/req", bold = Attribute::Bold, reset = Attribute::Reset, space = ((self.cols - 50) / 2) as usize, header = header, reqs = reqs, reqs_per_sec = reqs / 
elapsed, hb = Self::humansize(stats.global.bytes as f64), hb_per_sec = Self::humansize(stats.global.bytes as f64 / elapsed), hb_per_req = Self::humansize((stats.global.bytes as f64) / reqs_non_zero) ) } // 2xx: 455415 (71.3%) 3xx: 175745 (27.5%) 4xx: 7746 ( 1.2%) 5xx: 10 ( 0.0%) fn per_code_line(&self, stats: &Stats) -> String { let stats_2 = &stats.by_status_code[2]; let stats_3 = &stats.by_status_code[3]; let stats_4 = &stats.by_status_code[4]; let stats_5 = &stats.by_status_code[5]; // closure to reduce some duplication for some munging below let c = |rb_stats: &crate::stats::Counters| -> (f64, usize) { // avoid divide by zero if there's no requests yet let pct = if stats.global.requests > 0
else { 0.0 }; // intelligent dp detection: eg 2.34%, 10.5%, 100% let dp = if (pct - 100.0).abs() < f64::EPSILON { 0 } else if pct < 10.0 { 2 } else { 1 }; (pct, dp) }; let (code_2_pct, code_2_dp) = c(stats_2); let (code_3_pct, code_3_dp) = c(stats_3); let (code_4_pct, code_4_dp) = c(stats_4); let (code_5_pct, code_5_dp) = c(stats_5); format!( "2xx: {code_2:space$} ({code_2_pct:4.code_2_dp$}%) 3xx: {code_3:space$} ({code_3_pct:4.code_3_dp$}%) 4xx: {code_4:space$} ({code_4_pct:4.code_4_dp$}%) 5xx: {code_5:space$} ({code_5_pct:4.code_5_dp$}%)", space = ((self.cols - 55) / 4) as usize, code_2 = stats_2.requests, code_2_dp = code_2_dp, code_2_pct = code_2_pct, code_3 = stats_3.requests, code_3_dp = code_3_dp, code_3_pct = code_3_pct, code_4 = stats_4.requests, code_4_dp = code_4_dp, code_4_pct = code_4_pct, code_5 = stats_5.requests, code_5_dp = code_5_dp, code_5_pct = code_5_pct, ) } fn humansize(bytes: f64) -> String { if bytes > 1073741824.0 { format!("{:6.2}G", (bytes / 1073741824.0)) } else if bytes > 1048576.0 { format!("{:6.2}M", (bytes / 1048576.00)) } else if bytes > 1024.0 { format!("{:6.2}K", (bytes / 1024.0)) } else { format!("{:6.0}B", bytes) } } } struct StartedAt(std::time::Instant); impl StartedAt { fn elapsed(&self) -> std::time::Duration { self.0.elapsed() } } impl std::fmt::Display for StartedAt { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let duration = self.0.elapsed().as_secs(); let hours = duration / 3600; let minutes = duration % 3600 / 60; let seconds = duration % 60; write!(f, "runtime: ")?; if hours > 0 { write!(f, "{}h ", hours)?; } if hours > 0 || minutes > 0 { write!(f, "{}m ", minutes)?; } write!(f, "{}s", seconds) } }
{ 100.0 * (rb_stats.requests as f64 / stats.global.requests as f64) }
conditional_block
window.rs
use crate::prelude::*; use crossterm::style::{Attribute, Print, Styler}; use crossterm::{cursor, terminal, ExecutableCommand, QueueableCommand}; use std::io::{stdout, Write}; const CARGO_PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); pub struct Window { started_at: StartedAt, lines: u16, cols: u16, options: Arc<Mutex<Options>>, alltime_stats: Arc<Mutex<Stats>>, ring_buffer: Arc<Mutex<RingBuffer>>, } impl Window { pub fn new( options: Arc<Mutex<Options>>, alltime_stats: Arc<Mutex<Stats>>, ring_buffer: Arc<Mutex<RingBuffer>>, ) -> Self { let (cols, lines) = crossterm::terminal::size().unwrap(); let now = std::time::Instant::now(); Window { started_at: StartedAt(now), lines, cols, options, alltime_stats, ring_buffer, } } pub fn run(&mut self) -> Result<(), Error> { // temporary lock on options to get interval let options = self.options.lock().unwrap(); let interval = options.interval; drop(options); // support f64 seconds by multiplying then using from_millis let interval = std::time::Duration::from_millis((interval * 1000.0) as u64); crossterm::terminal::enable_raw_mode()?; // stdout().execute(crossterm::event::EnableMouseCapture)? stdout().execute(cursor::Hide)?; stdout().execute(terminal::EnterAlternateScreen)?; stdout().execute(terminal::SetTitle("apachetop"))?; loop { self.redraw()?; if crossterm::event::poll(interval)? && !self.handle_event()? { break; } } crossterm::terminal::disable_raw_mode()?; stdout().execute(terminal::LeaveAlternateScreen)?; stdout().execute(cursor::Show)?; // stdout().execute(crossterm::event::DisableMouseCapture)?; Ok(()) } fn redraw(&mut self) -> Result<(), Error> { let mut stdout = stdout(); stdout .queue(terminal::Clear(terminal::ClearType::All))?
.queue(Print(format!("apachetop {}", CARGO_PKG_VERSION)))? .queue(cursor::MoveTo(self.cols / 2, 0))? .queue(Print(self.started_at.to_string()))? .queue(cursor::MoveTo(self.cols - 8 as u16, 0))? .queue(Print(chrono::Local::now().format("%H:%M:%S").to_string()))?; { let alltime_stats = self.alltime_stats.lock().unwrap(); let elapsed = self.started_at.elapsed().as_secs() as f64; stdout .queue(cursor::MoveTo(0, 1))? .queue(Print(self.primary_stats_line( &alltime_stats, elapsed, true, )))? .queue(cursor::MoveTo(0, 2))? .queue(Print(self.per_code_line(&alltime_stats)))?; } // mutex on alltime_stats { let mut ring_buffer = self.ring_buffer.lock().unwrap(); // TODO: better in another thread, not at display time? ring_buffer.cleanup()?; let elapsed = match ring_buffer.first() { Some(f) => { let first = chrono::DateTime::<chrono::Utc>::from(f.time); (chrono::Utc::now() - first).num_seconds() as f64 } None => 1.0, // avoid divide by zero later }; stdout .queue(cursor::MoveTo(0, 3))? .queue(Print(self.primary_stats_line( &ring_buffer.stats, elapsed, false, )))? .queue(cursor::MoveTo(0, 4))? .queue(Print(self.per_code_line(&ring_buffer.stats)))?; { let options = self.options.lock().unwrap(); stdout.queue(cursor::MoveTo(0, 6))?.queue(Print( format!( "{:width$}", &format!( " REQS REQS/S SIZE SZ/S {}", options.group.to_string() ), width = self.cols as usize ) .negative(), ))?; } // read lock on options if let Some(grouped) = &ring_buffer.grouped { use lazysort::SortedBy; // convert HashMap<GroupKey, RingBuffer> to Vec<(GroupKey, RingBuffer)>, // sort it by the RingBuffers, then lazy-sort the first n lines for display. for (key, ring_buffer) in grouped .iter() .filter(|(_, v)| !v.buffer.is_empty()) // filter out empty buffers to save work .collect::<Vec<(&GroupKey, &RingBuffer)>>() .iter() .sorted_by(|a, b| b.1.cmp(&a.1)) // see impl Ord for RingBuffer .take((self.lines - 7/* lines used for header */) as usize) { stdout .queue(cursor::MoveToNextLine(1))? 
.queue(Print(self.table_line(key, ring_buffer, elapsed)))?; } } } // mutex on ring_buffer stdout.flush()?; Ok(()) } fn handle_event(&mut self) -> Result<bool, Error> { use crossterm::event::Event::{Key, Mouse, Resize}; use crossterm::event::KeyCode::Char; use crossterm::event::{KeyEvent, KeyModifiers}; match crossterm::event::read()? { Key(KeyEvent { code: Char('q'), .. }) | Key(KeyEvent { modifiers: KeyModifiers::CONTROL, code: Char('c'), }) => return Ok(false), Key(KeyEvent { code: Char('o'), .. }) => { self.toggle_sort(); } Key(KeyEvent { code: Char('g'), .. }) => { self.toggle_group(); } Key(event) => info!("{:?}", event), Mouse(event) => info!("{:?}", event), Resize(cols, lines) => { self.lines = lines; self.cols = cols; } } Ok(true) } fn toggle_sort(&self) { self.options.lock().unwrap().toggle_sort(); } fn toggle_group(&self) { let mut o = self.options.lock().unwrap(); let group_by = o.toggle_group(); drop(o); self.ring_buffer.lock().unwrap().regroup(group_by); } fn table_line(&self, key: &GroupKey, rr: &RingBuffer, elapsed: f64) -> String { let reqs = rr.stats.global.requests as f64; format!( "{reqs:6} {reqs_per_sec:6.2} {hb:>6} {hb_per_sec:>6} {key:width$}", width = (self.cols - 30) as usize, reqs = reqs, reqs_per_sec = reqs / elapsed, hb = Self::humansize(rr.stats.global.bytes as f64), hb_per_sec = Self::humansize(rr.stats.global.bytes as f64 / elapsed), key = key ) } // All: 638924 reqs ( 182.65/sec) 3433539K ( 981.6K/sec) ( 5.4K/req) fn primary_stats_line(&self, stats: &Stats, elapsed: f64, alltime: bool) -> String { let reqs_non_zero = std::cmp::max(stats.global.requests, 1) as f64; let reqs = stats.global.requests as f64; let header = if alltime { "All:" } else { "R:" }; format!( "{header:5} {bold}{reqs:>space$}{reset} ({reqs_per_sec:6.2}/sec) {bold}{hb:>space$}{reset} ({hb_per_sec}/sec) {hb_per_req}/req", bold = Attribute::Bold, reset = Attribute::Reset, space = ((self.cols - 50) / 2) as usize, header = header, reqs = reqs, reqs_per_sec = reqs / 
elapsed, hb = Self::humansize(stats.global.bytes as f64), hb_per_sec = Self::humansize(stats.global.bytes as f64 / elapsed), hb_per_req = Self::humansize((stats.global.bytes as f64) / reqs_non_zero) ) } // 2xx: 455415 (71.3%) 3xx: 175745 (27.5%) 4xx: 7746 ( 1.2%) 5xx: 10 ( 0.0%) fn per_code_line(&self, stats: &Stats) -> String { let stats_2 = &stats.by_status_code[2]; let stats_3 = &stats.by_status_code[3]; let stats_4 = &stats.by_status_code[4]; let stats_5 = &stats.by_status_code[5]; // closure to reduce some duplication for some munging below let c = |rb_stats: &crate::stats::Counters| -> (f64, usize) { // avoid divide by zero if there's no requests yet let pct = if stats.global.requests > 0 { 100.0 * (rb_stats.requests as f64 / stats.global.requests as f64) } else { 0.0 }; // intelligent dp detection: eg 2.34%, 10.5%, 100% let dp = if (pct - 100.0).abs() < f64::EPSILON { 0 } else if pct < 10.0 { 2 } else { 1 }; (pct, dp) }; let (code_2_pct, code_2_dp) = c(stats_2); let (code_3_pct, code_3_dp) = c(stats_3); let (code_4_pct, code_4_dp) = c(stats_4); let (code_5_pct, code_5_dp) = c(stats_5); format!( "2xx: {code_2:space$} ({code_2_pct:4.code_2_dp$}%) 3xx: {code_3:space$} ({code_3_pct:4.code_3_dp$}%) 4xx: {code_4:space$} ({code_4_pct:4.code_4_dp$}%) 5xx: {code_5:space$} ({code_5_pct:4.code_5_dp$}%)", space = ((self.cols - 55) / 4) as usize, code_2 = stats_2.requests, code_2_dp = code_2_dp, code_2_pct = code_2_pct, code_3 = stats_3.requests, code_3_dp = code_3_dp, code_3_pct = code_3_pct, code_4 = stats_4.requests, code_4_dp = code_4_dp, code_4_pct = code_4_pct, code_5 = stats_5.requests, code_5_dp = code_5_dp, code_5_pct = code_5_pct, ) } fn humansize(bytes: f64) -> String { if bytes > 1073741824.0 { format!("{:6.2}G", (bytes / 1073741824.0)) } else if bytes > 1048576.0 { format!("{:6.2}M", (bytes / 1048576.00)) } else if bytes > 1024.0 { format!("{:6.2}K", (bytes / 1024.0)) } else { format!("{:6.0}B", bytes) } } } struct StartedAt(std::time::Instant); impl 
StartedAt { fn elapsed(&self) -> std::time::Duration { self.0.elapsed() } } impl std::fmt::Display for StartedAt { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let duration = self.0.elapsed().as_secs(); let hours = duration / 3600; let minutes = duration % 3600 / 60; let seconds = duration % 60; write!(f, "runtime: ")?; if hours > 0 { write!(f, "{}h ", hours)?; } if hours > 0 || minutes > 0 { write!(f, "{}m ", minutes)?; } write!(f, "{}s", seconds) } }
.queue(cursor::MoveTo(0, 0))?
random_line_split
util.js
function
(n) { return !isNaN(parseFloat(n)) && isFinite(n); } (function($) { $.loading = function(show) { var hide = (show === false); // no parameter assumes 'show' ; false = 'hide' if(!j('#waiting').size()) { j('body').append("<div id='waiting' style='display:none;'><div class='XXui-widget-overlay'></div><div id='' class='progress ui-corner-all'>Loading...<div class='progressbar'></div></div></div>"); //hide = false; } if(hide) { j('#waiting').fadeOut(); } else { j('#waiting .progressbar').progressbar({value: false}); if(!j('#waiting').is(":visible")) { j('#waiting').fadeIn(); } } }; $.fn.ignoreEnter = function() { $(this).keypress(function(e) { if(e.keyCode == 10 || e.keyCode == 13) { e.preventDefault(); } } ); }; $.fn.changeup = function(callback, timeout) { var target = this; if(!timeout) { timeout = 500; } $(this).keyup(function() { clearTimeout($(this).data('keyup_timeout_id')); $(this).data('keyup_timeout_id', setTimeout(function() { callback(target); }, timeout)); }); // also detect paste $(this).bind('paste',function() { clearTimeout($(this).data('keyup_timeout_id')); callback($(this)); }); $(this).change(function() { clearTimeout($(this).data('keyup_timeout_id')); callback($(this)); }); }; $.alert = function(msg,title) { // hide pleasewait abruptly (not animated) j.hidespin(true); j('#alert').html(msg); j('#alert').dialog({modal: true, title: title, resizable: false, width: 400, draggable: false, buttons: [ {text: "OK", click: function() { j('#alert').dialog('close'); } } ] }); }; $.fn.modalcenter = function() { // Also change the height if it's too tall, ie content added. j(this).unbind('dialogopen.modalcenter'); j(this).bind('dialogopen.modalcenter', function() { // call after open. since math matters. // //var buffer = 10; var dialogHeight = j(this).closest('.ui-dialog').height(); // if modal not opened yet. // Reset modal height to 'auto' so there's no scrollbars. so height() is accurate to real height.... 
j(this).height('auto'); var windowHeight = $(window).height(); var modalHeight = j(this).height(); // consider titlebar var titleHeight = j(this).parent().find('.ui-dialog-titlebar').height(); // consider any buttons, too. var buttonsHeight = j(this).parent().find('.ui-dialog-buttonpane').height(); var buffer = 50; if(titleHeight) { buffer += titleHeight; } if(buttonsHeight) { buffer += buttonsHeight; } if(windowHeight < modalHeight + buffer) { j(this).dialog({height: windowHeight - buffer }); } else { j(this).dialog({height: 'auto'}); //j(this).height('auto'); } j(this).modaloption('width', j(this).parent().width()); // ie7 title bar bug fix when width: auto; // // // ONLY show vert scrollbar. j(this).css({overflow: 'hidden', overflowY: 'auto'}); // Also adjust width so no horiz scrollbars, in case new content sticks over edge... //ji(this).width'div.ui-dialog').width(j('div.ui-dialog').get(0).scrollWidth+5); j(this).modaloption('position','center'); }); if($(this).modalopened()) { j(this).trigger('dialogopen.modalcenter'); } }; $.fn.modalopened = function() // saner. initialized may not mean open. async lag. { return $(this).parent('.ui-dialog').size() && $(this).dialog('isOpen'); }; $.fn.modaloption = function(key, value) { //console.log("SETTING="+key+"="+value); var container = this; if($(container).modalopened()) { //console.log("TRIGOPT="+key+", V="+value); $(container).dialog('option',key,value); } else { // console.log("DELAYED OPT="+key+", V="+value); } j(container).bind('dialogopen', function() { j(container).dialog('option', key, value); }); }; $.fn.modal = function(e) // called on LINK, so we get url properly.... { var title = j(this).prop('title'); if(!title) { title = j(this).text(); } var href = j(this).prop('href'); e.stopPropagation(); j('#modal').load(href, null, function(response) { //console.log(response); j('#modal').dialog({ width: 'auto', // should keep width w/o scrollbar? 
title: title, modal: true, resizable: false, draggable: false, open: function(event, ui) { j('#modal').modalcenter(); j('#modal').trigger('modalready'); }, buttons: null }); //console.log(j('#modal .resizable')); j('#modal .resizable').resize(function(e) { // need to put on inner container so scrollbar doesn't trigger. //console.log("RESIZED!"); var top = j('#modal').scrollTop(); j('#modal').modalcenter(); // Keep scroll position the same. j('#modal').scrollTop(top); }); j('.ui-widget-overlay').click(function() { j('#modal').dialog('close'); }); }) return false; }; $.fn.modalready = function(callback) // Call if already opened, since won't get called otherwise. { if(typeof callback == 'function') // register at same time. { j(this).bind('modalready', callback); } if(j(this).modalopened()) { j(this).trigger('modalready'); } }; $.fn.closemodal = $.fn.modalclose = function(callback) { var container = this; if(callback && !isNumeric(callback)) { $(container).bind('dialogclose', function() { if(typeof callback == 'function') { callback(); } else if (typeof callback == 'string') { // from controller. eval(callback); } }); } $(container).dialog('close'); $(container).dialog('destroy'); $(container).html(''); // clear content. //e.stopPropagation(); return false; }; $.fn.formerror = function(msg, before) { if(!msg) { msg = 'Missing Information'; } var id = j(this).attr('id'); var errorid = id+"_error"; if(!j("#"+errorid).size()) { var container = "<div id='"+errorid+"' class='formerror'></div>"; if(before) { j('#'+id).before(container); } else { // Place as last sibling, since may be stuff to right. 
j('#'+id).parent().append(container); } } //console.log(msg); j('#'+errorid).html(msg).show(); return false; }; $.fn.originalShow = $.fn.show; $.fn.originalHide = $.fn.hide; $.fn.show = function(speed, call) { $(this).trigger('show'); return $(this).originalShow(speed,call); }; $.fn.hide = function(speed, call) { $(this).trigger('hide'); return $(this).originalHide(speed,call); }; $.fn.ghostable = function(text) { var original = $(this); var overlay = original.clone(); overlay.attr('id', original.attr('id')+"_clone"); overlay.attr('name', '');//original.attr('name')+"_ghostable"); overlay.addClass('ghost'); original.after(overlay); overlay.val(text); overlay.click(function() { overlay.hide(); original.show().change(); original.focus(); }); overlay.select(function() { overlay.hide(); original.show().change(); original.focus(); }); overlay.focus(function() { overlay.hide(); original.show().change(); original.focus(); }); original.blur(function() { if(!original.val()) { overlay.show().change(); original.hide(); } else { original.show(); overlay.hide(); } }); // IF YOU NEED TO CHANGE PROGRAMATICALLY, CALL blur(); SINCE ABOVE CALLS CHANGE() to trigger any previewing //original.change(function() { if(!original.val()) { overlay.show(); original.hide(); } else { original.show(); overlay.hide(); } }); overlay.bind('paste', function() { overlay.val(''); setTimeout(function() { original.val(overlay.val()).show().change(); overlay.val(text).hide(); }, 100) }); // if(original.val()) { overlay.hide(); } else { original.hide(); } }; $.fn.ghostable2 = function(text) // Disabled element that won't submit or be gathered unless custom text. 
{ $(this).wrap("<span class='ghostable_wrapper'/>"); var $original = $(this); var $parent = $original.closest('span.ghostable_wrapper'); var $overlay = $(this).clone(); $overlay.val(text); //$("<div class='ghostable_overlay'>"+text+"</div>"); // style the overlay $overlay.css({ // position the overlay in the same real estate as the original parent element position: "absolute" , top: $parent.position().top , left: $parent.position().left , width: $parent.outerWidth() , height: $parent.outerHeight() , zIndex: 10000 // IE needs a color in order for the layer to respond to mouse events , backgroundColor: "#fff" // set the opacity to 0, so the element is transparent , opacity: 0 }) // attach the click behavior .click(function (){ $self.show(); // Show $(this).hide(); // Hide me, focus on original. // trigger the original event handler //return $self.trigger("click"); }); // add the overlay to the page $parent.append($overlay); }; $.spin = function() { var container = $('body #spin'); if(!container.size()) { container = $("<div id='spin' style='display: none;'></div>"); $('body').append(container); $(container).click(function() { $.hidespin(); }); } $(container).show(); $(container).spin('large'); }; $.unspin = $.hidespin = function(immediate) { if(immediate) { $('body #spin').hide(); } else { $('body #spin').fadeOut('slow'); } }; $.fn.spin = function(opts, color) { var defaults = { color: '#FFF' }; var presets = { "tiny": { lines: 8, length: 2, width: 2, radius: 3 }, "small": { lines: 8, length: 4, width: 3, radius: 5 }, "large": { lines: 10, length: 8, width: 4, radius: 8 } }; if (typeof Spinner != 'undefined') { return this.each(function() { var $this = $(this), data = $this.data(); if (data.spinner) { data.spinner.stop(); delete data.spinner; } if (opts !== false) { if (typeof opts === "string") { if (opts in presets) { opts = $.extend(defaults, presets[opts]); } else { opts = defaults;//{}; } if (color) { opts.color = color; } } data.spinner = new 
Spinner($.extend({color: $this.css('color')}, opts)).spin(this); } }); } else { throw "Spinner class not available."; } }; $(document).ready(function() { // Remove all form error content. j('.formerror').html('').hide(); j(document).on('click', 'a.modal', function(e) { j(this).modal(e); return false; }); // Handle forms based on modal // so this works on modals even after submitted first time around. j(document).on('submit', '#modal form:not(.nomodal)', function(e) { // as a submit check, respects onSubmit for verify fields,etc. //console.log("AJAX SUBMIT"); j(this).ajaxSubmit({ target: '#modal' }); return false; // prevent default submit to whole page! }); j('#modal form').require_fields(); // Hide flash messages once a form is submitted. j('form').submit(function() { j('#flashMessage').hide(); }); }); String.prototype.nl2br = function() { return this.replace(/\n/g, "<br/>\n"); }; })(jQuery);
isNumeric
identifier_name
util.js
function isNumeric(n) { return !isNaN(parseFloat(n)) && isFinite(n); } (function($) { $.loading = function(show) { var hide = (show === false); // no parameter assumes 'show' ; false = 'hide' if(!j('#waiting').size()) { j('body').append("<div id='waiting' style='display:none;'><div class='XXui-widget-overlay'></div><div id='' class='progress ui-corner-all'>Loading...<div class='progressbar'></div></div></div>"); //hide = false; } if(hide) { j('#waiting').fadeOut(); } else { j('#waiting .progressbar').progressbar({value: false}); if(!j('#waiting').is(":visible")) { j('#waiting').fadeIn(); } } }; $.fn.ignoreEnter = function() { $(this).keypress(function(e) { if(e.keyCode == 10 || e.keyCode == 13) { e.preventDefault(); } } ); }; $.fn.changeup = function(callback, timeout) { var target = this; if(!timeout) { timeout = 500; } $(this).keyup(function() { clearTimeout($(this).data('keyup_timeout_id')); $(this).data('keyup_timeout_id', setTimeout(function() { callback(target); }, timeout)); }); // also detect paste $(this).bind('paste',function() { clearTimeout($(this).data('keyup_timeout_id')); callback($(this)); }); $(this).change(function() { clearTimeout($(this).data('keyup_timeout_id')); callback($(this)); }); }; $.alert = function(msg,title) { // hide pleasewait abruptly (not animated) j.hidespin(true); j('#alert').html(msg); j('#alert').dialog({modal: true, title: title, resizable: false, width: 400, draggable: false, buttons: [ {text: "OK", click: function() { j('#alert').dialog('close'); } } ] }); }; $.fn.modalcenter = function() { // Also change the height if it's too tall, ie content added. j(this).unbind('dialogopen.modalcenter'); j(this).bind('dialogopen.modalcenter', function() { // call after open. since math matters. // //var buffer = 10; var dialogHeight = j(this).closest('.ui-dialog').height(); // if modal not opened yet. // Reset modal height to 'auto' so there's no scrollbars. so height() is accurate to real height.... 
j(this).height('auto'); var windowHeight = $(window).height(); var modalHeight = j(this).height(); // consider titlebar var titleHeight = j(this).parent().find('.ui-dialog-titlebar').height(); // consider any buttons, too. var buttonsHeight = j(this).parent().find('.ui-dialog-buttonpane').height(); var buffer = 50; if(titleHeight) { buffer += titleHeight; } if(buttonsHeight) { buffer += buttonsHeight; } if(windowHeight < modalHeight + buffer) { j(this).dialog({height: windowHeight - buffer }); } else { j(this).dialog({height: 'auto'}); //j(this).height('auto'); } j(this).modaloption('width', j(this).parent().width()); // ie7 title bar bug fix when width: auto; // // // ONLY show vert scrollbar. j(this).css({overflow: 'hidden', overflowY: 'auto'}); // Also adjust width so no horiz scrollbars, in case new content sticks over edge... //ji(this).width'div.ui-dialog').width(j('div.ui-dialog').get(0).scrollWidth+5); j(this).modaloption('position','center'); }); if($(this).modalopened()) { j(this).trigger('dialogopen.modalcenter'); } }; $.fn.modalopened = function() // saner. initialized may not mean open. async lag. { return $(this).parent('.ui-dialog').size() && $(this).dialog('isOpen'); }; $.fn.modaloption = function(key, value) { //console.log("SETTING="+key+"="+value); var container = this; if($(container).modalopened()) { //console.log("TRIGOPT="+key+", V="+value); $(container).dialog('option',key,value); } else { // console.log("DELAYED OPT="+key+", V="+value); } j(container).bind('dialogopen', function() { j(container).dialog('option', key, value); }); }; $.fn.modal = function(e) // called on LINK, so we get url properly.... { var title = j(this).prop('title'); if(!title) { title = j(this).text(); } var href = j(this).prop('href'); e.stopPropagation(); j('#modal').load(href, null, function(response) { //console.log(response); j('#modal').dialog({ width: 'auto', // should keep width w/o scrollbar? 
title: title, modal: true, resizable: false, draggable: false, open: function(event, ui) { j('#modal').modalcenter(); j('#modal').trigger('modalready'); }, buttons: null }); //console.log(j('#modal .resizable')); j('#modal .resizable').resize(function(e) { // need to put on inner container so scrollbar doesn't trigger. //console.log("RESIZED!"); var top = j('#modal').scrollTop(); j('#modal').modalcenter(); // Keep scroll position the same. j('#modal').scrollTop(top); }); j('.ui-widget-overlay').click(function() { j('#modal').dialog('close'); }); }) return false; }; $.fn.modalready = function(callback) // Call if already opened, since won't get called otherwise. { if(typeof callback == 'function') // register at same time. { j(this).bind('modalready', callback); } if(j(this).modalopened()) { j(this).trigger('modalready'); } }; $.fn.closemodal = $.fn.modalclose = function(callback) { var container = this; if(callback && !isNumeric(callback)) { $(container).bind('dialogclose', function() { if(typeof callback == 'function') { callback(); } else if (typeof callback == 'string') { // from controller. eval(callback); } }); } $(container).dialog('close'); $(container).dialog('destroy'); $(container).html(''); // clear content. //e.stopPropagation(); return false; }; $.fn.formerror = function(msg, before) { if(!msg) { msg = 'Missing Information'; } var id = j(this).attr('id'); var errorid = id+"_error"; if(!j("#"+errorid).size()) { var container = "<div id='"+errorid+"' class='formerror'></div>"; if(before) { j('#'+id).before(container); } else { // Place as last sibling, since may be stuff to right. 
j('#'+id).parent().append(container); } } //console.log(msg); j('#'+errorid).html(msg).show(); return false; }; $.fn.originalShow = $.fn.show; $.fn.originalHide = $.fn.hide; $.fn.show = function(speed, call) { $(this).trigger('show'); return $(this).originalShow(speed,call); }; $.fn.hide = function(speed, call) { $(this).trigger('hide'); return $(this).originalHide(speed,call); }; $.fn.ghostable = function(text) { var original = $(this); var overlay = original.clone(); overlay.attr('id', original.attr('id')+"_clone"); overlay.attr('name', '');//original.attr('name')+"_ghostable"); overlay.addClass('ghost'); original.after(overlay); overlay.val(text); overlay.click(function() { overlay.hide(); original.show().change(); original.focus(); });
overlay.select(function() { overlay.hide(); original.show().change(); original.focus(); }); overlay.focus(function() { overlay.hide(); original.show().change(); original.focus(); }); original.blur(function() { if(!original.val()) { overlay.show().change(); original.hide(); } else { original.show(); overlay.hide(); } }); // IF YOU NEED TO CHANGE PROGRAMATICALLY, CALL blur(); SINCE ABOVE CALLS CHANGE() to trigger any previewing //original.change(function() { if(!original.val()) { overlay.show(); original.hide(); } else { original.show(); overlay.hide(); } }); overlay.bind('paste', function() { overlay.val(''); setTimeout(function() { original.val(overlay.val()).show().change(); overlay.val(text).hide(); }, 100) }); // if(original.val()) { overlay.hide(); } else { original.hide(); } }; $.fn.ghostable2 = function(text) // Disabled element that won't submit or be gathered unless custom text. { $(this).wrap("<span class='ghostable_wrapper'/>"); var $original = $(this); var $parent = $original.closest('span.ghostable_wrapper'); var $overlay = $(this).clone(); $overlay.val(text); //$("<div class='ghostable_overlay'>"+text+"</div>"); // style the overlay $overlay.css({ // position the overlay in the same real estate as the original parent element position: "absolute" , top: $parent.position().top , left: $parent.position().left , width: $parent.outerWidth() , height: $parent.outerHeight() , zIndex: 10000 // IE needs a color in order for the layer to respond to mouse events , backgroundColor: "#fff" // set the opacity to 0, so the element is transparent , opacity: 0 }) // attach the click behavior .click(function (){ $self.show(); // Show $(this).hide(); // Hide me, focus on original. 
// trigger the original event handler //return $self.trigger("click"); }); // add the overlay to the page $parent.append($overlay); }; $.spin = function() { var container = $('body #spin'); if(!container.size()) { container = $("<div id='spin' style='display: none;'></div>"); $('body').append(container); $(container).click(function() { $.hidespin(); }); } $(container).show(); $(container).spin('large'); }; $.unspin = $.hidespin = function(immediate) { if(immediate) { $('body #spin').hide(); } else { $('body #spin').fadeOut('slow'); } }; $.fn.spin = function(opts, color) { var defaults = { color: '#FFF' }; var presets = { "tiny": { lines: 8, length: 2, width: 2, radius: 3 }, "small": { lines: 8, length: 4, width: 3, radius: 5 }, "large": { lines: 10, length: 8, width: 4, radius: 8 } }; if (typeof Spinner != 'undefined') { return this.each(function() { var $this = $(this), data = $this.data(); if (data.spinner) { data.spinner.stop(); delete data.spinner; } if (opts !== false) { if (typeof opts === "string") { if (opts in presets) { opts = $.extend(defaults, presets[opts]); } else { opts = defaults;//{}; } if (color) { opts.color = color; } } data.spinner = new Spinner($.extend({color: $this.css('color')}, opts)).spin(this); } }); } else { throw "Spinner class not available."; } }; $(document).ready(function() { // Remove all form error content. j('.formerror').html('').hide(); j(document).on('click', 'a.modal', function(e) { j(this).modal(e); return false; }); // Handle forms based on modal // so this works on modals even after submitted first time around. j(document).on('submit', '#modal form:not(.nomodal)', function(e) { // as a submit check, respects onSubmit for verify fields,etc. //console.log("AJAX SUBMIT"); j(this).ajaxSubmit({ target: '#modal' }); return false; // prevent default submit to whole page! }); j('#modal form').require_fields(); // Hide flash messages once a form is submitted. 
j('form').submit(function() { j('#flashMessage').hide(); }); }); String.prototype.nl2br = function() { return this.replace(/\n/g, "<br/>\n"); }; })(jQuery);
random_line_split
util.js
function isNumeric(n) { return !isNaN(parseFloat(n)) && isFinite(n); } (function($) { $.loading = function(show) { var hide = (show === false); // no parameter assumes 'show' ; false = 'hide' if(!j('#waiting').size()) { j('body').append("<div id='waiting' style='display:none;'><div class='XXui-widget-overlay'></div><div id='' class='progress ui-corner-all'>Loading...<div class='progressbar'></div></div></div>"); //hide = false; } if(hide) { j('#waiting').fadeOut(); } else { j('#waiting .progressbar').progressbar({value: false}); if(!j('#waiting').is(":visible")) { j('#waiting').fadeIn(); } } }; $.fn.ignoreEnter = function() { $(this).keypress(function(e) { if(e.keyCode == 10 || e.keyCode == 13) { e.preventDefault(); } } ); }; $.fn.changeup = function(callback, timeout) { var target = this; if(!timeout) { timeout = 500; } $(this).keyup(function() { clearTimeout($(this).data('keyup_timeout_id')); $(this).data('keyup_timeout_id', setTimeout(function() { callback(target); }, timeout)); }); // also detect paste $(this).bind('paste',function() { clearTimeout($(this).data('keyup_timeout_id')); callback($(this)); }); $(this).change(function() { clearTimeout($(this).data('keyup_timeout_id')); callback($(this)); }); }; $.alert = function(msg,title) { // hide pleasewait abruptly (not animated) j.hidespin(true); j('#alert').html(msg); j('#alert').dialog({modal: true, title: title, resizable: false, width: 400, draggable: false, buttons: [ {text: "OK", click: function() { j('#alert').dialog('close'); } } ] }); }; $.fn.modalcenter = function() { // Also change the height if it's too tall, ie content added. j(this).unbind('dialogopen.modalcenter'); j(this).bind('dialogopen.modalcenter', function() { // call after open. since math matters. // //var buffer = 10; var dialogHeight = j(this).closest('.ui-dialog').height(); // if modal not opened yet. // Reset modal height to 'auto' so there's no scrollbars. so height() is accurate to real height.... 
j(this).height('auto'); var windowHeight = $(window).height(); var modalHeight = j(this).height(); // consider titlebar var titleHeight = j(this).parent().find('.ui-dialog-titlebar').height(); // consider any buttons, too. var buttonsHeight = j(this).parent().find('.ui-dialog-buttonpane').height(); var buffer = 50; if(titleHeight) { buffer += titleHeight; } if(buttonsHeight) { buffer += buttonsHeight; } if(windowHeight < modalHeight + buffer) { j(this).dialog({height: windowHeight - buffer }); } else { j(this).dialog({height: 'auto'}); //j(this).height('auto'); } j(this).modaloption('width', j(this).parent().width()); // ie7 title bar bug fix when width: auto; // // // ONLY show vert scrollbar. j(this).css({overflow: 'hidden', overflowY: 'auto'}); // Also adjust width so no horiz scrollbars, in case new content sticks over edge... //ji(this).width'div.ui-dialog').width(j('div.ui-dialog').get(0).scrollWidth+5); j(this).modaloption('position','center'); }); if($(this).modalopened()) { j(this).trigger('dialogopen.modalcenter'); } }; $.fn.modalopened = function() // saner. initialized may not mean open. async lag. { return $(this).parent('.ui-dialog').size() && $(this).dialog('isOpen'); }; $.fn.modaloption = function(key, value) { //console.log("SETTING="+key+"="+value); var container = this; if($(container).modalopened()) { //console.log("TRIGOPT="+key+", V="+value); $(container).dialog('option',key,value); } else { // console.log("DELAYED OPT="+key+", V="+value); } j(container).bind('dialogopen', function() { j(container).dialog('option', key, value); }); }; $.fn.modal = function(e) // called on LINK, so we get url properly.... { var title = j(this).prop('title'); if(!title) { title = j(this).text(); } var href = j(this).prop('href'); e.stopPropagation(); j('#modal').load(href, null, function(response) { //console.log(response); j('#modal').dialog({ width: 'auto', // should keep width w/o scrollbar? 
title: title, modal: true, resizable: false, draggable: false, open: function(event, ui) { j('#modal').modalcenter(); j('#modal').trigger('modalready'); }, buttons: null }); //console.log(j('#modal .resizable')); j('#modal .resizable').resize(function(e) { // need to put on inner container so scrollbar doesn't trigger. //console.log("RESIZED!"); var top = j('#modal').scrollTop(); j('#modal').modalcenter(); // Keep scroll position the same. j('#modal').scrollTop(top); }); j('.ui-widget-overlay').click(function() { j('#modal').dialog('close'); }); }) return false; }; $.fn.modalready = function(callback) // Call if already opened, since won't get called otherwise. { if(typeof callback == 'function') // register at same time. { j(this).bind('modalready', callback); } if(j(this).modalopened()) { j(this).trigger('modalready'); } }; $.fn.closemodal = $.fn.modalclose = function(callback) { var container = this; if(callback && !isNumeric(callback)) { $(container).bind('dialogclose', function() { if(typeof callback == 'function') { callback(); } else if (typeof callback == 'string') { // from controller. eval(callback); } }); } $(container).dialog('close'); $(container).dialog('destroy'); $(container).html(''); // clear content. //e.stopPropagation(); return false; }; $.fn.formerror = function(msg, before) { if(!msg) { msg = 'Missing Information'; } var id = j(this).attr('id'); var errorid = id+"_error"; if(!j("#"+errorid).size()) { var container = "<div id='"+errorid+"' class='formerror'></div>"; if(before) { j('#'+id).before(container); } else { // Place as last sibling, since may be stuff to right. 
j('#'+id).parent().append(container); } } //console.log(msg); j('#'+errorid).html(msg).show(); return false; }; $.fn.originalShow = $.fn.show; $.fn.originalHide = $.fn.hide; $.fn.show = function(speed, call) { $(this).trigger('show'); return $(this).originalShow(speed,call); }; $.fn.hide = function(speed, call) { $(this).trigger('hide'); return $(this).originalHide(speed,call); }; $.fn.ghostable = function(text) { var original = $(this); var overlay = original.clone(); overlay.attr('id', original.attr('id')+"_clone"); overlay.attr('name', '');//original.attr('name')+"_ghostable"); overlay.addClass('ghost'); original.after(overlay); overlay.val(text); overlay.click(function() { overlay.hide(); original.show().change(); original.focus(); }); overlay.select(function() { overlay.hide(); original.show().change(); original.focus(); }); overlay.focus(function() { overlay.hide(); original.show().change(); original.focus(); }); original.blur(function() { if(!original.val()) { overlay.show().change(); original.hide(); } else { original.show(); overlay.hide(); } }); // IF YOU NEED TO CHANGE PROGRAMATICALLY, CALL blur(); SINCE ABOVE CALLS CHANGE() to trigger any previewing //original.change(function() { if(!original.val()) { overlay.show(); original.hide(); } else { original.show(); overlay.hide(); } }); overlay.bind('paste', function() { overlay.val(''); setTimeout(function() { original.val(overlay.val()).show().change(); overlay.val(text).hide(); }, 100) }); // if(original.val()) { overlay.hide(); } else { original.hide(); } }; $.fn.ghostable2 = function(text) // Disabled element that won't submit or be gathered unless custom text. 
{ $(this).wrap("<span class='ghostable_wrapper'/>"); var $original = $(this); var $parent = $original.closest('span.ghostable_wrapper'); var $overlay = $(this).clone(); $overlay.val(text); //$("<div class='ghostable_overlay'>"+text+"</div>"); // style the overlay $overlay.css({ // position the overlay in the same real estate as the original parent element position: "absolute" , top: $parent.position().top , left: $parent.position().left , width: $parent.outerWidth() , height: $parent.outerHeight() , zIndex: 10000 // IE needs a color in order for the layer to respond to mouse events , backgroundColor: "#fff" // set the opacity to 0, so the element is transparent , opacity: 0 }) // attach the click behavior .click(function (){ $self.show(); // Show $(this).hide(); // Hide me, focus on original. // trigger the original event handler //return $self.trigger("click"); }); // add the overlay to the page $parent.append($overlay); }; $.spin = function() { var container = $('body #spin'); if(!container.size()) { container = $("<div id='spin' style='display: none;'></div>"); $('body').append(container); $(container).click(function() { $.hidespin(); }); } $(container).show(); $(container).spin('large'); }; $.unspin = $.hidespin = function(immediate) { if(immediate) { $('body #spin').hide(); } else { $('body #spin').fadeOut('slow'); } }; $.fn.spin = function(opts, color) { var defaults = { color: '#FFF' }; var presets = { "tiny": { lines: 8, length: 2, width: 2, radius: 3 }, "small": { lines: 8, length: 4, width: 3, radius: 5 }, "large": { lines: 10, length: 8, width: 4, radius: 8 } }; if (typeof Spinner != 'undefined') { return this.each(function() { var $this = $(this), data = $this.data(); if (data.spinner) { data.spinner.stop(); delete data.spinner; } if (opts !== false) { if (typeof opts === "string") { if (opts in presets) { opts = $.extend(defaults, presets[opts]); } else
if (color) { opts.color = color; } } data.spinner = new Spinner($.extend({color: $this.css('color')}, opts)).spin(this); } }); } else { throw "Spinner class not available."; } }; $(document).ready(function() { // Remove all form error content. j('.formerror').html('').hide(); j(document).on('click', 'a.modal', function(e) { j(this).modal(e); return false; }); // Handle forms based on modal // so this works on modals even after submitted first time around. j(document).on('submit', '#modal form:not(.nomodal)', function(e) { // as a submit check, respects onSubmit for verify fields,etc. //console.log("AJAX SUBMIT"); j(this).ajaxSubmit({ target: '#modal' }); return false; // prevent default submit to whole page! }); j('#modal form').require_fields(); // Hide flash messages once a form is submitted. j('form').submit(function() { j('#flashMessage').hide(); }); }); String.prototype.nl2br = function() { return this.replace(/\n/g, "<br/>\n"); }; })(jQuery);
{ opts = defaults;//{}; }
conditional_block
util.js
function isNumeric(n)
(function($) { $.loading = function(show) { var hide = (show === false); // no parameter assumes 'show' ; false = 'hide' if(!j('#waiting').size()) { j('body').append("<div id='waiting' style='display:none;'><div class='XXui-widget-overlay'></div><div id='' class='progress ui-corner-all'>Loading...<div class='progressbar'></div></div></div>"); //hide = false; } if(hide) { j('#waiting').fadeOut(); } else { j('#waiting .progressbar').progressbar({value: false}); if(!j('#waiting').is(":visible")) { j('#waiting').fadeIn(); } } }; $.fn.ignoreEnter = function() { $(this).keypress(function(e) { if(e.keyCode == 10 || e.keyCode == 13) { e.preventDefault(); } } ); }; $.fn.changeup = function(callback, timeout) { var target = this; if(!timeout) { timeout = 500; } $(this).keyup(function() { clearTimeout($(this).data('keyup_timeout_id')); $(this).data('keyup_timeout_id', setTimeout(function() { callback(target); }, timeout)); }); // also detect paste $(this).bind('paste',function() { clearTimeout($(this).data('keyup_timeout_id')); callback($(this)); }); $(this).change(function() { clearTimeout($(this).data('keyup_timeout_id')); callback($(this)); }); }; $.alert = function(msg,title) { // hide pleasewait abruptly (not animated) j.hidespin(true); j('#alert').html(msg); j('#alert').dialog({modal: true, title: title, resizable: false, width: 400, draggable: false, buttons: [ {text: "OK", click: function() { j('#alert').dialog('close'); } } ] }); }; $.fn.modalcenter = function() { // Also change the height if it's too tall, ie content added. j(this).unbind('dialogopen.modalcenter'); j(this).bind('dialogopen.modalcenter', function() { // call after open. since math matters. // //var buffer = 10; var dialogHeight = j(this).closest('.ui-dialog').height(); // if modal not opened yet. // Reset modal height to 'auto' so there's no scrollbars. so height() is accurate to real height.... 
j(this).height('auto'); var windowHeight = $(window).height(); var modalHeight = j(this).height(); // consider titlebar var titleHeight = j(this).parent().find('.ui-dialog-titlebar').height(); // consider any buttons, too. var buttonsHeight = j(this).parent().find('.ui-dialog-buttonpane').height(); var buffer = 50; if(titleHeight) { buffer += titleHeight; } if(buttonsHeight) { buffer += buttonsHeight; } if(windowHeight < modalHeight + buffer) { j(this).dialog({height: windowHeight - buffer }); } else { j(this).dialog({height: 'auto'}); //j(this).height('auto'); } j(this).modaloption('width', j(this).parent().width()); // ie7 title bar bug fix when width: auto; // // // ONLY show vert scrollbar. j(this).css({overflow: 'hidden', overflowY: 'auto'}); // Also adjust width so no horiz scrollbars, in case new content sticks over edge... //ji(this).width'div.ui-dialog').width(j('div.ui-dialog').get(0).scrollWidth+5); j(this).modaloption('position','center'); }); if($(this).modalopened()) { j(this).trigger('dialogopen.modalcenter'); } }; $.fn.modalopened = function() // saner. initialized may not mean open. async lag. { return $(this).parent('.ui-dialog').size() && $(this).dialog('isOpen'); }; $.fn.modaloption = function(key, value) { //console.log("SETTING="+key+"="+value); var container = this; if($(container).modalopened()) { //console.log("TRIGOPT="+key+", V="+value); $(container).dialog('option',key,value); } else { // console.log("DELAYED OPT="+key+", V="+value); } j(container).bind('dialogopen', function() { j(container).dialog('option', key, value); }); }; $.fn.modal = function(e) // called on LINK, so we get url properly.... { var title = j(this).prop('title'); if(!title) { title = j(this).text(); } var href = j(this).prop('href'); e.stopPropagation(); j('#modal').load(href, null, function(response) { //console.log(response); j('#modal').dialog({ width: 'auto', // should keep width w/o scrollbar? 
title: title, modal: true, resizable: false, draggable: false, open: function(event, ui) { j('#modal').modalcenter(); j('#modal').trigger('modalready'); }, buttons: null }); //console.log(j('#modal .resizable')); j('#modal .resizable').resize(function(e) { // need to put on inner container so scrollbar doesn't trigger. //console.log("RESIZED!"); var top = j('#modal').scrollTop(); j('#modal').modalcenter(); // Keep scroll position the same. j('#modal').scrollTop(top); }); j('.ui-widget-overlay').click(function() { j('#modal').dialog('close'); }); }) return false; }; $.fn.modalready = function(callback) // Call if already opened, since won't get called otherwise. { if(typeof callback == 'function') // register at same time. { j(this).bind('modalready', callback); } if(j(this).modalopened()) { j(this).trigger('modalready'); } }; $.fn.closemodal = $.fn.modalclose = function(callback) { var container = this; if(callback && !isNumeric(callback)) { $(container).bind('dialogclose', function() { if(typeof callback == 'function') { callback(); } else if (typeof callback == 'string') { // from controller. eval(callback); } }); } $(container).dialog('close'); $(container).dialog('destroy'); $(container).html(''); // clear content. //e.stopPropagation(); return false; }; $.fn.formerror = function(msg, before) { if(!msg) { msg = 'Missing Information'; } var id = j(this).attr('id'); var errorid = id+"_error"; if(!j("#"+errorid).size()) { var container = "<div id='"+errorid+"' class='formerror'></div>"; if(before) { j('#'+id).before(container); } else { // Place as last sibling, since may be stuff to right. 
j('#'+id).parent().append(container); } } //console.log(msg); j('#'+errorid).html(msg).show(); return false; }; $.fn.originalShow = $.fn.show; $.fn.originalHide = $.fn.hide; $.fn.show = function(speed, call) { $(this).trigger('show'); return $(this).originalShow(speed,call); }; $.fn.hide = function(speed, call) { $(this).trigger('hide'); return $(this).originalHide(speed,call); }; $.fn.ghostable = function(text) { var original = $(this); var overlay = original.clone(); overlay.attr('id', original.attr('id')+"_clone"); overlay.attr('name', '');//original.attr('name')+"_ghostable"); overlay.addClass('ghost'); original.after(overlay); overlay.val(text); overlay.click(function() { overlay.hide(); original.show().change(); original.focus(); }); overlay.select(function() { overlay.hide(); original.show().change(); original.focus(); }); overlay.focus(function() { overlay.hide(); original.show().change(); original.focus(); }); original.blur(function() { if(!original.val()) { overlay.show().change(); original.hide(); } else { original.show(); overlay.hide(); } }); // IF YOU NEED TO CHANGE PROGRAMATICALLY, CALL blur(); SINCE ABOVE CALLS CHANGE() to trigger any previewing //original.change(function() { if(!original.val()) { overlay.show(); original.hide(); } else { original.show(); overlay.hide(); } }); overlay.bind('paste', function() { overlay.val(''); setTimeout(function() { original.val(overlay.val()).show().change(); overlay.val(text).hide(); }, 100) }); // if(original.val()) { overlay.hide(); } else { original.hide(); } }; $.fn.ghostable2 = function(text) // Disabled element that won't submit or be gathered unless custom text. 
{ $(this).wrap("<span class='ghostable_wrapper'/>"); var $original = $(this); var $parent = $original.closest('span.ghostable_wrapper'); var $overlay = $(this).clone(); $overlay.val(text); //$("<div class='ghostable_overlay'>"+text+"</div>"); // style the overlay $overlay.css({ // position the overlay in the same real estate as the original parent element position: "absolute" , top: $parent.position().top , left: $parent.position().left , width: $parent.outerWidth() , height: $parent.outerHeight() , zIndex: 10000 // IE needs a color in order for the layer to respond to mouse events , backgroundColor: "#fff" // set the opacity to 0, so the element is transparent , opacity: 0 }) // attach the click behavior .click(function (){ $self.show(); // Show $(this).hide(); // Hide me, focus on original. // trigger the original event handler //return $self.trigger("click"); }); // add the overlay to the page $parent.append($overlay); }; $.spin = function() { var container = $('body #spin'); if(!container.size()) { container = $("<div id='spin' style='display: none;'></div>"); $('body').append(container); $(container).click(function() { $.hidespin(); }); } $(container).show(); $(container).spin('large'); }; $.unspin = $.hidespin = function(immediate) { if(immediate) { $('body #spin').hide(); } else { $('body #spin').fadeOut('slow'); } }; $.fn.spin = function(opts, color) { var defaults = { color: '#FFF' }; var presets = { "tiny": { lines: 8, length: 2, width: 2, radius: 3 }, "small": { lines: 8, length: 4, width: 3, radius: 5 }, "large": { lines: 10, length: 8, width: 4, radius: 8 } }; if (typeof Spinner != 'undefined') { return this.each(function() { var $this = $(this), data = $this.data(); if (data.spinner) { data.spinner.stop(); delete data.spinner; } if (opts !== false) { if (typeof opts === "string") { if (opts in presets) { opts = $.extend(defaults, presets[opts]); } else { opts = defaults;//{}; } if (color) { opts.color = color; } } data.spinner = new 
Spinner($.extend({color: $this.css('color')}, opts)).spin(this); } }); } else { throw "Spinner class not available."; } }; $(document).ready(function() { // Remove all form error content. j('.formerror').html('').hide(); j(document).on('click', 'a.modal', function(e) { j(this).modal(e); return false; }); // Handle forms based on modal // so this works on modals even after submitted first time around. j(document).on('submit', '#modal form:not(.nomodal)', function(e) { // as a submit check, respects onSubmit for verify fields,etc. //console.log("AJAX SUBMIT"); j(this).ajaxSubmit({ target: '#modal' }); return false; // prevent default submit to whole page! }); j('#modal form').require_fields(); // Hide flash messages once a form is submitted. j('form').submit(function() { j('#flashMessage').hide(); }); }); String.prototype.nl2br = function() { return this.replace(/\n/g, "<br/>\n"); }; })(jQuery);
{ return !isNaN(parseFloat(n)) && isFinite(n); }
identifier_body
core.py
# /usr/bin/env python2.7 # -*- mode: python -*- # ============================================================================= # @@-COPYRIGHT-START-@@ # # Copyright (c) 2017-2018, Qualcomm Innovation Center, Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# # SPDX-License-Identifier: BSD-3-Clause # # @@-COPYRIGHT-END-@@ # ============================================================================= """ Set of core utilities shared between quantization and svd code """ import re import tensorflow as tf from aimet_tensorflow.utils import constants from aimet_tensorflow.common import op_defs from aimet_common.utils import AimetLogger _BIAS_TYPES = ['Add', 'BiasAdd'] # Ops to skip quantization on, eg backprop, etc _SKIPPED_PREFIXES = ('gradients/', 'RMSProp/', 'Adagrad/', 'Const_', 'HistogramSummary', 'ScalarSummary', 'save/', 'truncated_normal', 'Adam') # Valid activation ops for quantization end points. _ACTIVATION_OP_SUFFIXES = ['/Relu6', '/Relu', '/Identity'] # Regular expression for recognizing nodes that are part of batch norm group. _BATCHNORM_RE = re.compile(r'^(.*)/BatchNorm/batchnorm') _OP_MAP = op_defs.default_op_map class OpQuery: """ Class for query a graph's operations and related data. """ def __init__(self, graph, op_map=None, ops_to_ignore=None, strict=True): """ Constructor :param graph: The graph to search :param op_map: The map of operations used to identify op sequences as "one op". The default op_map used is defined in op_deps.py. Please refer to that format for passing a custom op_map. :param ops_to_ignore: List of ops to ignore :param strict: If strict mode is set to True queries will only return the last ops at the end of well known "op layers" as defined by the op_map. When False, queries will return ops at the end of well known layers and, in addition, all ops which are not "known". Eg If you have a list of ops in a graph like: Conv2D, BiasAdd, WeirdOp Strict mode will return ["BiasAdd"] since it knows that Conv2D+BiasAdd are one logical "layer". When strict mode is disabled it will return ["BiasAdd", "WeirdOp"] :param debug: Whether to enable debug messages or not. 
""" self._log = AimetLogger.get_area_logger(AimetLogger.LogAreas.Utils) self._graph = graph self._strict = strict if op_map: self._op_map = op_map else: self._op_map = _OP_MAP if ops_to_ignore: self._ops_to_ignore = ops_to_ignore else: self._ops_to_ignore = [] self._trained_vars = graph.get_collection(tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES) @staticmethod def _is_op_with_weights(op):
@classmethod def get_weights_for_op(cls, op): """ Get the weight tensor for a given op :param op: TF op :return: Weight tensor for the op """ weights = None if cls._is_op_with_weights(op): weights = op.inputs[constants.OP_WEIGHT_INDICES[op.type]] return weights @staticmethod def get_bias_for_op(op): """ Get bias tensor for the given op :param op: TF op :return: Bias tensor for the op """ bias = None if op.type in _BIAS_TYPES: bias = op.inputs[constants.OP_WEIGHT_INDICES[op.type]] return bias def get_weight_ops(self, ops=None, skip_bias_op=False): """ Get all ops that contain weights. If a list of ops is passed search only ops from this list. Return the sequenced list of weight ops always with Conv/FC first, followed by the bias op, if present. :param ops: List of ops to use (optional) :param ops: If bias op has to be skipped (optional) :return: """ if not ops: ops = self._graph.get_operations() ops_with_weights = [] for op in ops: if self._is_op_with_weights(op): self._log.debug('Found op w/weights: %s', op.name) ops_with_weights.append(op) if not skip_bias_op and self._is_op_with_weights(op): for consumer in op.outputs[0].consumers(): # Ignore Reshape as it can be placed between MatMul and BiasAdd on Dense layer of Transformer if consumer.type in ['Reshape'] and len(consumer.outputs[0].consumers()) == 1: consumer = consumer.outputs[0].consumers()[0] if consumer.type in _BIAS_TYPES: self._log.debug('Found op w/bias: %s', consumer.name+'('+consumer.type+')') ops_with_weights.append(consumer) reduced_list = [x for x in ops_with_weights if not x.name.startswith(tuple(self._ops_to_ignore))] return reduced_list @staticmethod def get_weight_inputs(ops): """ Given a list of ops, returns a corresponding list of the weight indexes for their inputs :param ops: List of TF ops :return: """ indices = list() for op in ops: if op.type not in constants.OP_WEIGHT_INDICES: raise ValueError('Op type: '+op.type+' does not contain weights!') 
indices.append(constants.OP_WEIGHT_INDICES[op.type]) return indices def _match_ops(self, current_op, candidate_op_list, matched_ops, visited_ops): """ Recursive function that helps traverse a network and find matching ops :param current_op: Current op to traverse downstream from :param candidate_op_list: Current list of candidate ops that may result in a match :param matched_ops: List of already found matched_ops :param visited_ops: List of all ops that have been visited (to cut short duplicate traversals) :return: """ if any(x in current_op.name for x in _SKIPPED_PREFIXES): return matched_ops self._log.debug('Processing op: %s (%s) w/current list=%s', current_op.name, current_op.type, candidate_op_list) candidate_op_list.append(current_op) match_len, max_len = op_defs.check_match(candidate_op_list, op_map=self._op_map) self._log.debug('Got match_len: %s and max_len: %s', str(match_len), str(max_len)) if match_len != 0 and match_len == max_len: # Matched the maximum sequence possible matched_ops.append(current_op) op_type_list = [list_op.type for list_op in candidate_op_list] self._log.info('Found op match w/new op: %s and sequence: %s', current_op.name, str(op_type_list)) candidate_op_list = [] elif match_len == 0: # A list length > 1 means the current op_list was a match but not the newly added op. 
Save the previous last # op from the list if len(candidate_op_list) > 1: # Check if indeed the previous op_list is a match if op_defs.does_sequence_match(candidate_op_list[:-1], op_map=self._op_map): matched_op = candidate_op_list[-2] matched_ops.append(matched_op) op_type_list = [list_op.type for list_op in candidate_op_list[:-1]] self._log.info('Found op match: %s and sequence: %s', matched_op.name, str(op_type_list)) # Test to see if the current op is a match by itself candidate_op_list = [] matched_ops = self._match_ops(current_op, candidate_op_list, matched_ops, visited_ops) return matched_ops # No match, reset the list candidate_op_list = [] # There was some match, but not the max match possible. Continue drilling through the # outputs to the next ops for tensor in current_op.outputs: for consumer in tensor.consumers(): if consumer not in visited_ops: visited_ops.add(consumer) self._log.info('Adding to visited_logs: %s', consumer.name) matched_ops = self._match_ops(consumer, candidate_op_list, matched_ops, visited_ops) return matched_ops def get_known_ops(self, inputs): """ Given a set of inputs, find all the ops in the network that are from the "well known" op collections defined in the OpQuery's op_map :param inputs: List of input ops :return: List of all consumer ops in the network which are well-known """ if not inputs: raise ValueError('No input op names provided!') input_ops = [self._graph.get_operation_by_name(name) for name in inputs] matched_ops = [[] for _ in range(len(input_ops))] for op_index, op in enumerate(input_ops): self._log.info('Matching ops starting from: %s', op.name) matched_ops[op_index] = self._match_ops(op, [], [], set()) self._log.info('Found %i known op groups', len(matched_ops[op_index])) # Filter dups and merge the newly matched ops # Todo: Is there a more pythonic and faster way to detect duplicates? 
unique_ops = {} for ops in matched_ops: for op in ops: unique_ops[op.name] = op # Filter out the entries that should be ignored list_of_unique_ops = list(unique_ops.values()) reduced_list = [x for x in list_of_unique_ops if not x.name.startswith(tuple(self._ops_to_ignore))] return reduced_list
""" Checks if a given op has weights :param op: TF op :return: True, if op has weights, False otherwise """ return (op.type in constants.OP_WEIGHT_TYPES and not op.name.startswith(_SKIPPED_PREFIXES))
identifier_body
core.py
# /usr/bin/env python2.7 # -*- mode: python -*- # ============================================================================= # @@-COPYRIGHT-START-@@ # # Copyright (c) 2017-2018, Qualcomm Innovation Center, Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
#
# SPDX-License-Identifier: BSD-3-Clause
#
# @@-COPYRIGHT-END-@@
# =============================================================================

""" Set of core utilities shared between quantization and svd code """

import re

import tensorflow as tf

from aimet_tensorflow.utils import constants
from aimet_tensorflow.common import op_defs
from aimet_common.utils import AimetLogger

# Op types that carry a bias tensor as one of their inputs.
_BIAS_TYPES = ['Add', 'BiasAdd']

# Ops to skip quantization on, eg backprop, etc
_SKIPPED_PREFIXES = ('gradients/', 'RMSProp/', 'Adagrad/', 'Const_', 'HistogramSummary', 'ScalarSummary',
                     'save/', 'truncated_normal', 'Adam')

# Valid activation ops for quantization end points.
_ACTIVATION_OP_SUFFIXES = ['/Relu6', '/Relu', '/Identity']

# Regular expression for recognizing nodes that are part of batch norm group.
_BATCHNORM_RE = re.compile(r'^(.*)/BatchNorm/batchnorm')

# Default op-sequence map used when the caller does not supply one.
_OP_MAP = op_defs.default_op_map


class OpQuery:
    """ Class for query a graph's operations and related data. """

    def __init__(self, graph, op_map=None, ops_to_ignore=None, strict=True):
        """
        Constructor
        :param graph: The graph to search
        :param op_map: The map of operations used to identify op sequences as "one op".
                The default op_map used is defined in op_deps.py. Please refer to that format for passing
                a custom op_map.
        :param ops_to_ignore: List of op-name prefixes to ignore (matched with str.startswith)
        :param strict: If strict mode is set to True queries will only return the last ops at the end of
                well known "op layers" as defined by the op_map. When False, queries will return ops at the
                end of well known layers and, in addition, all ops which are not "known".
                Eg If you have a list of ops in a graph like: Conv2D, BiasAdd, WeirdOp
                Strict mode will return ["BiasAdd"] since it knows that Conv2D+BiasAdd are one logical "layer".
                When strict mode is disabled it will return ["BiasAdd", "WeirdOp"]
        """
        self._log = AimetLogger.get_area_logger(AimetLogger.LogAreas.Utils)
        self._graph = graph
        self._strict = strict
        if op_map:
            self._op_map = op_map
        else:
            self._op_map = _OP_MAP
        if ops_to_ignore:
            self._ops_to_ignore = ops_to_ignore
        else:
            self._ops_to_ignore = []
        # Trainable variables of the graph; kept for use by query helpers.
        self._trained_vars = graph.get_collection(tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)

    @staticmethod
    def _is_op_with_weights(op):
        """
        Checks if a given op has weights
        :param op: TF op
        :return: True, if op has weights, False otherwise
        """
        # Skipped prefixes exclude training/bookkeeping ops (gradients, optimizer slots, summaries, ...)
        return (op.type in constants.OP_WEIGHT_TYPES and
                not op.name.startswith(_SKIPPED_PREFIXES))

    @classmethod
    def get_weights_for_op(cls, op):
        """
        Get the weight tensor for a given op
        :param op: TF op
        :return: Weight tensor for the op, or None if the op carries no weights
        """
        weights = None
        if cls._is_op_with_weights(op):
            # OP_WEIGHT_INDICES maps op type -> input index of the weight tensor
            weights = op.inputs[constants.OP_WEIGHT_INDICES[op.type]]
        return weights

    @staticmethod
    def get_bias_for_op(op):
        """
        Get bias tensor for the given op
        :param op: TF op
        :return: Bias tensor for the op, or None if the op is not a bias-carrying type
        """
        bias = None
        if op.type in _BIAS_TYPES:
            # Bias tensor sits at the same well-known input index table used for weights
            bias = op.inputs[constants.OP_WEIGHT_INDICES[op.type]]
        return bias

    def get_weight_ops(self, ops=None, skip_bias_op=False):
        """
        Get all ops that contain weights. If a list of ops is passed search only ops from this list.
        Return the sequenced list of weight ops always with Conv/FC first, followed by the bias op,
        if present.
        :param ops: List of ops to use (optional; defaults to all ops in the graph)
        :param skip_bias_op: If bias ops should be excluded from the result (optional)
        :return: List of weight ops (bias consumers interleaved after their producer unless skipped)
        """
        if not ops:
            ops = self._graph.get_operations()

        ops_with_weights = []
        for op in ops:
            if self._is_op_with_weights(op):
                self._log.debug('Found op w/weights: %s', op.name)
                ops_with_weights.append(op)

                # Look for a bias op fed by this weight op's output
                if not skip_bias_op:
                    for consumer in op.outputs[0].consumers():
                        # Ignore Reshape as it can be placed between MatMul and BiasAdd on Dense layer of Transformer
                        if consumer.type in ['Reshape'] and len(consumer.outputs[0].consumers()) == 1:
                            consumer = consumer.outputs[0].consumers()[0]
                        if consumer.type in _BIAS_TYPES:
                            self._log.debug('Found op w/bias: %s', consumer.name+'('+consumer.type+')')
                            ops_with_weights.append(consumer)

        reduced_list = [x for x in ops_with_weights if not x.name.startswith(tuple(self._ops_to_ignore))]
        return reduced_list

    @staticmethod
    def get_weight_inputs(ops):
        """
        Given a list of ops, returns a corresponding list of the weight indexes for their inputs
        :param ops: List of TF ops
        :return: List of input indices, one per op
        :raises ValueError: if any op type has no known weight input index
        """
        indices = list()
        for op in ops:
            if op.type not in constants.OP_WEIGHT_INDICES:
                raise ValueError('Op type: '+op.type+' does not contain weights!')
            indices.append(constants.OP_WEIGHT_INDICES[op.type])
        return indices

    def _match_ops(self, current_op, candidate_op_list, matched_ops, visited_ops):
        """
        Recursive function that helps traverse a network and find matching ops
        :param current_op: Current op to traverse downstream from
        :param candidate_op_list: Current list of candidate ops that may result in a match
        :param matched_ops: List of already found matched_ops
        :param visited_ops: Set of all ops that have been visited (to cut short duplicate traversals)
        :return: Updated list of matched ops
        """
        if any(x in current_op.name for x in _SKIPPED_PREFIXES):
            return matched_ops

        self._log.debug('Processing op: %s (%s) w/current list=%s', current_op.name, current_op.type,
                        candidate_op_list)
        candidate_op_list.append(current_op)
        match_len, max_len = op_defs.check_match(candidate_op_list, op_map=self._op_map)
        self._log.debug('Got match_len: %s and max_len: %s', str(match_len), str(max_len))

        if match_len != 0 and match_len == max_len:
            # Matched the maximum sequence possible
            matched_ops.append(current_op)
            op_type_list = [list_op.type for list_op in candidate_op_list]
            self._log.info('Found op match w/new op: %s and sequence: %s', current_op.name, str(op_type_list))
            candidate_op_list = []

        elif match_len == 0:
            # A list length > 1 means the current op_list was a match but not the newly added op. Save the previous last
            # op from the list
            if len(candidate_op_list) > 1:
                # Check if indeed the previous op_list is a match
                if op_defs.does_sequence_match(candidate_op_list[:-1], op_map=self._op_map):
                    matched_op = candidate_op_list[-2]
                    matched_ops.append(matched_op)
                    op_type_list = [list_op.type for list_op in candidate_op_list[:-1]]
                    self._log.info('Found op match: %s and sequence: %s', matched_op.name, str(op_type_list))

                    # Test to see if the current op is a match by itself
                    candidate_op_list = []
                    matched_ops = self._match_ops(current_op, candidate_op_list, matched_ops, visited_ops)
                    return matched_ops

            # No match, reset the list
            candidate_op_list = []

        # There was some match, but not the max match possible. Continue drilling through the
        # outputs to the next ops
        for tensor in current_op.outputs:
            for consumer in tensor.consumers():
                if consumer not in visited_ops:
                    visited_ops.add(consumer)
                    self._log.info('Adding to visited_logs: %s', consumer.name)
                    matched_ops = self._match_ops(consumer, candidate_op_list, matched_ops, visited_ops)
        return matched_ops

    def get_known_ops(self, inputs):
        """
        Given a set of inputs, find all the ops in the network that are from the "well known"
        op collections defined in the OpQuery's op_map
        :param inputs: List of input op names
        :return: List of all consumer ops in the network which are well-known
        :raises ValueError: if inputs is empty or None
        """
        if not inputs:
            raise ValueError('No input op names provided!')

        input_ops = [self._graph.get_operation_by_name(name) for name in inputs]
        matched_ops = [[] for _ in range(len(input_ops))]
        for op_index, op in enumerate(input_ops):
            self._log.info('Matching ops starting from: %s', op.name)
            matched_ops[op_index] = self._match_ops(op, [], [], set())
            self._log.info('Found %i known op groups', len(matched_ops[op_index]))

        # Filter dups and merge the newly matched ops
        # Todo: Is there a more pythonic and faster way to detect duplicates?
        unique_ops = {}
        for ops in matched_ops:
            for op in ops:
                unique_ops[op.name] = op

        # Filter out the entries that should be ignored
        list_of_unique_ops = list(unique_ops.values())
        reduced_list = [x for x in list_of_unique_ops if not x.name.startswith(tuple(self._ops_to_ignore))]
        return reduced_list
bias = op.inputs[constants.OP_WEIGHT_INDICES[op.type]]
conditional_block