repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
odacremolbap/kubernetes
pkg/kubectl/cmd/apply/apply.go
29783
/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package apply import ( "encoding/json" "fmt" "io" "strings" "time" "github.com/jonboulle/clockwork" "github.com/spf13/cobra" corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/api/meta" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/jsonmergepatch" "k8s.io/apimachinery/pkg/util/mergepatch" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/apimachinery/pkg/util/strategicpatch" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/cli-runtime/pkg/genericclioptions" "k8s.io/cli-runtime/pkg/genericclioptions/printers" "k8s.io/cli-runtime/pkg/genericclioptions/resource" "k8s.io/client-go/discovery" "k8s.io/client-go/dynamic" "k8s.io/klog" oapi "k8s.io/kube-openapi/pkg/util/proto" "k8s.io/kubernetes/pkg/kubectl" "k8s.io/kubernetes/pkg/kubectl/cmd/delete" cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "k8s.io/kubernetes/pkg/kubectl/cmd/util/openapi" "k8s.io/kubernetes/pkg/kubectl/scheme" "k8s.io/kubernetes/pkg/kubectl/util/i18n" "k8s.io/kubernetes/pkg/kubectl/util/templates" "k8s.io/kubernetes/pkg/kubectl/validation" ) type ApplyOptions struct { RecordFlags *genericclioptions.RecordFlags Recorder genericclioptions.Recorder PrintFlags *genericclioptions.PrintFlags ToPrinter func(string) 
(printers.ResourcePrinter, error) DeleteFlags *delete.DeleteFlags DeleteOptions *delete.DeleteOptions Selector string DryRun bool ServerDryRun bool Prune bool PruneResources []pruneResource cmdBaseName string All bool Overwrite bool OpenAPIPatch bool PruneWhitelist []string ShouldIncludeUninitialized bool Validator validation.Schema Builder *resource.Builder Mapper meta.RESTMapper DynamicClient dynamic.Interface DiscoveryClient discovery.DiscoveryInterface OpenAPISchema openapi.Resources Namespace string EnforceNamespace bool genericclioptions.IOStreams } const ( // maxPatchRetry is the maximum number of conflicts retry for during a patch operation before returning failure maxPatchRetry = 5 // backOffPeriod is the period to back off when apply patch resutls in error. backOffPeriod = 1 * time.Second // how many times we can retry before back off triesBeforeBackOff = 1 ) var ( applyLong = templates.LongDesc(i18n.T(` Apply a configuration to a resource by filename or stdin. The resource name must be specified. This resource will be created if it doesn't exist yet. To use 'apply', always create the resource initially with either 'apply' or 'create --save-config'. JSON and YAML formats are accepted. Alpha Disclaimer: the --prune functionality is not yet complete. Do not use unless you are aware of what the current state is. See https://issues.k8s.io/34274.`)) applyExample = templates.Examples(i18n.T(` # Apply the configuration in pod.json to a pod. kubectl apply -f ./pod.json # Apply the JSON passed into stdin to a pod. cat pod.json | kubectl apply -f - # Note: --prune is still in Alpha # Apply the configuration in manifest.yaml that matches label app=nginx and delete all the other resources that are not in the file and match label app=nginx. kubectl apply --prune -f manifest.yaml -l app=nginx # Apply the configuration in manifest.yaml and delete all the other configmaps that are not in the file. 
kubectl apply --prune -f manifest.yaml --all --prune-whitelist=core/v1/ConfigMap`)) warningNoLastAppliedConfigAnnotation = "Warning: %[1]s apply should be used on resource created by either %[1]s create --save-config or %[1]s apply\n" ) func NewApplyOptions(ioStreams genericclioptions.IOStreams) *ApplyOptions { return &ApplyOptions{ RecordFlags: genericclioptions.NewRecordFlags(), DeleteFlags: delete.NewDeleteFlags("that contains the configuration to apply"), PrintFlags: genericclioptions.NewPrintFlags("created").WithTypeSetter(scheme.Scheme), Overwrite: true, OpenAPIPatch: true, Recorder: genericclioptions.NoopRecorder{}, IOStreams: ioStreams, } } // NewCmdApply creates the `apply` command func NewCmdApply(baseName string, f cmdutil.Factory, ioStreams genericclioptions.IOStreams) *cobra.Command { o := NewApplyOptions(ioStreams) // Store baseName for use in printing warnings / messages involving the base command name. // This is useful for downstream command that wrap this one. o.cmdBaseName = baseName cmd := &cobra.Command{ Use: "apply -f FILENAME", DisableFlagsInUseLine: true, Short: i18n.T("Apply a configuration to a resource by filename or stdin"), Long: applyLong, Example: applyExample, Run: func(cmd *cobra.Command, args []string) { cmdutil.CheckErr(o.Complete(f, cmd)) cmdutil.CheckErr(validateArgs(cmd, args)) cmdutil.CheckErr(validatePruneAll(o.Prune, o.All, o.Selector)) cmdutil.CheckErr(o.Run()) }, } // bind flag structs o.DeleteFlags.AddFlags(cmd) o.RecordFlags.AddFlags(cmd) o.PrintFlags.AddFlags(cmd) cmd.MarkFlagRequired("filename") cmd.Flags().BoolVar(&o.Overwrite, "overwrite", o.Overwrite, "Automatically resolve conflicts between the modified and live configuration by using values from the modified configuration") cmd.Flags().BoolVar(&o.Prune, "prune", o.Prune, "Automatically delete resource objects, including the uninitialized ones, that do not appear in the configs and are created by either apply or create --save-config. 
Should be used with either -l or --all.") cmdutil.AddValidateFlags(cmd) cmd.Flags().StringVarP(&o.Selector, "selector", "l", o.Selector, "Selector (label query) to filter on, supports '=', '==', and '!='.(e.g. -l key1=value1,key2=value2)") cmd.Flags().BoolVar(&o.All, "all", o.All, "Select all resources in the namespace of the specified resource types.") cmd.Flags().StringArrayVar(&o.PruneWhitelist, "prune-whitelist", o.PruneWhitelist, "Overwrite the default whitelist with <group/version/kind> for --prune") cmd.Flags().BoolVar(&o.OpenAPIPatch, "openapi-patch", o.OpenAPIPatch, "If true, use openapi to calculate diff when the openapi presents and the resource can be found in the openapi spec. Otherwise, fall back to use baked-in types.") cmd.Flags().BoolVar(&o.ServerDryRun, "server-dry-run", o.ServerDryRun, "If true, request will be sent to server with dry-run flag, which means the modifications won't be persisted. This is an alpha feature and flag.") cmdutil.AddDryRunFlag(cmd) cmdutil.AddIncludeUninitializedFlag(cmd) // apply subcommands cmd.AddCommand(NewCmdApplyViewLastApplied(f, ioStreams)) cmd.AddCommand(NewCmdApplySetLastApplied(f, ioStreams)) cmd.AddCommand(NewCmdApplyEditLastApplied(f, ioStreams)) return cmd } func (o *ApplyOptions) Complete(f cmdutil.Factory, cmd *cobra.Command) error { o.DryRun = cmdutil.GetDryRunFlag(cmd) if o.DryRun && o.ServerDryRun { return fmt.Errorf("--dry-run and --server-dry-run can't be used together") } // allow for a success message operation to be specified at print time o.ToPrinter = func(operation string) (printers.ResourcePrinter, error) { o.PrintFlags.NamePrintFlags.Operation = operation if o.DryRun { o.PrintFlags.Complete("%s (dry run)") } if o.ServerDryRun { o.PrintFlags.Complete("%s (server dry run)") } return o.PrintFlags.ToPrinter() } var err error o.RecordFlags.Complete(cmd) o.Recorder, err = o.RecordFlags.ToRecorder() if err != nil { return err } o.DiscoveryClient, err = f.ToDiscoveryClient() if err != nil { return err 
} dynamicClient, err := f.DynamicClient() if err != nil { return err } o.DeleteOptions = o.DeleteFlags.ToOptions(dynamicClient, o.IOStreams) o.ShouldIncludeUninitialized = cmdutil.ShouldIncludeUninitialized(cmd, o.Prune) o.OpenAPISchema, _ = f.OpenAPISchema() o.Validator, err = f.Validator(cmdutil.GetFlagBool(cmd, "validate")) o.Builder = f.NewBuilder() o.Mapper, err = f.ToRESTMapper() if err != nil { return err } o.DynamicClient, err = f.DynamicClient() if err != nil { return err } o.Namespace, o.EnforceNamespace, err = f.ToRawKubeConfigLoader().Namespace() if err != nil { return err } return nil } func validateArgs(cmd *cobra.Command, args []string) error { if len(args) != 0 { return cmdutil.UsageErrorf(cmd, "Unexpected args: %v", args) } return nil } func validatePruneAll(prune, all bool, selector string) error { if all && len(selector) > 0 { return fmt.Errorf("cannot set --all and --selector at the same time") } if prune && !all && selector == "" { return fmt.Errorf("all resources selected for prune without explicitly passing --all. To prune all resources, pass the --all flag. 
If you did not mean to prune all resources, specify a label selector") } return nil } func parsePruneResources(mapper meta.RESTMapper, gvks []string) ([]pruneResource, error) { pruneResources := []pruneResource{} for _, groupVersionKind := range gvks { gvk := strings.Split(groupVersionKind, "/") if len(gvk) != 3 { return nil, fmt.Errorf("invalid GroupVersionKind format: %v, please follow <group/version/kind>", groupVersionKind) } if gvk[0] == "core" { gvk[0] = "" } mapping, err := mapper.RESTMapping(schema.GroupKind{Group: gvk[0], Kind: gvk[2]}, gvk[1]) if err != nil { return pruneResources, err } var namespaced bool namespaceScope := mapping.Scope.Name() switch namespaceScope { case meta.RESTScopeNameNamespace: namespaced = true case meta.RESTScopeNameRoot: namespaced = false default: return pruneResources, fmt.Errorf("Unknown namespace scope: %q", namespaceScope) } pruneResources = append(pruneResources, pruneResource{gvk[0], gvk[1], gvk[2], namespaced}) } return pruneResources, nil } func (o *ApplyOptions) Run() error { var openapiSchema openapi.Resources if o.OpenAPIPatch { openapiSchema = o.OpenAPISchema } dryRunVerifier := &DryRunVerifier{ Finder: cmdutil.NewCRDFinder(cmdutil.CRDFromDynamic(o.DynamicClient)), OpenAPIGetter: o.DiscoveryClient, } // include the uninitialized objects by default if --prune is true // unless explicitly set --include-uninitialized=false r := o.Builder. Unstructured(). Schema(o.Validator). ContinueOnError(). NamespaceParam(o.Namespace).DefaultNamespace(). FilenameParam(o.EnforceNamespace, &o.DeleteOptions.FilenameOptions). LabelSelectorParam(o.Selector). IncludeUninitialized(o.ShouldIncludeUninitialized). Flatten(). 
Do() if err := r.Err(); err != nil { return err } var err error if o.Prune { o.PruneResources, err = parsePruneResources(o.Mapper, o.PruneWhitelist) if err != nil { return err } } output := *o.PrintFlags.OutputFormat shortOutput := output == "name" visitedUids := sets.NewString() visitedNamespaces := sets.NewString() var objs []runtime.Object count := 0 err = r.Visit(func(info *resource.Info, err error) error { if err != nil { return err } // If server-dry-run is requested but the type doesn't support it, fail right away. if o.ServerDryRun { if err := dryRunVerifier.HasSupport(info.Mapping.GroupVersionKind); err != nil { return err } } if info.Namespaced() { visitedNamespaces.Insert(info.Namespace) } if err := o.Recorder.Record(info.Object); err != nil { klog.V(4).Infof("error recording current command: %v", err) } // Get the modified configuration of the object. Embed the result // as an annotation in the modified configuration, so that it will appear // in the patch sent to the server. 
modified, err := kubectl.GetModifiedConfiguration(info.Object, true, unstructured.UnstructuredJSONScheme) if err != nil { return cmdutil.AddSourceToErr(fmt.Sprintf("retrieving modified configuration from:\n%s\nfor:", info.String()), info.Source, err) } // Print object only if output format other than "name" is specified printObject := len(output) > 0 && !shortOutput if err := info.Get(); err != nil { if !errors.IsNotFound(err) { return cmdutil.AddSourceToErr(fmt.Sprintf("retrieving current configuration of:\n%s\nfrom server for:", info.String()), info.Source, err) } // Create the resource if it doesn't exist // First, update the annotation used by kubectl apply if err := kubectl.CreateApplyAnnotation(info.Object, unstructured.UnstructuredJSONScheme); err != nil { return cmdutil.AddSourceToErr("creating", info.Source, err) } if !o.DryRun { // Then create the resource and skip the three-way merge options := metav1.CreateOptions{} if o.ServerDryRun { options.DryRun = []string{metav1.DryRunAll} } obj, err := resource.NewHelper(info.Client, info.Mapping).Create(info.Namespace, true, info.Object, &options) if err != nil { return cmdutil.AddSourceToErr("creating", info.Source, err) } info.Refresh(obj, true) } metadata, err := meta.Accessor(info.Object) if err != nil { return err } visitedUids.Insert(string(metadata.GetUID())) count++ if printObject { objs = append(objs, info.Object) return nil } printer, err := o.ToPrinter("created") if err != nil { return err } return printer.PrintObj(info.Object, o.Out) } metadata, err := meta.Accessor(info.Object) if err != nil { return err } visitedUids.Insert(string(metadata.GetUID())) if !o.DryRun { annotationMap := metadata.GetAnnotations() if _, ok := annotationMap[corev1.LastAppliedConfigAnnotation]; !ok { fmt.Fprintf(o.ErrOut, warningNoLastAppliedConfigAnnotation, o.cmdBaseName) } helper := resource.NewHelper(info.Client, info.Mapping) patcher := &Patcher{ Mapping: info.Mapping, Helper: helper, DynamicClient: o.DynamicClient, 
Overwrite: o.Overwrite, BackOff: clockwork.NewRealClock(), Force: o.DeleteOptions.ForceDeletion, Cascade: o.DeleteOptions.Cascade, Timeout: o.DeleteOptions.Timeout, GracePeriod: o.DeleteOptions.GracePeriod, ServerDryRun: o.ServerDryRun, OpenapiSchema: openapiSchema, Retries: maxPatchRetry, } patchBytes, patchedObject, err := patcher.Patch(info.Object, modified, info.Source, info.Namespace, info.Name, o.ErrOut) if err != nil { return cmdutil.AddSourceToErr(fmt.Sprintf("applying patch:\n%s\nto:\n%v\nfor:", patchBytes, info), info.Source, err) } info.Refresh(patchedObject, true) if string(patchBytes) == "{}" && !printObject { count++ printer, err := o.ToPrinter("unchanged") if err != nil { return err } return printer.PrintObj(info.Object, o.Out) } } count++ if printObject { objs = append(objs, info.Object) return nil } printer, err := o.ToPrinter("configured") if err != nil { return err } return printer.PrintObj(info.Object, o.Out) }) if err != nil { return err } if count == 0 { return fmt.Errorf("no objects passed to apply") } // print objects if len(objs) > 0 { printer, err := o.ToPrinter("") if err != nil { return err } objToPrint := objs[0] if len(objs) > 1 { list := &corev1.List{ TypeMeta: metav1.TypeMeta{ Kind: "List", APIVersion: "v1", }, ListMeta: metav1.ListMeta{}, } if err := meta.SetList(list, objs); err != nil { return err } objToPrint = list } if err := printer.PrintObj(objToPrint, o.Out); err != nil { return err } } if !o.Prune { return nil } p := pruner{ mapper: o.Mapper, dynamicClient: o.DynamicClient, labelSelector: o.Selector, visitedUids: visitedUids, cascade: o.DeleteOptions.Cascade, dryRun: o.DryRun, serverDryRun: o.ServerDryRun, gracePeriod: o.DeleteOptions.GracePeriod, toPrinter: o.ToPrinter, out: o.Out, } namespacedRESTMappings, nonNamespacedRESTMappings, err := getRESTMappings(o.Mapper, &(o.PruneResources)) if err != nil { return fmt.Errorf("error retrieving RESTMappings to prune: %v", err) } for n := range visitedNamespaces { for _, m := 
range namespacedRESTMappings { if err := p.prune(n, m, o.ShouldIncludeUninitialized); err != nil { return fmt.Errorf("error pruning namespaced object %v: %v", m.GroupVersionKind, err) } } } for _, m := range nonNamespacedRESTMappings { if err := p.prune(metav1.NamespaceNone, m, o.ShouldIncludeUninitialized); err != nil { return fmt.Errorf("error pruning nonNamespaced object %v: %v", m.GroupVersionKind, err) } } return nil } type pruneResource struct { group string version string kind string namespaced bool } func (pr pruneResource) String() string { return fmt.Sprintf("%v/%v, Kind=%v, Namespaced=%v", pr.group, pr.version, pr.kind, pr.namespaced) } func getRESTMappings(mapper meta.RESTMapper, pruneResources *[]pruneResource) (namespaced, nonNamespaced []*meta.RESTMapping, err error) { if len(*pruneResources) == 0 { // default whitelist // TODO: need to handle the older api versions - e.g. v1beta1 jobs. Github issue: #35991 *pruneResources = []pruneResource{ {"", "v1", "ConfigMap", true}, {"", "v1", "Endpoints", true}, {"", "v1", "Namespace", false}, {"", "v1", "PersistentVolumeClaim", true}, {"", "v1", "PersistentVolume", false}, {"", "v1", "Pod", true}, {"", "v1", "ReplicationController", true}, {"", "v1", "Secret", true}, {"", "v1", "Service", true}, {"batch", "v1", "Job", true}, {"batch", "v1beta1", "CronJob", true}, {"extensions", "v1beta1", "DaemonSet", true}, {"extensions", "v1beta1", "Deployment", true}, {"extensions", "v1beta1", "Ingress", true}, {"extensions", "v1beta1", "ReplicaSet", true}, {"apps", "v1beta1", "StatefulSet", true}, {"apps", "v1beta1", "Deployment", true}, } } for _, resource := range *pruneResources { addedMapping, err := mapper.RESTMapping(schema.GroupKind{Group: resource.group, Kind: resource.kind}, resource.version) if err != nil { return nil, nil, fmt.Errorf("invalid resource %v: %v", resource, err) } if resource.namespaced { namespaced = append(namespaced, addedMapping) } else { nonNamespaced = append(nonNamespaced, addedMapping) } } 
return namespaced, nonNamespaced, nil } type pruner struct { mapper meta.RESTMapper dynamicClient dynamic.Interface visitedUids sets.String labelSelector string fieldSelector string cascade bool serverDryRun bool dryRun bool gracePeriod int toPrinter func(string) (printers.ResourcePrinter, error) out io.Writer } func (p *pruner) prune(namespace string, mapping *meta.RESTMapping, includeUninitialized bool) error { objList, err := p.dynamicClient.Resource(mapping.Resource). Namespace(namespace). List(metav1.ListOptions{ LabelSelector: p.labelSelector, FieldSelector: p.fieldSelector, IncludeUninitialized: includeUninitialized, }) if err != nil { return err } objs, err := meta.ExtractList(objList) if err != nil { return err } for _, obj := range objs { metadata, err := meta.Accessor(obj) if err != nil { return err } annots := metadata.GetAnnotations() if _, ok := annots[corev1.LastAppliedConfigAnnotation]; !ok { // don't prune resources not created with apply continue } uid := metadata.GetUID() if p.visitedUids.Has(string(uid)) { continue } name := metadata.GetName() if !p.dryRun { if err := p.delete(namespace, name, mapping); err != nil { return err } } printer, err := p.toPrinter("pruned") if err != nil { return err } printer.PrintObj(obj, p.out) } return nil } func (p *pruner) delete(namespace, name string, mapping *meta.RESTMapping) error { return runDelete(namespace, name, mapping, p.dynamicClient, p.cascade, p.gracePeriod, p.serverDryRun) } func runDelete(namespace, name string, mapping *meta.RESTMapping, c dynamic.Interface, cascade bool, gracePeriod int, serverDryRun bool) error { options := &metav1.DeleteOptions{} if gracePeriod >= 0 { options = metav1.NewDeleteOptions(int64(gracePeriod)) } if serverDryRun { options.DryRun = []string{metav1.DryRunAll} } policy := metav1.DeletePropagationForeground if !cascade { policy = metav1.DeletePropagationOrphan } options.PropagationPolicy = &policy return c.Resource(mapping.Resource).Namespace(namespace).Delete(name, 
options) } func (p *Patcher) delete(namespace, name string) error { return runDelete(namespace, name, p.Mapping, p.DynamicClient, p.Cascade, p.GracePeriod, p.ServerDryRun) } type Patcher struct { Mapping *meta.RESTMapping Helper *resource.Helper DynamicClient dynamic.Interface Overwrite bool BackOff clockwork.Clock Force bool Cascade bool Timeout time.Duration GracePeriod int ServerDryRun bool // If set, forces the patch against a specific resourceVersion ResourceVersion *string // Number of retries to make if the patch fails with conflict Retries int OpenapiSchema openapi.Resources } // DryRunVerifier verifies if a given group-version-kind supports DryRun // against the current server. Sending dryRun requests to apiserver that // don't support it will result in objects being unwillingly persisted. // // It reads the OpenAPI to see if the given GVK supports dryRun. If the // GVK can not be found, we assume that CRDs will have the same level of // support as "namespaces", and non-CRDs will not be supported. We // delay the check for CRDs as much as possible though, since it // requires an extra round-trip to the server. type DryRunVerifier struct { Finder cmdutil.CRDFinder OpenAPIGetter discovery.OpenAPISchemaInterface } // HasSupport verifies if the given gvk supports DryRun. An error is // returned if it doesn't. func (v *DryRunVerifier) HasSupport(gvk schema.GroupVersionKind) error { oapi, err := v.OpenAPIGetter.OpenAPISchema() if err != nil { return fmt.Errorf("failed to download openapi: %v", err) } supports, err := openapi.SupportsDryRun(oapi, gvk) if err != nil { // We assume that we couldn't find the type, then check for namespace: supports, _ = openapi.SupportsDryRun(oapi, schema.GroupVersionKind{Group: "", Version: "v1", Kind: "Namespace"}) // If namespace supports dryRun, then we will support dryRun for CRDs only. 
if supports { supports, err = v.Finder.HasCRD(gvk.GroupKind()) if err != nil { return fmt.Errorf("failed to check CRD: %v", err) } } } if !supports { return fmt.Errorf("%v doesn't support dry-run", gvk) } return nil } func addResourceVersion(patch []byte, rv string) ([]byte, error) { var patchMap map[string]interface{} err := json.Unmarshal(patch, &patchMap) if err != nil { return nil, err } u := unstructured.Unstructured{Object: patchMap} a, err := meta.Accessor(&u) if err != nil { return nil, err } a.SetResourceVersion(rv) return json.Marshal(patchMap) } func (p *Patcher) patchSimple(obj runtime.Object, modified []byte, source, namespace, name string, errOut io.Writer) ([]byte, runtime.Object, error) { // Serialize the current configuration of the object from the server. current, err := runtime.Encode(unstructured.UnstructuredJSONScheme, obj) if err != nil { return nil, nil, cmdutil.AddSourceToErr(fmt.Sprintf("serializing current configuration from:\n%v\nfor:", obj), source, err) } // Retrieve the original configuration of the object from the annotation. 
original, err := kubectl.GetOriginalConfiguration(obj) if err != nil { return nil, nil, cmdutil.AddSourceToErr(fmt.Sprintf("retrieving original configuration from:\n%v\nfor:", obj), source, err) } var patchType types.PatchType var patch []byte var lookupPatchMeta strategicpatch.LookupPatchMeta var schema oapi.Schema createPatchErrFormat := "creating patch with:\noriginal:\n%s\nmodified:\n%s\ncurrent:\n%s\nfor:" // Create the versioned struct from the type defined in the restmapping // (which is the API version we'll be submitting the patch to) versionedObject, err := scheme.Scheme.New(p.Mapping.GroupVersionKind) switch { case runtime.IsNotRegisteredError(err): // fall back to generic JSON merge patch patchType = types.MergePatchType preconditions := []mergepatch.PreconditionFunc{mergepatch.RequireKeyUnchanged("apiVersion"), mergepatch.RequireKeyUnchanged("kind"), mergepatch.RequireMetadataKeyUnchanged("name")} patch, err = jsonmergepatch.CreateThreeWayJSONMergePatch(original, modified, current, preconditions...) if err != nil { if mergepatch.IsPreconditionFailed(err) { return nil, nil, fmt.Errorf("%s", "At least one of apiVersion, kind and name was changed") } return nil, nil, cmdutil.AddSourceToErr(fmt.Sprintf(createPatchErrFormat, original, modified, current), source, err) } case err != nil: return nil, nil, cmdutil.AddSourceToErr(fmt.Sprintf("getting instance of versioned object for %v:", p.Mapping.GroupVersionKind), source, err) case err == nil: // Compute a three way strategic merge patch to send to server. patchType = types.StrategicMergePatchType // Try to use openapi first if the openapi spec is available and can successfully calculate the patch. // Otherwise, fall back to baked-in types. 
if p.OpenapiSchema != nil { if schema = p.OpenapiSchema.LookupResource(p.Mapping.GroupVersionKind); schema != nil { lookupPatchMeta = strategicpatch.PatchMetaFromOpenAPI{Schema: schema} if openapiPatch, err := strategicpatch.CreateThreeWayMergePatch(original, modified, current, lookupPatchMeta, p.Overwrite); err != nil { fmt.Fprintf(errOut, "warning: error calculating patch from openapi spec: %v\n", err) } else { patchType = types.StrategicMergePatchType patch = openapiPatch } } } if patch == nil { lookupPatchMeta, err = strategicpatch.NewPatchMetaFromStruct(versionedObject) if err != nil { return nil, nil, cmdutil.AddSourceToErr(fmt.Sprintf(createPatchErrFormat, original, modified, current), source, err) } patch, err = strategicpatch.CreateThreeWayMergePatch(original, modified, current, lookupPatchMeta, p.Overwrite) if err != nil { return nil, nil, cmdutil.AddSourceToErr(fmt.Sprintf(createPatchErrFormat, original, modified, current), source, err) } } } if string(patch) == "{}" { return patch, obj, nil } if p.ResourceVersion != nil { patch, err = addResourceVersion(patch, *p.ResourceVersion) if err != nil { return nil, nil, cmdutil.AddSourceToErr("Failed to insert resourceVersion in patch", source, err) } } options := metav1.UpdateOptions{} if p.ServerDryRun { options.DryRun = []string{metav1.DryRunAll} } patchedObj, err := p.Helper.Patch(namespace, name, patchType, patch, &options) return patch, patchedObj, err } func (p *Patcher) Patch(current runtime.Object, modified []byte, source, namespace, name string, errOut io.Writer) ([]byte, runtime.Object, error) { var getErr error patchBytes, patchObject, err := p.patchSimple(current, modified, source, namespace, name, errOut) if p.Retries == 0 { p.Retries = maxPatchRetry } for i := 1; i <= p.Retries && errors.IsConflict(err); i++ { if i > triesBeforeBackOff { p.BackOff.Sleep(backOffPeriod) } current, getErr = p.Helper.Get(namespace, name, false) if getErr != nil { return nil, nil, getErr } patchBytes, patchObject, err 
= p.patchSimple(current, modified, source, namespace, name, errOut) } if err != nil && (errors.IsConflict(err) || errors.IsInvalid(err)) && p.Force { patchBytes, patchObject, err = p.deleteAndCreate(current, modified, namespace, name) } return patchBytes, patchObject, err } func (p *Patcher) deleteAndCreate(original runtime.Object, modified []byte, namespace, name string) ([]byte, runtime.Object, error) { if err := p.delete(namespace, name); err != nil { return modified, nil, err } // TODO: use wait if err := wait.PollImmediate(1*time.Second, p.Timeout, func() (bool, error) { if _, err := p.Helper.Get(namespace, name, false); !errors.IsNotFound(err) { return false, err } return true, nil }); err != nil { return modified, nil, err } versionedObject, _, err := unstructured.UnstructuredJSONScheme.Decode(modified, nil, nil) if err != nil { return modified, nil, err } options := metav1.CreateOptions{} if p.ServerDryRun { options.DryRun = []string{metav1.DryRunAll} } createdObject, err := p.Helper.Create(namespace, true, versionedObject, &options) if err != nil { // restore the original object if we fail to create the new one // but still propagate and advertise error to user recreated, recreateErr := p.Helper.Create(namespace, true, original, &options) if recreateErr != nil { err = fmt.Errorf("An error occurred force-replacing the existing object with the newly provided one:\n\n%v.\n\nAdditionally, an error occurred attempting to restore the original object:\n\n%v", err, recreateErr) } else { createdObject = recreated } } return modified, createdObject, err }
apache-2.0
Luckyion/SpeechIntelligence
SpeechIntelligence/src/me/videa/base/functions/StartSpecificApplication.java
1285
package me.videa.base.functions; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.drm.DrmStore.Action; import android.net.Uri; public class StartSpecificApplication { Context mContext; public StartSpecificApplication(Context context) { // TODO Auto-generated constructor stub this.mContext = context; } /** * 根据包名启动应用程序 * @param packageName 包名称 */ public void startSpecificApplication(String packageName){ Intent mIntent = new Intent(); PackageManager packageManager = mContext.getPackageManager(); packageManager.getLaunchIntentForPackage(packageName); mContext.startActivity(mIntent); } /** * 根据包名启动应用程序 * @param packageName 包名称 */ public void startSpecificApplication(String pkg, String cls){ Intent intentPhone = new Intent(); ComponentName comp = new ComponentName(pkg, cls); intentPhone.setComponent(comp); intentPhone.setAction("android.intent.action.VIEW"); intentPhone.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS); mContext.startActivity(intentPhone); } }
apache-2.0
Gugli/Openfire
src/plugins/hazelcast/src/java/org/jivesoftware/openfire/plugin/session/IncomingServerSessionTask.java
2682
/*
 * Copyright (C) 2007-2009 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jivesoftware.openfire.plugin.session;

import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.StreamID;
import org.jivesoftware.openfire.session.IncomingServerSession;
import org.jivesoftware.openfire.session.Session;
import org.jivesoftware.openfire.spi.BasicStreamIDFactory;
import org.jivesoftware.util.cache.ExternalizableUtil;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

/**
 * Class that defines possible remote operations that could be performed
 * on remote incoming server sessions.
 *
 * @author Gaston Dombiak
 */
public class IncomingServerSessionTask extends RemoteSessionTask {

    // Stream ID that identifies the incoming server session this task
    // operates on; serialized in writeExternal/readExternal below.
    private StreamID streamID;

    // No-arg constructor required for Externalizable deserialization.
    public IncomingServerSessionTask() {
        super();
    }

    protected IncomingServerSessionTask(Operation operation, StreamID streamID) {
        super(operation);
        this.streamID = streamID;
    }

    // Resolves the target session on the node where the task runs.
    Session getSession() {
        return SessionManager.getInstance().getIncomingServerSession(streamID);
    }

    public void run() {
        super.run();
        // Dispatch on the requested operation and store the outcome in
        // the inherited 'result' field.
        if (operation == Operation.getLocalDomain) {
            result = ((IncomingServerSession) getSession()).getLocalDomain();
        }
        else if (operation == Operation.getAddress) {
            result = getSession().getAddress();
        }
        else if (operation == Operation.isUsingServerDialback) {
            result = ((IncomingServerSession) getSession()).isUsingServerDialback();
        }
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        super.writeExternal(out);
        // Only the stream ID string travels across the cluster; the session
        // itself is looked up again on the receiving node.
        ExternalizableUtil.getInstance().writeSafeUTF(out, streamID.getID());
    }

    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        super.readExternal(in);
        streamID = BasicStreamIDFactory.createStreamID(ExternalizableUtil.getInstance().readSafeUTF(in));
    }

    public String toString() {
        return super.toString() + " operation: " + operation + " streamID: " + streamID;
    }
}
apache-2.0
nickbabcock/dropwizard
dropwizard-request-logging/src/test/java/io/dropwizard/request/logging/ExternalRequestLogFactoryTest.java
1313
package io.dropwizard.request.logging; import io.dropwizard.configuration.YamlConfigurationFactory; import io.dropwizard.jackson.DiscoverableSubtypeResolver; import io.dropwizard.jackson.Jackson; import io.dropwizard.logging.BootstrapLogging; import io.dropwizard.util.Resources; import io.dropwizard.validation.BaseValidator; import org.junit.Test; import java.io.File; import static org.assertj.core.api.Assertions.assertThat; public class ExternalRequestLogFactoryTest { static { BootstrapLogging.bootstrap(); } @Test public void canBeDeserialized() throws Exception { RequestLogFactory<?> externalRequestLogFactory = new YamlConfigurationFactory<>(RequestLogFactory.class, BaseValidator.newValidator(), Jackson.newObjectMapper(), "dw") .build(new File(Resources.getResource("yaml/externalRequestLog.yml").toURI())); assertThat(externalRequestLogFactory).isNotNull(); assertThat(externalRequestLogFactory).isInstanceOf(ExternalRequestLogFactory.class); assertThat(externalRequestLogFactory.isEnabled()).isTrue(); } @Test public void isDiscoverable() throws Exception { assertThat(new DiscoverableSubtypeResolver().getDiscoveredSubtypes()) .contains(ExternalRequestLogFactory.class); } }
apache-2.0
chrismattmann/labkey-client
src/main/java/org/labkey/remoteapi/security/CreateUserResponse.java
1324
/*
 * Copyright (c) 2009 LabKey Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.labkey.remoteapi.security;

import org.json.simple.JSONObject;
import org.labkey.remoteapi.Command;
import org.labkey.remoteapi.CommandResponse;

/*
 * User: dave
 * Date: Sep 28, 2009
 * Time: 2:50:07 PM
 */

/**
 * Response returned by a create-user request. Exposes typed accessors for
 * the properties the server sends back: the new user's id, email address,
 * and an informational message.
 */
public class CreateUserResponse extends CommandResponse {

    /**
     * @param text          raw response text
     * @param statusCode    HTTP status code of the response
     * @param contentType   response content type
     * @param json          parsed JSON body
     * @param sourceCommand the command that produced this response
     */
    public CreateUserResponse(String text, int statusCode, String contentType,
                              JSONObject json, Command sourceCommand) {
        super(text, statusCode, contentType, json, sourceCommand);
    }

    /** Returns the id assigned to the newly created user. */
    public Number getUserId() {
        return getProperty("userId");
    }

    /** Returns the email address of the newly created user. */
    public String getEmail() {
        return getProperty("email");
    }

    /** Returns the server's informational message for this request. */
    public String getMessage() {
        return getProperty("message");
    }
}
apache-2.0
cncduLee/event
simple-event/src/main/java/com/bitium/event/event/dispatch/EventSet.java
804
/**
 * Copyright (c) 2015, bitium.com. All rights reserved.
 */
package com.bitium.event.event.dispatch;

import com.bitium.event.event.Subscriber;

import java.util.Iterator;

/**
 * Immutable pairing of an event object with the iterator of subscribers it
 * should be delivered to. Package-private value holder used within the
 * dispatch package.
 *
 * @version 1.0.0
 */
final class EventSet {

    // The event being dispatched.
    final Object event;

    // Iterator over the subscribers that should receive the event.
    final Iterator<Subscriber> subscribers;

    EventSet(Object evt, Iterator<Subscriber> subs) {
        this.event = evt;
        this.subscribers = subs;
    }
}
apache-2.0
ResearchWorx/Cresco-Agent-MD5-Plugin
src/main/java/plugincore/PluginEngine.java
10970
package plugincore;

import java.io.File;
import java.io.FileInputStream;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.jar.Attributes;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;

import org.apache.commons.configuration.SubnodeConfiguration;

import channels.MD5Consumer;
import channels.MD5Producer;

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;

import dummyserv.DummyServerEngine;
import shared.Clogger;
import shared.MsgEvent;
import shared.MsgEventType;
import shared.PluginImplementation;

/**
 * Lifecycle engine for the Cresco MD5 plugin. The hosting agent calls
 * {@link #initialize}, delivers messages via {@link #msgIn}, and calls
 * {@link #shutdown} to stop the plugin.
 *
 * NOTE(review): nearly all state is held in mutable public static fields and
 * is written from initialize() through "this." — the class effectively
 * behaves as a singleton. Running two PluginEngine instances in one JVM
 * would share and clobber this state.
 */
public class PluginEngine {
    // RabbitMQ connection factory configured during initialize().
    public static ConnectionFactory factory;
    //public static Connection connection;
    //public static String LOG_CHANNEL_NAME;
    //public static Channel log_channel;
    //public static long incomingCount = 0;
    //public static long outgoingCount = 0;
    public static long etIn = 0;
    public static long etOut = 0;
    // Single-thread executor running the MD5Consumer.
    public static ExecutorService consumerExecutor;
    //public static Thread LogConsumerThread;
    // Active/Enabled flag pairs: "Enabled" is set by the worker when it is
    // ready (initialize() busy-waits on it); "Active" is set by this class
    // once startup completes and cleared on shutdown.
    public static boolean LogConsumerActive = false;
    public static boolean LogConsumerEnabled = false;
    // Single-thread executor running the MD5Producer.
    public static ExecutorService producerExecutor;
    //private static Thread ProducerThread;
    public static boolean ProducerActive = false;
    public static boolean ProducerEnabled = false;
    // Overall plugin run flag; set true in initialize(), false in shutdown().
    public static boolean isActive;
    public static PluginConfig config;
    public static String pluginName;
    public static String pluginVersion;
    // Identity of this plugin instance within the Cresco region/agent tree.
    public static String plugin;
    public static String agent;
    public static String region;
    public static CommandExec commandExec;
    // Map of in-flight RPC calls keyed by call id — presumably used by RPCCall; verify.
    public static ConcurrentMap<String,MsgEvent> rpcMap;
    public static RPCCall rpcc;
    public static ConcurrentLinkedQueue<MsgEvent> logOutQueue;
    public static WatchDog wd;
    public static WatchPerf wp;
    // Logger that forwards log records to the agent via msgInQueue.
    public static Clogger clog;
    // Queue of messages destined for the hosting agent.
    public static ConcurrentLinkedQueue<MsgEvent> msgInQueue;

    public PluginEngine() {
        pluginName="MD5Plugin";
    }

    /**
     * Stops the plugin: cancels the watchdog timers, disables the
     * producer/consumer workers, shuts down their executors (waiting up to
     * 2 seconds each), and finally notifies the agent with a CONFIG
     * "disabled" message.
     */
    public void shutdown() {
        System.out.println("Plugin Shutdown : Agent=" + agent + "pluginname=" + plugin);
        isActive = false;
        wd.timer.cancel(); //prevent rediscovery
        wp.timer.cancel(); //prevent rediscovery
        //ProducerActive = false;
        //LogConsumerActive = false;
        // Clearing the Enabled flags signals the worker loops to exit.
        PluginEngine.ProducerEnabled = false;
        PluginEngine.LogConsumerEnabled = false;
        //ProducerThread.interrupt();
        //LogConsumerThread.interrupt();
        try {
            if(producerExecutor != null) {
                producerExecutor.shutdown();
                System.out.println("-----------------------");
                producerExecutor.awaitTermination(2, TimeUnit.SECONDS);
            }
            if(consumerExecutor != null) {
                consumerExecutor.shutdown();
                System.out.println("-----------------------");
                consumerExecutor.awaitTermination(2, TimeUnit.SECONDS); // wait until all tasks are finished
            }
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            System.out.println("Plugin Shutdown : Agent=" + agent + "pluginname=" + plugin + " could not terminate " + e.toString());
        }
        System.out.println("All tasks are finished!");
        /*
        while(PluginEngine.ProducerActive) {
            System.out.println("Plugin Shutdown : Agent=" + agent + "pluginname=" + plugin + " waiting on producer thread to exit.");
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        while(PluginEngine.LogConsumerActive) {
            System.out.println("Plugin Shutdown : Agent=" + agent + "pluginname=" + plugin + " waiting on consumer thread to exit.");
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        */
        try {
            // Tell the agent this plugin is now disabled.
            MsgEvent me = new MsgEvent(MsgEventType.CONFIG,region,null,null,"disabled");
            me.setParam("src_region",region);
            me.setParam("src_agent",agent);
            me.setParam("src_plugin",plugin);
            me.setParam("dst_region",region);
            //msgOutQueue.offer(me);
            msgInQueue.offer(me);
            //PluginEngine.rpcc.call(me);
            System.out.println("Sent disable message");
        } catch(Exception ex) {
            String msg2 = "Plugin Shutdown Failed: Agent=" + agent + "pluginname=" + plugin;
            clog.error(msg2);
        }
    }

    /** Returns the fixed plugin name ("MD5Plugin"). */
    public String getName() {
        return pluginName;
    }

    /**
     * Returns "<pluginName>.<version>" where the version is read from the
     * Implementation-Version attribute of this plugin's jar manifest, or a
     * fallback string when the manifest cannot be read.
     */
    public String getVersion() //This should pull the version information from jar Meta data
    {
        String version;
        try{
            String jarFile = PluginImplementation.class.getProtectionDomain().getCodeSource().getLocation().getPath();
            // NOTE(review): strips 5 leading chars (presumably a "file:" prefix)
            // and 2 trailing chars from the code-source path — fragile; TODO
            // confirm this holds for every deployment layout.
            File file = new File(jarFile.substring(5, (jarFile.length() -2)));
            FileInputStream fis = new FileInputStream(file);
            @SuppressWarnings("resource")
            JarInputStream jarStream = new JarInputStream(fis);
            Manifest mf = jarStream.getManifest();
            Attributes mainAttribs = mf.getMainAttributes();
            version = mainAttribs.getValue("Implementation-Version");
        }
        catch(Exception ex)
        {
            String msg = "Unable to determine Plugin Version " + ex.toString();
            clog.error(msg);
            version = "Unable to determine Version";
        }
        return pluginName + "." + version;
    }

    //steps to init the plugin
    /**
     * Initializes the plugin: records identity, builds the logger and the
     * RabbitMQ connection factory from configuration, then (if enabled)
     * starts the MD5 producer and consumer on single-thread executors,
     * busy-waiting until each reports ready. Finally starts the WatchDog
     * and WatchPerf timers.
     *
     * @param msgOutQueue unused here (kept for the agent's plugin contract)
     * @param msgInQueue  queue for messages destined to the agent
     * @param configObj   plugin configuration subtree
     * @param region      Cresco region name
     * @param agent       Cresco agent name
     * @param plugin      plugin instance name
     * @return true on success, false on any initialization failure
     */
    public boolean initialize(ConcurrentLinkedQueue<MsgEvent> msgOutQueue,ConcurrentLinkedQueue<MsgEvent> msgInQueue, SubnodeConfiguration configObj, String region,String agent, String plugin)
    {
        isActive = true;
        commandExec = new CommandExec();
        rpcMap = new ConcurrentHashMap<String,MsgEvent>();
        rpcc = new RPCCall();
        //this.msgOutQueue = msgOutQueue; //send directly to log queue
        // NOTE(review): "this.x = x" below actually writes the static fields.
        this.msgInQueue = msgInQueue; //messages to agent should go here
        this.agent = agent;
        this.plugin = plugin;
        this.region = region;
        try{
            if(msgInQueue == null) {
                System.out.println("MsgInQueue==null");
                return false;
            }
            this.config = new PluginConfig(configObj);
            //create logger
            clog = new Clogger(msgInQueue,region,agent,plugin); //send logs directly to outqueue
            String startmsg = "Initializing Plugin: Region=" + region + " Agent=" + agent + " plugin=" + plugin + " version" + getVersion();
            clog.log(startmsg);
            try{
                //establish AMPQ connectivity
                factory = new ConnectionFactory();
                factory.setHost(PluginEngine.config.getAMPQControlHost());
                factory.setUsername(PluginEngine.config.getAMPQControlUser());
                factory.setPassword(PluginEngine.config.getAMPQControlPassword());
                factory.setConnectionTimeout(10000);
            }
            catch(Exception ex)
            {
                System.out.println("MD5 AMPQ Plugin Init error: " + ex.toString());
                return false;
            }
            if(config.getLogProducerEnabled()) {
                producerExecutor = Executors.newFixedThreadPool(1);
                //MD5Producer v = new MD5Producer();
                Runnable v = new MD5Producer();;
                producerExecutor.execute(v);
                // Busy-wait (1s poll) until the producer flips ProducerEnabled.
                // NOTE(review): spins forever if the producer never comes up.
                while(!ProducerEnabled) {
                    Thread.sleep(1000);
                    String msg = "Waiting for MD5Producer Initialization : Region=" + region + " Agent=" + agent + " plugin=" + plugin;
                    clog.log(msg);
                }
                PluginEngine.ProducerActive = true;
            }
            if(config.getLogConsumerEnabled()) {
                consumerExecutor = Executors.newFixedThreadPool(1);
                Runnable lc = new MD5Consumer();
                consumerExecutor.execute(lc);
                // Same busy-wait pattern as the producer above.
                while(!LogConsumerEnabled) {
                    Thread.sleep(1000);
                    String msg = "Waiting for MD5Consumer Initialization : Region=" + region + " Agent=" + agent + " plugin=" + plugin;
                    clog.log(msg);
                }
                PluginEngine.LogConsumerActive = true;
            }
            /*
            int timeout = 20;
            PluginEngine.ProducerActive = true;
            MD5Producer v = new MD5Producer();
            ProducerThread = new Thread(v);
            if(config.getLogProducerEnabled()) {
                ProducerThread.start();
                while(!ProducerEnabled) {
                    Thread.sleep(1000);
                    String msg = "Waiting for MD5Producer Initialization : Region=" + region + " Agent=" + agent + " plugin=" + plugin;
                    clog.log(msg);
                    if(!ProducerThread.isAlive()) {
                        System.out.println("Starting New Producer Thread");
                        ProducerThread.start();
                    }
                }
            }
            */
            /*
            PluginEngine.LogConsumerActive = true;
            MD5Consumer lc = new MD5Consumer();
            LogConsumerThread = new Thread(lc);
            if(config.getLogConsumerEnabled()) {
                LogConsumerThread.start();
                while(!LogConsumerEnabled) {
                    Thread.sleep(1000);
                    String msg = "Waiting for MD5Consumer Initialization : Region=" + region + " Agent=" + agent + " plugin=" + plugin;
                    clog.log(msg);
                    if(!LogConsumerThread.isAlive()) {
                        System.out.println("Starting New Consumer Thread");
                        LogConsumerThread.start();
                    }
                }
            }
            */
            /*
            try {
                System.out.println("Starting MD5 Service");
                DummyServerEngine dummyEngine = new DummyServerEngine();
                Thread dummyServerThread = new Thread(dummyEngine);
                dummyServerThread.start();
            } catch(Exception ex) {
                System.out.println("Unable to Start HTTP Service : " + ex.toString());
            }
            */
            /*
            AMPQLogProducer v = new AMPQLogProducer();
            ProducerThread = new Thread(v);
            ProducerThread.start();
            while(!ProducerEnabled) {
                Thread.sleep(1000);
                String msg = "Waiting for AMPQProducer Initialization : Region=" + region + " Agent=" + agent + " plugin=" + plugin;
                clog.log(msg);
            }
            */
            wd = new WatchDog();
            wp = new WatchPerf();
            return true;
        }
        catch(Exception ex)
        {
            String msg = "ERROR IN PLUGIN: : Region=" + region + " Agent=" + agent + " plugin=" + plugin + " " + ex.toString();
            clog.error(msg);
            return false;
        }
    }

    /**
     * Handles an incoming message by executing it on a new, fire-and-forget
     * thread. If CommandExec produces a reply, its to/from addressing is
     * reversed and it is queued back to the agent.
     *
     * NOTE(review): an unbounded thread is spawned per message — a burst of
     * messages creates a burst of threads.
     */
    public void msgIn(MsgEvent me) {
        final MsgEvent ce = me;
        try {
            Thread thread = new Thread(){
                public void run(){
                    try {
                        MsgEvent re = commandExec.cmdExec(ce);
                        if(re != null) {
                            re.setReturn(); //reverse to-from for return
                            msgInQueue.offer(re); //send message back to queue
                        }
                    }
                    catch(Exception ex) {
                        System.out.println("Controller : PluginEngine : msgIn Thread: " + ex.toString());
                    }
                }
            };
            thread.start();
        }
        catch(Exception ex) {
            System.out.println("Controller : PluginEngine : msgIn Thread: " + ex.toString());
        }
    }
}
apache-2.0
bcopeland/hbase-thrift
src/main/java/org/apache/hadoop/hbase/util/CompoundBloomFilterBase.java
2859
/* * Copyright 2011 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.util; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.io.RawComparator; public class CompoundBloomFilterBase implements BloomFilterBase { /** * At read time, the total number of chunks. At write time, the number of * chunks created so far. The first chunk has an ID of 0, and the current * chunk has the ID of numChunks - 1. */ protected int numChunks; /** * The Bloom filter version. There used to be a DynamicByteBloomFilter which * had version 2. 
*/ public static final int VERSION = 3; /** Target error rate for configuring the filter and for information */ protected float errorRate; /** The total number of keys in all chunks */ protected long totalKeyCount; protected long totalByteSize; protected long totalMaxKeys; /** Hash function type to use, as defined in {@link Hash} */ protected int hashType; /** Comparator used to compare Bloom filter keys */ protected RawComparator<byte[]> comparator; @Override public long getMaxKeys() { return totalMaxKeys; } @Override public long getKeyCount() { return totalKeyCount; } @Override public long getByteSize() { return totalByteSize; } private static final byte[] DUMMY = new byte[0]; /** * Prepare an ordered pair of row and qualifier to be compared using * {@link KeyValue.KeyComparator}. This is only used for row-column Bloom * filters. */ @Override public byte[] createBloomKey(byte[] row, int roffset, int rlength, byte[] qualifier, int qoffset, int qlength) { if (qualifier == null) qualifier = DUMMY; // Make sure this does not specify a timestamp so that the default maximum // (most recent) timestamp is used. KeyValue kv = KeyValue.createFirstOnRow(row, roffset, rlength, DUMMY, 0, 0, qualifier, qoffset, qlength); return kv.getKey(); } @Override public RawComparator<byte[]> getComparator() { return comparator; } }
apache-2.0
googleapis/java-game-servers
proto-google-cloud-game-servers-v1/src/main/java/com/google/cloud/gaming/v1/Common.java
21643
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/gaming/v1/common.proto package com.google.cloud.gaming.v1; public final class Common { private Common() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_OperationMetadata_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_OperationMetadata_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_OperationMetadata_OperationStatusEntry_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_OperationMetadata_OperationStatusEntry_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_OperationStatus_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_OperationStatus_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor 
internal_static_google_cloud_gaming_v1_LabelSelector_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_LabelSelector_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_LabelSelector_LabelsEntry_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_LabelSelector_LabelsEntry_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_RealmSelector_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_RealmSelector_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_Schedule_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_Schedule_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_SpecSource_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_SpecSource_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_TargetDetails_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_TargetDetails_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleet_descriptor; 
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleet_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleetAutoscaler_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleetAutoscaler_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_TargetState_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_TargetState_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_DeployedFleetDetails_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_DeployedFleetDetails_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_DeployedFleetStatus_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_DeployedFleetStatus_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleetAutoscaler_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleetAutoscaler_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n#google/cloud/gaming/v1/common.proto\022\026g" + "oogle.cloud.gaming.v1\032\037google/api/field_" + "behavior.proto\032\036google/protobuf/duration" + ".proto\032\037google/protobuf/timestamp.proto\032" + "\034google/api/annotations.proto\"\332\003\n\021Operat" + "ionMetadata\0224\n\013create_time\030\001 \001(\0132\032.googl" + "e.protobuf.TimestampB\003\340A\003\0221\n\010end_time\030\002 " + "\001(\0132\032.google.protobuf.TimestampB\003\340A\003\022\023\n\006" + "target\030\003 \001(\tB\003\340A\003\022\021\n\004verb\030\004 \001(\tB\003\340A\003\022\033\n\016" + "status_message\030\005 \001(\tB\003\340A\003\022#\n\026requested_c" + "ancellation\030\006 \001(\010B\003\340A\003\022\030\n\013api_version\030\007 " + "\001(\tB\003\340A\003\022\030\n\013unreachable\030\010 \003(\tB\003\340A\003\022]\n\020op" + "eration_status\030\t \003(\0132>.google.cloud.gami" + "ng.v1.OperationMetadata.OperationStatusE" + "ntryB\003\340A\003\032_\n\024OperationStatusEntry\022\013\n\003key" + "\030\001 \001(\t\0226\n\005value\030\002 \001(\0132\'.google.cloud.gam" + "ing.v1.OperationStatus:\0028\001\"\356\001\n\017Operation" + "Status\022\021\n\004done\030\001 \001(\010B\003\340A\003\022E\n\nerror_code\030" + "\002 \001(\01621.google.cloud.gaming.v1.Operation" + "Status.ErrorCode\022\025\n\rerror_message\030\003 \001(\t\"" + "j\n\tErrorCode\022\032\n\026ERROR_CODE_UNSPECIFIED\020\000" + "\022\022\n\016INTERNAL_ERROR\020\001\022\025\n\021PERMISSION_DENIE" + "D\020\002\022\026\n\022CLUSTER_CONNECTION\020\003\"\201\001\n\rLabelSel" + "ector\022A\n\006labels\030\001 \003(\01321.google.cloud.gam" + "ing.v1.LabelSelector.LabelsEntry\032-\n\013Labe" + 
"lsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"" + "\037\n\rRealmSelector\022\016\n\006realms\030\001 \003(\t\"\261\001\n\010Sch" + "edule\022.\n\nstart_time\030\001 \001(\0132\032.google.proto" + "buf.Timestamp\022,\n\010end_time\030\002 \001(\0132\032.google" + ".protobuf.Timestamp\0224\n\021cron_job_duration" + "\030\003 \001(\0132\031.google.protobuf.Duration\022\021\n\tcro" + "n_spec\030\004 \001(\t\";\n\nSpecSource\022\037\n\027game_serve" + "r_config_name\030\001 \001(\t\022\014\n\004name\030\002 \001(\t\"\255\004\n\rTa" + "rgetDetails\022 \n\030game_server_cluster_name\030" + "\001 \001(\t\022#\n\033game_server_deployment_name\030\002 \001" + "(\t\022O\n\rfleet_details\030\003 \003(\01328.google.cloud" + ".gaming.v1.TargetDetails.TargetFleetDeta" + "ils\032\203\003\n\022TargetFleetDetails\022S\n\005fleet\030\001 \001(" + "\0132D.google.cloud.gaming.v1.TargetDetails" + ".TargetFleetDetails.TargetFleet\022b\n\nautos" + "caler\030\002 \001(\0132N.google.cloud.gaming.v1.Tar" + "getDetails.TargetFleetDetails.TargetFlee" + "tAutoscaler\032T\n\013TargetFleet\022\014\n\004name\030\001 \001(\t" + "\0227\n\013spec_source\030\002 \001(\0132\".google.cloud.gam" + "ing.v1.SpecSource\032^\n\025TargetFleetAutoscal" + "er\022\014\n\004name\030\001 \001(\t\0227\n\013spec_source\030\002 \001(\0132\"." + "google.cloud.gaming.v1.SpecSource\"E\n\013Tar" + "getState\0226\n\007details\030\001 \003(\0132%.google.cloud" + ".gaming.v1.TargetDetails\"\233\005\n\024DeployedFle" + "etDetails\022R\n\016deployed_fleet\030\001 \001(\0132:.goog" + "le.cloud.gaming.v1.DeployedFleetDetails." 
+ "DeployedFleet\022a\n\023deployed_autoscaler\030\002 \001" + "(\0132D.google.cloud.gaming.v1.DeployedFlee" + "tDetails.DeployedFleetAutoscaler\032\303\002\n\rDep" + "loyedFleet\022\r\n\005fleet\030\001 \001(\t\022\022\n\nfleet_spec\030" + "\002 \001(\t\0227\n\013spec_source\030\003 \001(\0132\".google.clou" + "d.gaming.v1.SpecSource\022^\n\006status\030\005 \001(\0132N" + ".google.cloud.gaming.v1.DeployedFleetDet" + "ails.DeployedFleet.DeployedFleetStatus\032v" + "\n\023DeployedFleetStatus\022\026\n\016ready_replicas\030" + "\001 \001(\003\022\032\n\022allocated_replicas\030\002 \001(\003\022\031\n\021res" + "erved_replicas\030\003 \001(\003\022\020\n\010replicas\030\004 \001(\003\032\205" + "\001\n\027DeployedFleetAutoscaler\022\022\n\nautoscaler" + "\030\001 \001(\t\0227\n\013spec_source\030\004 \001(\0132\".google.clo" + "ud.gaming.v1.SpecSource\022\035\n\025fleet_autosca" + "ler_spec\030\003 \001(\tB\\\n\032com.google.cloud.gamin" + "g.v1P\001Z<google.golang.org/genproto/googl" + "eapis/cloud/gaming/v1;gamingb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.protobuf.DurationProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), com.google.api.AnnotationsProto.getDescriptor(), }); internal_static_google_cloud_gaming_v1_OperationMetadata_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_gaming_v1_OperationMetadata_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_OperationMetadata_descriptor, new java.lang.String[] { "CreateTime", "EndTime", "Target", "Verb", "StatusMessage", "RequestedCancellation", "ApiVersion", "Unreachable", "OperationStatus", }); internal_static_google_cloud_gaming_v1_OperationMetadata_OperationStatusEntry_descriptor = 
internal_static_google_cloud_gaming_v1_OperationMetadata_descriptor.getNestedTypes().get(0); internal_static_google_cloud_gaming_v1_OperationMetadata_OperationStatusEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_OperationMetadata_OperationStatusEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_gaming_v1_OperationStatus_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_google_cloud_gaming_v1_OperationStatus_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_OperationStatus_descriptor, new java.lang.String[] { "Done", "ErrorCode", "ErrorMessage", }); internal_static_google_cloud_gaming_v1_LabelSelector_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_cloud_gaming_v1_LabelSelector_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_LabelSelector_descriptor, new java.lang.String[] { "Labels", }); internal_static_google_cloud_gaming_v1_LabelSelector_LabelsEntry_descriptor = internal_static_google_cloud_gaming_v1_LabelSelector_descriptor.getNestedTypes().get(0); internal_static_google_cloud_gaming_v1_LabelSelector_LabelsEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_LabelSelector_LabelsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_cloud_gaming_v1_RealmSelector_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_gaming_v1_RealmSelector_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_RealmSelector_descriptor, new java.lang.String[] { "Realms", }); internal_static_google_cloud_gaming_v1_Schedule_descriptor = 
getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_gaming_v1_Schedule_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_Schedule_descriptor, new java.lang.String[] { "StartTime", "EndTime", "CronJobDuration", "CronSpec", }); internal_static_google_cloud_gaming_v1_SpecSource_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_gaming_v1_SpecSource_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_SpecSource_descriptor, new java.lang.String[] { "GameServerConfigName", "Name", }); internal_static_google_cloud_gaming_v1_TargetDetails_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_gaming_v1_TargetDetails_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_TargetDetails_descriptor, new java.lang.String[] { "GameServerClusterName", "GameServerDeploymentName", "FleetDetails", }); internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_descriptor = internal_static_google_cloud_gaming_v1_TargetDetails_descriptor.getNestedTypes().get(0); internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_descriptor, new java.lang.String[] { "Fleet", "Autoscaler", }); internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleet_descriptor = internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleet_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleet_descriptor, new java.lang.String[] { "Name", "SpecSource", }); internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleetAutoscaler_descriptor = internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_descriptor .getNestedTypes() .get(1); internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleetAutoscaler_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_TargetDetails_TargetFleetDetails_TargetFleetAutoscaler_descriptor, new java.lang.String[] { "Name", "SpecSource", }); internal_static_google_cloud_gaming_v1_TargetState_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_gaming_v1_TargetState_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_TargetState_descriptor, new java.lang.String[] { "Details", }); internal_static_google_cloud_gaming_v1_DeployedFleetDetails_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_gaming_v1_DeployedFleetDetails_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_DeployedFleetDetails_descriptor, new java.lang.String[] { "DeployedFleet", "DeployedAutoscaler", }); internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_descriptor = internal_static_google_cloud_gaming_v1_DeployedFleetDetails_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_descriptor, new java.lang.String[] { "Fleet", "FleetSpec", "SpecSource", "Status", }); 
internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_DeployedFleetStatus_descriptor = internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_DeployedFleetStatus_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleet_DeployedFleetStatus_descriptor, new java.lang.String[] { "ReadyReplicas", "AllocatedReplicas", "ReservedReplicas", "Replicas", }); internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleetAutoscaler_descriptor = internal_static_google_cloud_gaming_v1_DeployedFleetDetails_descriptor .getNestedTypes() .get(1); internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleetAutoscaler_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_gaming_v1_DeployedFleetDetails_DeployedFleetAutoscaler_descriptor, new java.lang.String[] { "Autoscaler", "SpecSource", "FleetAutoscalerSpec", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.protobuf.DurationProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); com.google.api.AnnotationsProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
apache-2.0
opensim-org/opensim-core
OpenSim/Simulation/Model/Force.cpp
7667
/* -------------------------------------------------------------------------- * * OpenSim: Force.cpp * * -------------------------------------------------------------------------- * * The OpenSim API is a toolkit for musculoskeletal modeling and simulation. * * See http://opensim.stanford.edu and the NOTICE file for more information. * * OpenSim is developed at Stanford University and supported by the US * * National Institutes of Health (U54 GM072970, R24 HD065690) and by DARPA * * through the Warrior Web program. * * * * Copyright (c) 2005-2017 Stanford University and the Authors * * Author(s): Ajay Seth * * * * Licensed under the Apache License, Version 2.0 (the "License"); you may * * not use this file except in compliance with the License. You may obtain a * * copy of the License at http://www.apache.org/licenses/LICENSE-2.0. * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * -------------------------------------------------------------------------- */ #include "Force.h" #include "Model.h" #include <OpenSim/Simulation/Model/ForceAdapter.h> using namespace SimTK; namespace OpenSim { //============================================================================= // CONSTRUCTOR(S) AND DESTRUCTOR //============================================================================= //_____________________________________________________________________________ // Default constructor. 
Force::Force() { setNull(); constructProperties(); } //============================================================================= // CONSTRUCTION METHODS //============================================================================= //_____________________________________________________________________________ // Set the data members of this Force to their null values. void Force::setNull() { setAuthors("Peter Eastman, Ajay Seth"); } //_____________________________________________________________________________ // Define properties. void Force::constructProperties() { constructProperty_appliesForce(true); } void Force::updateFromXMLNode(SimTK::Xml::Element& node, int versionNumber) { if(versionNumber < XMLDocument::getLatestVersion()) { if (versionNumber < 30509) { // Rename property 'isDisabled' to 'appliesForce' and // negate the contained value. std::string oldName{ "isDisabled" }; std::string newName{ "appliesForce" }; if (node.hasElement(oldName)) { auto elem = node.getRequiredElement(oldName); bool isDisabled = false; elem.getValue().tryConvertToBool(isDisabled); // now update tag name to 'appliesForce' elem.setElementTag(newName); // update its value to be the opposite of 'isDisabled' elem.setValue(SimTK::String(!isDisabled)); } } } Super::updateFromXMLNode(node, versionNumber); } // Create an underlying SimTK::Force to represent the OpenSim::Force in the // computational system. Create a SimTK::Force::Custom by default. 
void Force::extendAddToSystem(SimTK::MultibodySystem& system) const { Super::extendAddToSystem(system); ForceAdapter* adapter = new ForceAdapter(*this); SimTK::Force::Custom force(_model->updForceSubsystem(), adapter); // Beyond the const Component get the index so we can access the SimTK::Force later Force* mutableThis = const_cast<Force *>(this); mutableThis->_index = force.getForceIndex(); } void Force::extendInitStateFromProperties(SimTK::State& s) const { Super::extendInitStateFromProperties(s); SimTK::Force& simForce = _model->updForceSubsystem().updForce(_index); // Otherwise we have to change the status of the constraint if(get_appliesForce()) simForce.enable(s); else simForce.disable(s); } void Force::extendSetPropertiesFromState(const SimTK::State& state) { Super::extendSetPropertiesFromState(state); set_appliesForce(appliesForce(state)); } //_____________________________________________________________________________ /** * Set whether or not this Force is applied. * Simbody multibody system instance is realized every time the appliesForce * changes, BUT multiple sets to the same value have no cost. * * @param applyForce If true the force is applied (or enabled). If false the Force is not applied (or disabled). 
*/ void Force::setAppliesForce(SimTK::State& s, bool applyForce) const { if(_index.isValid()){ SimTK::Force& simtkForce = _model->updForceSubsystem().updForce(_index); if(applyForce) simtkForce.enable(s); else simtkForce.disable(s); } } bool Force::appliesForce(const SimTK::State& s) const { if(_index.isValid()){ SimTK::Force& simtkForce = _model->updForceSubsystem().updForce(_index); return !simtkForce.isDisabled(s); } return get_appliesForce(); } //----------------------------------------------------------------------------- // ABSTRACT METHODS //----------------------------------------------------------------------------- //_____________________________________________________________________________ double Force::computePotentialEnergy(const SimTK::State& state) const { return 0.0; } //----------------------------------------------------------------------------- // METHODS TO APPLY FORCES AND TORQUES //----------------------------------------------------------------------------- void Force::applyForceToPoint(const SimTK::State &s, const PhysicalFrame &frame, const Vec3& point, const Vec3& forceInG, Vector_<SpatialVec> &bodyForces) const { // get the point expressed in frame, F, expressed in the base, B. auto p_B = frame.findTransformInBaseFrame()*point; _model->getMatterSubsystem().addInStationForce(s, frame.getMobilizedBodyIndex(), p_B, forceInG, bodyForces); } void Force::applyTorque(const SimTK::State &s, const PhysicalFrame& frame, const Vec3& torque, Vector_<SpatialVec> &bodyForces) const { _model->getMatterSubsystem().addInBodyTorque(s, frame.getMobilizedBodyIndex(), torque, bodyForces); } void Force::applyGeneralizedForce(const SimTK::State &s, const Coordinate &coord, double force, Vector &mobilityForces) const { _model->getMatterSubsystem().addInMobilityForce(s, SimTK::MobilizedBodyIndex(coord.getBodyIndex()), SimTK::MobilizerUIndex(coord.getMobilizerQIndex()), force, mobilityForces); } } // end of namespace OpenSim
apache-2.0
eCollobro/eCollabro
eCollabro.Service.DataContracts/Core/SiteRoleFeaturesRequest.cs
799
// <copyright company="eCollabro">
// Copyright (c) 2014 All Rights Reserved
// Collaborative Framework and CMS - eCollabro.com
// </copyright>
// <author>Anand Singh</author>

#region References
using System.Runtime.Serialization;
#endregion

namespace eCollabro.Service.DataContracts.RequestWrapper
{
    /// <summary>
    /// Request wrapper carrying the site and role identifiers needed to
    /// look up the features granted to a role on a particular site.
    /// </summary>
    [DataContract]
    public class SiteRoleFeaturesRequest : BaseServiceRequest
    {
        /// <summary>
        /// Identifier of the site the request targets.
        /// </summary>
        [DataMember]
        public int SiteID { get; set; }

        /// <summary>
        /// Identifier of the role whose features are requested.
        /// </summary>
        [DataMember]
        public int RoleID { get; set; }
    }
}
apache-2.0
devilgate/pertwee
src/main/java/software/tinlion/pertwee/Item.java
860
package software.tinlion.pertwee;

import java.util.List;

import software.tinlion.pertwee.exception.RequiredElementNotPresentException;

/**
 * Represents an item in a feed, e.g. a blog post, podcast episode, etc.
 *
 * <p>Note: interface methods are implicitly {@code public}; the redundant
 * modifier has been dropped uniformly (the original mixed both forms).
 */
public interface Item {

    /**
     * Returns the unique identifier of this item.
     *
     * @throws RequiredElementNotPresentException if the feed omits the id,
     *         which the JSON Feed spec requires.
     */
    String id() throws RequiredElementNotPresentException;

    /** Returns the item's content as plain text. */
    String contentText();

    /** Returns the item's content as HTML. */
    String contentHtml();

    /** Returns the URL of the resource this item describes. */
    String url();

    /** Returns the URL of an external resource this item links to. */
    String externalUrl();

    /** Returns the item's title. */
    String title();

    /** Returns a short summary of the item. */
    String summary();

    /** Returns the URL of the item's main image. */
    String image();

    /** Returns the URL of the item's banner image. */
    String bannerImage();

    /** Returns the publication date. */
    String datePublished();

    /** Returns the last-modified date. */
    String dateModified();

    /** Returns the item's (single) author. */
    Author author();

    /**
     * Returns all authors of the item.
     *
     * @throws RequiredElementNotPresentException if authors are required but
     *         absent.
     */
    List<Author> authors() throws RequiredElementNotPresentException;

    /** Returns the item's tags. */
    List<String> tags();

    /** Returns the item's language. */
    String language();

    /** Returns whether this item carries any attachments. */
    boolean hasAttachments();

    /** Returns the item's attachments. */
    List<Attachment> attachments();
}
apache-2.0
juweiping/ocms
src/org/openuap/cms/survey/dao/hibernate/VoterDaoImpl.java
6935
/**
 * $Id: VoterDaoImpl.java 3951 2010-11-02 10:13:17Z orangeforjava $
 */
package org.openuap.cms.survey.dao.hibernate;

import java.util.List;

import org.openuap.base.dao.hibernate.BaseDaoHibernate;
import org.openuap.base.util.QueryInfo;
import org.openuap.base.util.context.PageBuilder;
import org.openuap.cms.survey.dao.VoterDao;
import org.openuap.cms.survey.model.Answer;
import org.openuap.cms.survey.model.AnswerItem;
import org.openuap.cms.survey.model.Voter;

/**
 * <p>
 * Title: VoterDaoImpl
 * </p>
 *
 * <p>
 * Description: Hibernate-backed DAO for survey voters, their answers and
 * answer items.
 * </p>
 *
 * <p>
 * Copyright: Copyright (c) 2006
 * </p>
 *
 * <p>
 * Company: http://www.openuap.org
 * </p>
 *
 * NOTE(review): HQL here is built by concatenating Long-typed ids, so SQL
 * injection exposure is limited to numeric values; still, positional
 * parameters (as used in searchVoter/getVoterById) would be the safer style.
 *
 * @author Weiping Ju
 * @version 1.0
 */
public class VoterDaoImpl extends BaseDaoHibernate implements VoterDao {

    public VoterDaoImpl() {
    }

    /** Persists a new voter and returns its generated id. */
    public Long addVoter(Voter voter) {
        return (Long) this.getHibernateTemplate().save(voter);
    }

    /** Looks up a voter by name; returns null when none matches. */
    public Voter getVoterByName(String name) {
        String hql = "from Voter where voterName=?";
        return (Voter) this.findUniqueResult(hql, new Object[] { name });
    }

    /** Inserts or updates the given voter. */
    public void saveVoter(Voter voter) {
        this.getHibernateTemplate().saveOrUpdate(voter);
    }

    /** Deletes the given voter. */
    public void deleteVoter(Voter voter) {
        this.getHibernateTemplate().delete(voter);
    }

    /** Finds the voter with the given IP address within a voter group. */
    public Voter searchVoter(String ipAddress, Long groupId) {
        String hql = "FROM Voter AS vr WHERE vr.voterIpaddress=? AND vr.voterGroupId=?";
        return (Voter) this.findUniqueResult(hql,
                new Object[] { ipAddress, groupId });
    }

    /** Deletes the voter with the given id, if it exists. */
    public void deleteVoterById(Long voterId) {
        Voter voter = getVoterById(voterId);
        if (voter != null) {
            this.deleteVoter(voter);
        }
    }

    /** Persists a new answer. */
    public void addAnswer(Answer answer) {
        this.getHibernateTemplate().save(answer);
    }

    /** Inserts or updates the given answer. */
    public void saveAnswer(Answer answer) {
        this.getHibernateTemplate().saveOrUpdate(answer);
    }

    /** Deletes the given answer. */
    public void deleteAnswer(Answer answer) {
        this.getHibernateTemplate().delete(answer);
    }

    /** Deletes all answers recorded for the given voter. */
    public void deleteAnswerByVoter(Long voterId) {
        String hql = "delete from Answer where voterId=" + voterId;
        this.executeUpdate(hql);
    }

    /** Deletes all answers recorded for the given survey. */
    public void deleteAnswerBySurvey(Long surveyId) {
        String hql = "delete from Answer where surveyId=" + surveyId;
        this.executeUpdate(hql);
    }

    /** Pages through the answers of a survey, ordered by question. */
    public List getAnswer(Long surveyId, QueryInfo qi, PageBuilder pb) {
        String hql = "select e from Answer e where e.surveyId=" + surveyId
                + " order by e.questionId";
        String hql_count = "select count(*) from Answer where surveyId="
                + surveyId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Pages through the answers of a voter, ordered by question. */
    public List getAnswerByVoter(Long voterId, QueryInfo qi, PageBuilder pb) {
        String hql = "select e from Answer e where e.voterId=" + voterId
                + " order by e.questionId";
        String hql_count = "select count(*) from Answer where voterId="
                + voterId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Persists a new answer item. */
    public void addAnswerItem(AnswerItem answerItem) {
        this.addObject(answerItem);
    }

    /** Inserts or updates the given answer item. */
    public void saveAnswerItem(AnswerItem answerItem) {
        this.saveObject(answerItem);
    }

    /**
     * Deletes the given entity.
     *
     * NOTE(review): the parameter is typed {@link Answer}, not
     * {@link AnswerItem}, despite the method name — this mirrors the
     * VoterDao interface, so the signature is preserved; confirm intent.
     */
    public void deleteAnswerItem(Answer answerItem) {
        this.deleteObject(answerItem);
    }

    /** Deletes all answer items recorded for the given voter. */
    public void deleteAnswerItemByVoter(Long voterId) {
        String hql = "delete from AnswerItem where voterId=" + voterId;
        this.executeUpdate(hql);
    }

    /** Deletes all answer items recorded for the given survey. */
    public void deleteAnswerItemBySurvey(Long surveyId) {
        String hql = "delete from AnswerItem where surveyId=" + surveyId;
        this.executeUpdate(hql);
    }

    /** Pages through a survey's answer items, ordered by question. */
    public List getAnswerItemBySurvey(Long surveyId, QueryInfo qi,
            PageBuilder pb) {
        String hql = "select e from AnswerItem e where e.surveyId=" + surveyId
                + " order by e.questionId";
        String hql_count = "select count(*) from AnswerItem where surveyId="
                + surveyId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Pages through a voter's answer items, ordered by question. */
    public List getAnswerItemByVoter(Long voterId, QueryInfo qi,
            PageBuilder pb) {
        String hql = "select e from AnswerItem e where e.voterId=" + voterId
                + " order by e.questionId";
        String hql_count = "select count(*) from AnswerItem where voterId="
                + voterId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Looks up a voter by id; returns null when none matches. */
    public Voter getVoterById(Long voterId) {
        String hql = "from Voter where voterId=?";
        return (Voter) this.findUniqueResult(hql, new Object[] { voterId });
    }

    /** Pages through the answers to a question. */
    public List getAnswerByQuestion(Long questionId, QueryInfo qi,
            PageBuilder pb) {
        String hql = "select e from Answer e where e.questionId=" + questionId
                + " order by e.questionId";
        String hql_count = "select count(*) from Answer where questionId="
                + questionId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Finds the single answer for (record, voter, question). */
    public Answer getAnswerById(Long surveyRecordId, Long voterId,
            Long questionId) {
        String hql = "from Answer where surveyRecordId=" + surveyRecordId
                + " and voterId=" + voterId + " and questionId=" + questionId;
        return (Answer) this.findUniqueResult(hql);
    }

    /** Pages through the answer items referencing a question item. */
    public List getAnswerItemByItem(Long itemId, QueryInfo qi, PageBuilder pb) {
        String hql = "select e from AnswerItem e where e.questionItemId="
                + itemId + " ";
        String hql_count = "select count(*) from AnswerItem where questionItemId="
                + itemId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Finds the single answer item for (item, voter, survey). */
    public AnswerItem getAnswerItemById(Long itemId, Long voterId,
            Long surveyId) {
        String hql = "from AnswerItem where surveyId=" + surveyId
                + " and voterId=" + voterId + " and questionItemId=" + itemId;
        return (AnswerItem) this.findUniqueResult(hql);
    }

    /** Pages through the voters of a survey record, newest poll first. */
    public List getVoters(Long surveyId, Long surveyRecordId, QueryInfo qi,
            PageBuilder pb) {
        String hql = "select e from Voter e where e.voterSurveyId=" + surveyId
                + " and e.surveyRecordId=" + surveyRecordId
                + " order by e.voterPollDate desc";
        String hql_count = "select count(*) from Voter e where e.voterSurveyId="
                + surveyId + " and e.surveyRecordId=" + surveyRecordId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Pages through the answers of a survey record, ordered by question. */
    public List getAnswer(Long surveyId, Long surveyRecordId, QueryInfo qi,
            PageBuilder pb) {
        String hql = "select e from Answer e where e.surveyId=" + surveyId
                + " and e.surveyRecordId=" + surveyRecordId
                + " order by e.questionId";
        // BUG FIX: the original concatenated surveyId + "and ..." without a
        // separating space, producing invalid HQL like "surveyId=5and ...".
        String hql_count = "select count(*) from Answer where surveyId="
                + surveyId + " and surveyRecordId=" + surveyRecordId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Pages through the answers to a question within a survey record. */
    public List getAnswerByQuestion(Long questionId, Long surveyRecordId,
            QueryInfo qi, PageBuilder pb) {
        String hql = "select e from Answer e where e.questionId=" + questionId
                + " and e.surveyRecordId=" + surveyRecordId
                + " order by e.questionId";
        String hql_count = "select count(*) from Answer e where e.questionId="
                + questionId + " and e.surveyRecordId=" + surveyRecordId;
        return this.getObjects(hql, hql_count, qi, pb);
    }

    /** Counts the answers to a question within a survey record. */
    public int getQuestionAnswerTotalCount(Long surveyRecordId,
            Long questionId) {
        String hql = "select count(*) from Answer e where e.questionId="
                + questionId + " and e.surveyRecordId=" + surveyRecordId;
        return this.getIntFieldValue(hql);
    }
}
apache-2.0
Seltzer/Algovis---Algorithm-Visualisation
branches/resources/cxxtest/v3.10.1/test/main.cpp
876
#include <cxxtest/TestRunner.h>
#include <cxxtest/TestListener.h>

#include <stdio.h>

//
// A test runner that prints a few summary statistics once the whole run
// has finished.  <stdio.h> is used rather than <iostream> so the runner
// also builds with older compilers.
//

using namespace CxxTest;

class SummaryPrinter : public CxxTest::TestListener
{
public:
    // Execute every registered test, reporting through this listener.
    void run()
    {
        CxxTest::TestRunner::runAllTests( *this );
    }

    // Called once after all suites have run; print the totals.
    void leaveWorld( const CxxTest::WorldDescription &wd )
    {
        printf( "Number of suites: %u\n", wd.numSuites() );
        printf( "Number of tests: %u\n", wd.numTotalTests() );
        printf( "Number of failed tests: %u\n", TestTracker::tracker().failedTests() );
    }
};

int main()
{
    SummaryPrinter printer;
    printer.run();
    return 0;
}

//
// Local Variables:
// compile-command: "perl test.pl"
// End:
//
apache-2.0
tiborvass/docker
volume/service/default_driver_stubs.go
293
// +build !linux,!windows package service // import "github.com/tiborvass/docker/volume/service" import ( "github.com/tiborvass/docker/pkg/idtools" "github.com/tiborvass/docker/volume/drivers" ) func setupDefaultDriver(_ *drivers.Store, _ string, _ idtools.Identity) error { return nil }
apache-2.0
projectcypress/cdatools
exporter/cat3/cat3.go
4179
package cat3

import (
	"time"

	"github.com/projectcypress/cdatools/models"
)

// Authenticator wraps models.Authenticator together with its renderable Author.
type Authenticator struct {
	models.Authenticator
	Author
}

// NewAuthenticator builds a cat3 Authenticator from its model counterpart,
// wrapping the nested author for template rendering.
func NewAuthenticator(a models.Authenticator) Authenticator {
	return Authenticator{
		Authenticator: a,
		Author:        NewAuthor(a.Author),
	}
}

// Header wraps models.Header together with its renderable authenticator and authors.
type Header struct {
	models.Header
	Authenticator
	Authors
}

// NewHeader builds a cat3 Header from its model counterpart, wrapping the
// nested authenticator and author list for template rendering.
func NewHeader(h models.Header) Header {
	return Header{
		Header:        h,
		Authenticator: NewAuthenticator(h.Authenticator),
		Authors:       NewAuthors(h.Authors),
	}
}

// Doc is the root object rendered into a QRDA Category III ClinicalDocument.
type Doc struct {
	Header         Header
	Measures       models.Measure
	MeasureSection MeasureSection
	StartDate      int64 // reporting period start (Unix seconds)
	EndDate        int64 // reporting period end (Unix seconds)
	Timestamp      int64 // document generation time (Unix seconds, UTC)
}

// NewDoc assembles a Doc from its parts; Timestamp is set to the current UTC
// time so the document records when it was generated.
func NewDoc(h models.Header, ms MeasureSection, m models.Measure, start int64, end int64) Doc {
	timeNow := time.Now().UTC().Unix()
	return Doc{
		Header:         NewHeader(h),
		Measures:       m,
		MeasureSection: ms,
		StartDate:      start,
		EndDate:        end,
		Timestamp:      timeNow,
	}
}

// Template returns the text/template source for the QRDA Category III CDA
// header. Nested sections (authors, organization, authenticator ids/addresses,
// measure section) are rendered via the Print template helper.
func (d Doc) Template() string {
	t := `<?xml version="1.0" encoding="utf-8"?> <ClinicalDocument xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="urn:hl7-org:v3" xmlns:cda="urn:hl7-org:v3"> <!-- ******************************************************** CDA Header ******************************************************** --> <realmCode code="US"/> <typeId root="2.16.840.1.113883.1.3" extension="POCD_HD000040"/> <!-- QRDA Category III template ID (this template ID differs from QRDA III comment only template ID). --> <templateId root="2.16.840.1.113883.10.20.27.1.1" extension="2016-09-01"/> <id {{if .Header.Identifier.Root}}root="{{escape .Header.Identifier.Root}}"{{end}} extension="{{escape .Header.Identifier.Extension}}" /> <!-- SHALL QRDA III document type code --> <code code="55184-6" codeSystem="2.16.840.1.113883.6.1" codeSystemName="LOINC" displayName="Quality Reporting Document Architecture Calculated Summary Report"/> <!-- SHALL Title, SHOULD have this content --> <title>QRDA Calculated Summary Report</title> <!-- SHALL --> <effectiveTime value="{{timeToFormat .Timestamp "20060102"}}"/> <confidentialityCode codeSystem="2.16.840.1.113883.5.25" code="N"/> <languageCode code="en"/> <!-- SHOULD The version of the file being submitted. --> <versionNumber value="1"/> <!-- SHALL contain recordTarget and ID - but ID is nulled to NA. This is an aggregate summary report. Therefore CDA's required patient identifier is nulled. --> <recordTarget> <patientRole> <id nullFlavor="NA"/> </patientRole> </recordTarget> {{Print .Header.Authors.Template .Header.Authors}} <!-- SHALL have 1..* author. MAY be device or person. The author of the CDA document in this example is a device at a data submission vendor/registry. --> <!-- The custodian of the CDA document is the same as the legal authenticator in this example and represents the reporting organization. --> <!-- SHALL --> <custodian> <assignedCustodian> {{Print .Header.Organization.Template .Header.Organization}} <!--TagName "representedCustodianOrganization"--> </assignedCustodian> </custodian> <!-- The legal authenticator of the CDA document is a single person who is at the same organization as the custodian in this example. This element must be present. --> <!-- SHALL --> <legalAuthenticator> <!-- SHALL --> <time value="{{.Header.Authenticator.Author.Time}}"/> <!-- SHALL --> <signatureCode code="S"/> <assignedEntity> <!-- SHALL ID --> {{Print .Header.Authenticator.Author.Ids.Template .Header.Authenticator.Author.Ids}} {{Print .Header.Authenticator.Author.Addresses.Template .Header.Authenticator.Author.Addresses}} <assignedPerson> <name> <given>{{escape .Header.Authenticator.Author.Person.First}}</given> <family>{{escape .Header.Authenticator.Author.Person.Last}}</family> </name> </assignedPerson> {{Print .Header.Authenticator.Author.Organization.Template .Header.Authenticator.Author.Organization}} <!--TagName "representedOrganization"--> </assignedEntity> </legalAuthenticator> {{Print .MeasureSection.Template .MeasureSection}} </ClinicalDocument>`
	return t
}
apache-2.0
durban/seals
checker/src/main/scala/dev/tauri/seals/checker/package.scala
856
/*
 * Copyright 2017-2020 Daniel Urban and contributors listed in AUTHORS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package dev.tauri.seals

import scala.reflect.runtime.{ universe => ru }

import cats.Show

package object checker {

  /** Renders runtime-reflection symbols through `ru.show`, used when reporting them. */
  private[checker] implicit val ruSymbolShow: Show[ru.Symbol] =
    Show.show(ru.show(_))
}
apache-2.0
mysll/flynet
server/src/server/util/ringbuffer.go
1647
package util

import (
	"container/list"
	"errors"
	"sync"
)

var (
	// ERRRINGBUFFULL is returned by Push when the message cannot be stored
	// without overwriting unread data.
	ERRRINGBUFFULL = errors.New("ringbuffer full")
)

// Message records where one pushed message lives inside the ring storage.
type Message struct {
	begin int // start offset within RingBuffer.buffer
	size  int // message length in bytes
}

// RingBuffer is a mutex-guarded FIFO of variable-length byte messages backed
// by one contiguous byte slice. Messages are stored contiguously (they never
// wrap); when a message does not fit in the tail, the write position jumps
// back to the start of the buffer.
type RingBuffer struct {
	buffer []byte
	l      sync.RWMutex
	first  int        // offset of the oldest unread message
	end    int        // offset where the next message will be written
	size   int        // capacity of buffer in bytes
	msgs   *list.List // FIFO of Message records, oldest first
}

// require reserves lens contiguous bytes and returns the offset to write at,
// or -1 when there is no room. The caller must hold the write lock.
func (r *RingBuffer) require(lens int) int {
	if lens > r.size {
		return -1
	}
	// first == end while messages are queued means the ring is exactly full.
	if r.msgs.Len() > 0 && r.first == r.end {
		return -1
	}
	if r.first <= r.end {
		// Free space is the tail [end, size); wrap to 0 when it is too small.
		if r.end+lens > r.size {
			r.end = 0
			return r.require(lens)
		}
	} else {
		// Writer already wrapped; free space is [end, first).
		if r.end+lens > r.first {
			return -1
		}
	}
	return r.end
}

// Count returns the number of buffered messages.
func (r *RingBuffer) Count() int {
	r.l.RLock()
	defer r.l.RUnlock()
	return r.msgs.Len()
}

// Empty reports whether no messages are buffered.
//
// FIX: the original read msgs.Len() without taking the read lock, racing with
// concurrent Push/Pop; it now locks exactly like Count does.
func (r *RingBuffer) Empty() bool {
	r.l.RLock()
	defer r.l.RUnlock()
	return r.msgs.Len() == 0
}

// Push copies data into the ring and queues it as one message. It returns
// ERRRINGBUFFULL when the message does not fit.
func (r *RingBuffer) Push(data []byte) error {
	r.l.Lock()
	defer r.l.Unlock()
	datasize := len(data)
	if idx := r.require(datasize); idx != -1 {
		r.end = idx + datasize
		copy(r.buffer[idx:idx+datasize], data)
		r.msgs.PushBack(Message{begin: idx, size: datasize})
		return nil
	}
	return ERRRINGBUFFULL
}

// Pop removes and returns the oldest message. The returned slice aliases the
// internal buffer and is only valid until a later Push reuses that region;
// callers that keep the bytes must copy them.
func (r *RingBuffer) Pop() ([]byte, error) {
	r.l.Lock()
	defer r.l.Unlock()
	if r.msgs.Len() == 0 {
		return nil, errors.New("no data")
	}
	e := r.msgs.Front()
	m := e.Value.(Message)
	r.msgs.Remove(e)
	r.first = m.begin + m.size
	return r.buffer[m.begin : m.begin+m.size], nil
}

// Reset discards all buffered messages and rewinds the ring to empty.
func (r *RingBuffer) Reset() {
	r.l.Lock()
	defer r.l.Unlock()
	r.first = 0
	r.end = 0
	r.msgs = list.New()
}

// NewRingBuffer creates a ring buffer with the given byte capacity.
func NewRingBuffer(capacity int) *RingBuffer {
	return &RingBuffer{
		buffer: make([]byte, capacity),
		size:   capacity,
		msgs:   list.New(),
	}
}
apache-2.0
skonves/Konves.TextGraph
src/Konves.TextGraph/Annotations/List.cs
605
using Konves.TextGraph.Models;

namespace Konves.TextGraph.Annotations
{
	/// <summary>
	/// Structural annotation marking a span of text as an ordered or unordered list.
	/// </summary>
	public sealed class List : Annotation
	{
		public List(int offset, int length, bool isOrdered) : base(offset, length)
		{
			IsOrdered = isOrdered;
		}

		/// <summary>Whether the list is ordered (numbered) rather than bulleted.</summary>
		public bool IsOrdered { get; }

		public override string Subtype { get { return "list"; } }

		public override string Type { get { return "structure"; } }

		public override bool Equals(object obj)
		{
			// Null-safe: the ?. avoids a NullReferenceException if base.Equals
			// ever accepts a non-List instance.
			return base.Equals(obj) && (obj as List)?.IsOrdered == IsOrdered;
		}

		public override int GetHashCode()
		{
			// BUG FIX: the original seeded the combiner with GetHashCode() — this
			// very override — causing unbounded recursion and a
			// StackOverflowException on first use. Seed with the base hash instead.
			return GetHashCode(base.GetHashCode(), IsOrdered);
		}
	}
}
apache-2.0
qrealka/skia-hc
samplecode/SampleLayers.cpp
8184
/*
 * Copyright 2011 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SampleCode.h"
#include "SkView.h"
#include "SkCanvas.h"
#include "SkBlurMaskFilter.h"
#include "SkCamera.h"
#include "SkColorFilter.h"
#include "SkColorPriv.h"
#include "SkDevice.h"
#include "SkGradientShader.h"
#include "SkImage.h"
#include "SkInterpolator.h"
#include "SkMaskFilter.h"
#include "SkPath.h"
#include "SkRegion.h"
#include "SkShader.h"
#include "SkTime.h"
#include "SkTypeface.h"
#include "SkUtils.h"
#include "SkKey.h"
#include "SkXfermode.h"
#include "SkDrawFilter.h"

// Fills *paint with a transparent->white vertical linear gradient (20px tall,
// transformed by localMatrix) in kDstIn mode, so drawing it fades out whatever
// is already in the layer.
static void make_paint(SkPaint* paint, const SkMatrix& localMatrix) {
    SkColor colors[] = { 0, SK_ColorWHITE };
    SkPoint pts[] = { { 0, 0 }, { 0, SK_Scalar1*20 } };
    paint->setShader(SkGradientShader::MakeLinear(pts, colors, nullptr, 2,
                                                  SkShader::kClamp_TileMode, 0, &localMatrix));
    paint->setXfermodeMode(SkXfermode::kDstIn_Mode);
}

// Debug helper: prints every layer currently on the canvas (device size,
// offset, clip bounds and paint alpha) to the Skia debug log.
static void dump_layers(const char label[], SkCanvas* canvas) {
    SkDebugf("Dump Layers(%s)\n", label);

    SkCanvas::LayerIter iter(canvas, true);
    int index = 0;
    while (!iter.done()) {
        SkImageInfo info = iter.device()->imageInfo();
        const SkIRect& clip = iter.clip().getBounds();
        SkDebugf("Layer[%d] bitmap [%d %d] X=%d Y=%d clip=[%d %d %d %d] alpha=%d\n", index++,
                 info.width(), info.height(), iter.x(), iter.y(),
                 clip.fLeft, clip.fTop, clip.fRight, clip.fBottom,
                 iter.paint().getAlpha());
        iter.next();
    }
}

// test drawing with strips of fading gradient above and below
static void test_fade(SkCanvas* canvas) {
    SkAutoCanvasRestore ar(canvas, true);

    SkRect r;

    SkPaint p;
    p.setAlpha(0x88);

    SkAutoCanvasRestore ar2(canvas, false);

    // create the layers: a 20px strip at the top and one at the bottom of a
    // 100x100 clip, which the gradient paint below will fade out.

    r.set(0, 0, SkIntToScalar(100), SkIntToScalar(100));
    canvas->clipRect(r);

    r.fBottom = SkIntToScalar(20);
    canvas->saveLayer(&r, nullptr);

    r.fTop = SkIntToScalar(80);
    r.fBottom = SkIntToScalar(100);
    canvas->saveLayer(&r, nullptr);

    // now draw the "content"
    // (the if (true)/else pair keeps both the layered and the direct drawing
    // variants around for experimentation)

    if (true) {
        r.set(0, 0, SkIntToScalar(100), SkIntToScalar(100));

        canvas->saveLayerAlpha(&r, 0x80);

        SkPaint p;
        p.setColor(SK_ColorRED);
        p.setAntiAlias(true);
        canvas->drawOval(r, p);

        dump_layers("inside layer alpha", canvas);

        canvas->restore();
    } else {
        r.set(0, 0, SkIntToScalar(100), SkIntToScalar(100));

        SkPaint p;
        p.setColor(SK_ColorRED);
        p.setAntiAlias(true);
        canvas->drawOval(r, p);
    }

    // return;

    dump_layers("outside layer alpha", canvas);

    // now apply an effect: draw the fade gradient into the two strip layers
    // (flipped vertically for the bottom strip via the local matrix)
    SkMatrix m;
    m.setScale(SK_Scalar1, -SK_Scalar1);
    m.postTranslate(0, SkIntToScalar(100));

    SkPaint paint;
    make_paint(&paint, m);
    r.set(0, 0, SkIntToScalar(100), SkIntToScalar(20));
    // SkDebugf("--------- draw top grad\n");
    canvas->drawRect(r, paint);

    r.fTop = SkIntToScalar(80);
    r.fBottom = SkIntToScalar(100);
    // SkDebugf("--------- draw bot grad\n");
    canvas->drawRect(r, paint);
}

// Draw filter that remembers the last paint color it saw and rewrites pure
// red to green.
class RedFilter : public SkDrawFilter {
public:
    bool filter(SkPaint* p, SkDrawFilter::Type) override {
        fColor = p->getColor();
        if (fColor == SK_ColorRED) {
            p->setColor(SK_ColorGREEN);
        }
        return true;
    }

private:
    SkColor fColor;
};

// Sample view exercising saveLayer/saveLayerAlpha behavior. Several branches
// are deliberately disabled (if (true)/if (false)) so alternatives can be
// toggled while experimenting.
class LayersView : public SkView {
public:
    LayersView() {}

protected:
    // overrides from SkEventSink
    bool onQuery(SkEvent* evt) override {
        if (SampleCode::TitleQ(*evt)) {
            SampleCode::TitleR(evt, "Layers");
            return true;
        }
        return this->INHERITED::onQuery(evt);
    }

    void drawBG(SkCanvas* canvas) {
        canvas->drawColor(SK_ColorGRAY);
    }

    void onDraw(SkCanvas* canvas) override {
        this->drawBG(canvas);

        if (true) {
            SkRect r;
            r.set(SkIntToScalar(0), SkIntToScalar(0),
                  SkIntToScalar(220), SkIntToScalar(120));
            SkPaint p;
            canvas->saveLayer(&r, &p);
            canvas->drawColor(0xFFFF0000);
            p.setAlpha(0);  // or 0
            p.setXfermodeMode(SkXfermode::kSrc_Mode);
            canvas->drawOval(r, p);
            canvas->restore();
            return;
        }

        if (false) {
            SkRect r;
            r.set(SkIntToScalar(0), SkIntToScalar(0),
                  SkIntToScalar(220), SkIntToScalar(120));
            SkPaint p;
            p.setAlpha(0x88);
            p.setAntiAlias(true);

            if (true) {
                canvas->saveLayer(&r, &p);
                p.setColor(0xFFFF0000);
                canvas->drawOval(r, p);
                canvas->restore();
            }

            p.setColor(0xFF0000FF);
            r.offset(SkIntToScalar(20), SkIntToScalar(50));
            canvas->drawOval(r, p);
        }

        if (false) {
            SkPaint p;
            p.setAlpha(0x88);
            p.setAntiAlias(true);

            canvas->translate(SkIntToScalar(300), 0);

            SkRect r;
            r.set(SkIntToScalar(0), SkIntToScalar(0),
                  SkIntToScalar(220), SkIntToScalar(60));

            canvas->saveLayer(&r, &p);

            r.set(SkIntToScalar(0), SkIntToScalar(0),
                  SkIntToScalar(220), SkIntToScalar(120));
            p.setColor(SK_ColorBLUE);
            canvas->drawOval(r, p);
            canvas->restore();
            return;
        }

        test_fade(canvas);
    }

    // overrides from SkView
    SkView::Click* onFindClickHandler(SkScalar x, SkScalar y, unsigned modi) override {
        this->inval(nullptr);
        return this->INHERITED::onFindClickHandler(x, y, modi);
    }

    bool onClick(Click* click) override {
        return this->INHERITED::onClick(click);
    }

    virtual bool handleKey(SkKey) {
        this->inval(nullptr);
        return true;
    }

private:
    typedef SkView INHERITED;
};

DEF_SAMPLE( return new LayersView; )

//////////////////////////////////////////////////////////////////////////////

#include "SkBlurImageFilter.h"
#include "SkMatrixConvolutionImageFilter.h"
#include "SkMorphologyImageFilter.h"
#include "Resources.h"
#include "SkAnimTimer.h"

// Sample that clips to a rotating oval and applies a dilate image filter as
// a saveLayer backdrop over an image; click moves the oval's center.
class BackdropView : public SampleView {
    SkPoint fCenter;                // center of the rotating oval clip
    SkScalar fAngle;                // current rotation, degrees
    sk_sp<SkImage> fImage;
    sk_sp<SkImageFilter> fFilter;   // backdrop filter for the saveLayer
public:
    BackdropView() {
        fCenter.set(200, 150);
        fAngle = 0;
        fImage = GetResourceAsImage("mandrill_512.png");
        fFilter = SkDilateImageFilter::Make(8, 8, nullptr);
    }

protected:
    // overrides from SkEventSink
    bool onQuery(SkEvent* evt) override {
        if (SampleCode::TitleQ(*evt)) {
            SampleCode::TitleR(evt, "Backdrop");
            return true;
        }
        return this->INHERITED::onQuery(evt);
    }

    void onDrawContent(SkCanvas* canvas) override {
        canvas->drawImage(fImage.get(), 0, 0, nullptr);

        const SkScalar w = 250;
        const SkScalar h = 150;
        SkPath path;
        path.addOval(SkRect::MakeXYWH(-w/2, -h/2, w, h));
        SkMatrix m;
        m.setRotate(fAngle);
        m.postTranslate(fCenter.x(), fCenter.y());
        path.transform(m);

        canvas->clipPath(path, SkRegion::kIntersect_Op, true);
        const SkRect bounds = path.getBounds();
        SkPaint paint;
        paint.setAlpha(0xCC);
        // The layer is created only for its backdrop filter; it is restored
        // immediately without drawing extra content into it.
        canvas->saveLayer({ &bounds, &paint, fFilter.get(), 0 });
        canvas->restore();
    }

    bool onAnimate(const SkAnimTimer& timer) override {
        // One full revolution every 5 seconds.
        fAngle = SkDoubleToScalar(fmod(timer.secs() * 360 / 5, 360));
        return true;
    }

    SkView::Click* onFindClickHandler(SkScalar x, SkScalar y, unsigned modi) override {
        this->inval(nullptr);
        return new Click(this);
    }

    bool onClick(Click* click) override {
        this->inval(nullptr);
        fCenter = click->fCurr;
        return this->INHERITED::onClick(click);
    }

private:
    typedef SampleView INHERITED;
};

DEF_SAMPLE( return new BackdropView; )
apache-2.0
googleads/google-ads-java
google-ads-stubs-v8/src/main/java/com/google/ads/googleads/v8/services/stub/CampaignBidModifierServiceStub.java
1927
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v8.services.stub;

import com.google.ads.googleads.v8.resources.CampaignBidModifier;
import com.google.ads.googleads.v8.services.GetCampaignBidModifierRequest;
import com.google.ads.googleads.v8.services.MutateCampaignBidModifiersRequest;
import com.google.ads.googleads.v8.services.MutateCampaignBidModifiersResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.UnaryCallable;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Base stub class for the CampaignBidModifierService service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public abstract class CampaignBidModifierServiceStub implements BackgroundResource {

  /**
   * Returns the callable for fetching a single CampaignBidModifier. Concrete
   * transport stubs override this; the base implementation always throws.
   */
  public UnaryCallable<GetCampaignBidModifierRequest, CampaignBidModifier>
      getCampaignBidModifierCallable() {
    throw new UnsupportedOperationException("Not implemented: getCampaignBidModifierCallable()");
  }

  /**
   * Returns the callable for mutating campaign bid modifiers. Concrete
   * transport stubs override this; the base implementation always throws.
   */
  public UnaryCallable<MutateCampaignBidModifiersRequest, MutateCampaignBidModifiersResponse>
      mutateCampaignBidModifiersCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: mutateCampaignBidModifiersCallable()");
  }

  @Override
  public abstract void close();
}
apache-2.0
Thorium-Sim/thorium
src/components/macros/assignSpaceEdventuresBadge.js
2792
import React from "react"; import {Query} from "react-apollo"; import gql from "graphql-tag.macro"; import {FormGroup, Label, Input} from "helpers/reactstrap"; export default ({updateArgs, args, stations, clients}) => { return ( <Query fetchPolicy="cache-first" query={gql` query Badges { thorium { spaceEdventuresCenter { badges { id name } } } } `} > {({loading, data, error}) => ( <FormGroup className="macro-template"> <Label> Space EdVentures Badge <div> {!data && !data.thorium ? ( "Loading" ) : error ? ( "Error loading badges." ) : data && data.thorium && data.thorium.spaceEdventuresCenter && data.thorium.spaceEdventuresCenter.badges ? ( <Input type="select" value={args ? args.badgeId : "select"} onChange={evt => updateArgs("badgeId", evt.target.value)} > <option value="select">Select a Badge</option> {data.thorium.spaceEdventuresCenter.badges.map(m => ( <option key={m.id} value={m.id}> {m.name} </option> ))} </Input> ) : loading ? ( <p>Loading...</p> ) : ( <p> Not connected to a SpaceEdVentures.com Center. Cannot get badges. </p> )} </div> </Label> <div> <Label>Station</Label> <Input type="select" value={args.station || ""} onChange={e => updateArgs("station", e.target.value)} > <option value="" disabled> Select a Station </option> {stations && stations.length > 0 && ( <optgroup label="Stations"> {stations.map(c => ( <option value={c.name} key={c.name}> {c.name} </option> ))} </optgroup> )} {clients && clients.length > 0 && ( <optgroup label="Clients"> {clients.map(c => ( <option value={c.id} key={c.id}> {c.id} </option> ))} </optgroup> )} </Input> </div> </FormGroup> )} </Query> ); };
apache-2.0
magnetsystems/message-server
server/plugins/mmxmgmt/src/test/java/com/magnet/mmx/server/plugin/mmxmgmt/servlet/MMXTopicResourceTest.java
8842
/* Copyright (c) 2015 Magnet Systems, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.magnet.mmx.server.plugin.mmxmgmt.servlet;

import com.google.gson.Gson;
import com.magnet.mmx.protocol.MMXTopicId;
import com.magnet.mmx.server.api.v1.protocol.TopicSubscription;
import com.magnet.mmx.server.plugin.mmxmgmt.db.*;
import com.magnet.mmx.server.plugin.mmxmgmt.message.MMXPubSubItem;
import com.magnet.mmx.server.plugin.mmxmgmt.message.MMXPubSubPayload;
import com.magnet.mmx.server.plugin.mmxmgmt.topic.TopicNode;
import com.magnet.mmx.server.plugin.mmxmgmt.util.AuthUtil;
import com.magnet.mmx.server.plugin.mmxmgmt.util.DBTestUtil;
import com.magnet.mmx.server.plugin.mmxmgmt.util.MMXServerConstants;
import com.magnet.mmx.util.GsonData;
import com.magnet.mmx.util.TopicHelper;
import mockit.Mock;
import mockit.MockUp;
import mockit.integration.junit4.JMockit;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang.RandomStringUtils;
import org.dbunit.database.DatabaseConnection;
import org.dbunit.database.IDatabaseConnection;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.xml.FlatXmlDataSetBuilder;
import org.dbunit.operation.DatabaseOperation;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.util.*;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

/**
 * JAX-RS integration tests for the topics REST resource. Uses JMockit to stub
 * out subscription lookup, auth, and the connection provider, and DBUnit to
 * seed the backing database before the class runs.
 */
@RunWith(JMockit.class)
public class MMXTopicResourceTest extends BaseJAXRSTest {
  private static final Logger LOGGER = LoggerFactory.getLogger(MMXTopicResourceTest.class);
  //http://localhost:5220/mmxmgmt/api/v1/send_message
  private static final String baseUri = "http://localhost:8086/mmxmgmt/api/v1/topics";
  private static BasicDataSource ds;
  private static String appId = "7wmi73wxin9";
  private static String apiKey = "4111f18a-9fcc-4e84-8cb9-aad6ea7bf024";

  // Installs the three JMockit mocks the tests depend on; called once from setup().
  private static void setupMocks() {
    // Replace the real subscription lookup with 100 randomly generated stubs
    // (every third one carrying a device id).
    new MockUp<TopicResource>() {
      @Mock
      protected List<TopicSubscription> getTopicSubscriptions (String appId, MMXTopicId tid) {
        LOGGER.trace("MOCKED getTopicSubscriptions : appid={}, tid={}", appId, tid);
        String topicId = TopicHelper.makeTopic(appId, tid.getEscUserId(), tid.getName());
        List<TopicSubscription> list = new ArrayList<TopicSubscription>(10);
        for (int i=0; i < 100; i++) {
          StubTopicSubscription subscription = new StubTopicSubscription();
          subscription.setTopic(tid.getName());
          subscription.setUsername(RandomStringUtils.randomAlphanumeric(3));
          subscription.setSubscriptionId(RandomStringUtils.randomAlphanumeric(10));
          if (i%3 ==0 ) {
            subscription.setDeviceId(RandomStringUtils.randomNumeric(10));
          }
          list.add(subscription);
        }
        return list;
      }
    };
    //for allowing auth
    new MockUp<AuthUtil>() {
      @Mock
      public boolean isAuthorized(HttpServletRequest headers) throws IOException {
        return true;
      }
    };
    /**
     * For using the db unit datasource
     */
    new MockUp<TopicResource>() {
      @Mock
      public ConnectionProvider getConnectionProvider() {
        return new BasicDataSourceConnectionProvider(ds);
      }
    };
  }

  public MMXTopicResourceTest() {
    super(baseUri);
  }

  // Seeds the DBUnit datasource with app and pubsub-node fixtures, then
  // installs the mocks. Runs once before all tests in the class.
  @BeforeClass
  public static void setup() throws Exception{
    ds = UnitTestDSProvider.getDataSource();
    //clean any existing records and load some records into the database.
    FlatXmlDataSetBuilder builder = new FlatXmlDataSetBuilder();
    builder.setColumnSensing(true);
    Connection setup = ds.getConnection();
    IDatabaseConnection con = new DatabaseConnection(setup);
    {
      InputStream xmlInput = DeviceDAOImplTest.class.getResourceAsStream("/data/app-data-1.xml");
      IDataSet dataSet = builder.build(xmlInput);
      DatabaseOperation.CLEAN_INSERT.execute(con, dataSet);
    }
    {
      InputStream xmlInput = DeviceDAOImplTest.class.getResourceAsStream("/data/pubsub-node-data-1.xml");
      IDataSet dataSet = builder.build(xmlInput);
      DatabaseOperation.CLEAN_INSERT.execute(con, dataSet);
    }
    setupMocks();
  }

  // Drops the tag table contents and closes the pooled datasource.
  @AfterClass
  public static void cleanup() throws Exception {
    DBTestUtil.cleanTables(new String[] {"mmxTag"}, new BasicDataSourceConnectionProvider(ds));
    ds.close();
  }

  // GET /topics?topicName=sport must return 200 with a JSON SearchResult body.
  @Test
  public void testSearchTopics() {
    String topicName = "sport";
    WebTarget target = getClient().target(baseUri)
        .queryParam(TopicResource.TOPIC_NAME, topicName);
    Invocation.Builder invocationBuilder = target.request(MediaType.APPLICATION_JSON);
    invocationBuilder.header(MMXServerConstants.HTTP_HEADER_APP_ID, appId);
    invocationBuilder.header(MMXServerConstants.HTTP_HEADER_REST_API_KEY, apiKey);
    Response response = invocationBuilder.get();
    int statusCode = response.getStatus();
    assertEquals("Non matching response code",Response.Status.OK.getStatusCode(), statusCode);
    String json = response.readEntity(String.class);
    assertNotNull("Response is null", json);
    Gson gson = GsonData.getGson();
    SearchResult<TopicNode> result = gson.fromJson(json, SearchResult.class);
    assertNotNull(result);
    response.close();
  }

  // GET /topics/{name}/subscriptions must return 200 with a JSON list body
  // (contents come from the mocked getTopicSubscriptions above).
  @Test
  public void testListTopicSubscriptionsForATopic() {
    String topicName = "sport";
    WebTarget target = getClient().target(baseUri + "/" + topicName + "/" + "subscriptions")
        .queryParam(TopicResource.APP_ID_KEY, appId)
        .queryParam(TopicResource.TOPIC_NAME, topicName);
    Invocation.Builder invocationBuilder = target.request(MediaType.APPLICATION_JSON);
    invocationBuilder.header(MMXServerConstants.HTTP_HEADER_APP_ID, appId);
    invocationBuilder.header(MMXServerConstants.HTTP_HEADER_REST_API_KEY, apiKey);
    Response response = invocationBuilder.get();
    int statusCode = response.getStatus();
    assertEquals("Non matching response code",Response.Status.OK.getStatusCode(), statusCode);
    String json = response.readEntity(String.class);
    assertNotNull("Response is null", json);
    Gson gson = GsonData.getGson();
    List<TopicSubscription> result = gson.fromJson(json, List.class);
    assertNotNull(result);
    response.close();
  }

  // Builds a pubsub item populated with random ids and a two-entry meta map.
  // NOTE(review): currently unused by the tests in this class.
  private static MMXPubSubItem getRandomPubSubItem() {
    MMXPubSubPayload payload = new MMXPubSubPayload("text_" + RandomStringUtils.randomAlphabetic(2),new Date().toString(),
        "Hello_World_" + RandomStringUtils.randomAlphabetic(2));
    Map<String, String> map = new HashMap<String, String>();
    map.put("key1", "value1");
    map.put("key2", "value2");
    MMXPubSubItem item = new MMXPubSubItem();
    item.setItemId(RandomStringUtils.randomAlphanumeric(10));
    item.setMeta(map);
    item.setAppId(RandomStringUtils.randomAlphanumeric(8));
    item.setTopicName(RandomStringUtils.randomAlphanumeric(10));
    item.setPayload(payload);
    return item;
  }

  // Concrete TopicSubscription with settable fields so the mock above can
  // fabricate subscription records.
  private static class StubTopicSubscription extends TopicSubscription {
    private String username; // a user ID for user topic, or null for global topic
    private String topic; // the topic name
    private String subscriptionId; // the subscription ID
    private String deviceId; // device identifier associated with this subscription.

    public void setTopic(String topic) {
      this.topic = topic;
    }

    public void setUsername(String username) {
      this.username = username;
    }

    public void setDeviceId(String deviceId) {
      this.deviceId = deviceId;
    }

    public void setSubscriptionId(String subscriptionId) {
      this.subscriptionId = subscriptionId;
    }

    public String getUsername() {
      return username;
    }

    public String getTopicName() {
      return topic;
    }

    public String getSubscriptionId() {
      return subscriptionId;
    }

    public String getDeviceId() {
      return deviceId;
    }
  }
}
apache-2.0
0359xiaodong/android-autofittextview
library/src/main/java/me/grantland/widget/AutofitTextView.java
11166
package me.grantland.widget; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.text.Layout; import android.text.StaticLayout; import android.text.TextPaint; import android.util.AttributeSet; import android.util.DisplayMetrics; import android.util.Log; import android.util.TypedValue; import android.widget.TextView; import me.grantland.autofittextview.R; /** * A TextView that resizes it's text to be no larger than the width of the view. * * @author Grantland Chew <grantlandchew@gmail.com> */ public class AutofitTextView extends TextView { private static final String TAG = "AutoFitTextView"; private static final boolean SPEW = false; // Minimum size of the text in pixels private static final int DEFAULT_MIN_TEXT_SIZE = 8; //sp // How precise we want to be when reaching the target textWidth size private static final float PRECISION = 0.5f; // Attributes private boolean mSizeToFit; private int mMaxLines; private float mMinTextSize; private float mMaxTextSize; private float mPrecision; private TextPaint mPaint; public AutofitTextView(Context context) { super(context); init(context, null, 0); } public AutofitTextView(Context context, AttributeSet attrs) { super(context, attrs); init(context, attrs, 0); } public AutofitTextView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); init(context, attrs, defStyle); } private void init(Context context, AttributeSet attrs, int defStyle) { float scaledDensity = context.getResources().getDisplayMetrics().scaledDensity; boolean sizeToFit = true; int minTextSize = (int) scaledDensity * DEFAULT_MIN_TEXT_SIZE; float precision = PRECISION; if (attrs != null) { TypedArray ta = context.obtainStyledAttributes( attrs, R.styleable.AutofitTextView, defStyle, 0); sizeToFit = ta.getBoolean(R.styleable.AutofitTextView_sizeToFit, sizeToFit); minTextSize = ta.getDimensionPixelSize(R.styleable.AutofitTextView_minTextSize, minTextSize); 
precision = ta.getFloat(R.styleable.AutofitTextView_precision, precision); ta.recycle(); } mPaint = new TextPaint(); setSizeToFit(sizeToFit); setRawTextSize(super.getTextSize()); setRawMinTextSize(minTextSize); setPrecision(precision); } // Getters and Setters /** * @return whether or not the text will be automatically resized to fit its constraints. */ public boolean isSizeToFit() { return mSizeToFit; } /** * Sets the property of this field (singleLine, to automatically resize the text to fit its constraints. */ public void setSizeToFit() { setSizeToFit(true); } /** * If true, the text will automatically be resized to fit its constraints; if false, it will * act like a normal TextView. * * @param sizeToFit */ public void setSizeToFit(boolean sizeToFit) { mSizeToFit = sizeToFit; refitText(); } /** * {@inheritDoc} */ @Override public float getTextSize() { return mMaxTextSize; } /** * {@inheritDoc} */ @Override public void setTextSize(int unit, float size) { Context context = getContext(); Resources r = Resources.getSystem(); if (context != null) { r = context.getResources(); } setRawTextSize(TypedValue.applyDimension(unit, size, r.getDisplayMetrics())); } private void setRawTextSize(float size) { if (size != mMaxTextSize) { mMaxTextSize = size; refitText(); } } /** * @return the minimum size (in pixels) of the text size in this AutofitTextView */ public float getMinTextSize() { return mMinTextSize; } /** * Set the minimum text size to a given unit and value. See TypedValue for the possible * dimension units. * * @param unit The desired dimension unit. * @param minSize The desired size in the given units. 
* * @attr ref me.grantland.R.styleable#AutofitTextView_minTextSize */ public void setMinTextSize(int unit, float minSize) { Context context = getContext(); Resources r = Resources.getSystem(); if (context != null) { r = context.getResources(); } setRawMinTextSize(TypedValue.applyDimension(unit, minSize, r.getDisplayMetrics())); } /** * Set the minimum text size to the given value, interpreted as "scaled pixel" units. This size * is adjusted based on the current density and user font size preference. * * @param minSize The scaled pixel size. * * @attr ref me.grantland.R.styleable#AutofitTextView_minTextSize */ public void setMinTextSize(int minSize) { setMinTextSize(TypedValue.COMPLEX_UNIT_SP, minSize); } private void setRawMinTextSize(float minSize) { if (minSize != mMinTextSize) { mMinTextSize = minSize; refitText(); } } /** * @return the amount of precision used to calculate the correct text size to fit within it's * bounds. */ public float getPrecision() { return mPrecision; } /** * Set the amount of precision used to calculate the correct text size to fit within it's * bounds. Lower precision is more precise and takes more time. * * @param precision The amount of precision. */ public void setPrecision(float precision) { if (precision != mPrecision) { mPrecision = precision; refitText(); } } /** * {@inheritDoc} */ @Override public void setLines(int lines) { super.setLines(lines); mMaxLines = lines; refitText(); } /** * {@inheritDoc} */ @Override public int getMaxLines() { return mMaxLines; } /** * {@inheritDoc} */ @Override public void setMaxLines(int maxLines) { super.setMaxLines(maxLines); if (maxLines != mMaxLines) { mMaxLines = maxLines; refitText(); } } /** * Re size the font so the specified text fits in the text box assuming the text box is the * specified width. */ private void refitText() { if (!mSizeToFit) { return; } if (mMaxLines <= 0) { // Don't auto-size since there's no limit on lines. 
return; } String text = getText().toString(); int targetWidth = getWidth() - getPaddingLeft() - getPaddingRight(); if (targetWidth > 0) { Context context = getContext(); Resources r = Resources.getSystem(); DisplayMetrics displayMetrics; float size = mMaxTextSize; float high = size; float low = 0; if (context != null) { r = context.getResources(); } displayMetrics = r.getDisplayMetrics(); mPaint.set(getPaint()); mPaint.setTextSize(size); if ((mMaxLines == 1 && mPaint.measureText(text) > targetWidth) || getLineCount(text, mPaint, size, targetWidth, displayMetrics) > mMaxLines) { size = getTextSize(text, mPaint, targetWidth, mMaxLines, low, high, mPrecision, displayMetrics); } if (size < mMinTextSize) { size = mMinTextSize; } super.setTextSize(TypedValue.COMPLEX_UNIT_PX, size); } } /** * Recursive binary search to find the best size for the text */ private static float getTextSize(String text, TextPaint paint, float targetWidth, int maxLines, float low, float high, float precision, DisplayMetrics displayMetrics) { float mid = (low + high) / 2.0f; int lineCount = 1; StaticLayout layout = null; paint.setTextSize(TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_PX, mid, displayMetrics)); if (maxLines != 1) { layout = new StaticLayout(text, paint, (int)targetWidth, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, true); lineCount = layout.getLineCount(); } if (SPEW) Log.d(TAG, "low=" + low + " high=" + high + " mid=" + mid + " target=" + targetWidth + " maxLines=" + maxLines + " lineCount=" + lineCount); if (lineCount > maxLines) { return getTextSize(text, paint, targetWidth, maxLines, low, mid, precision, displayMetrics); } else if (lineCount < maxLines) { return getTextSize(text, paint, targetWidth, maxLines, mid, high, precision, displayMetrics); } else { float maxLineWidth = 0; if (maxLines == 1) { maxLineWidth = paint.measureText(text); } else { for (int i = 0; i < lineCount; i++) { if (layout.getLineWidth(i) > maxLineWidth) { maxLineWidth = layout.getLineWidth(i); } } } 
if ((high - low) < precision) { return low; } else if (maxLineWidth > targetWidth) { return getTextSize(text, paint, targetWidth, maxLines, low, mid, precision, displayMetrics); } else if (maxLineWidth < targetWidth) { return getTextSize(text, paint, targetWidth, maxLines, mid, high, precision, displayMetrics); } else { return mid; } } } private static int getLineCount(String text, TextPaint paint, float size, float width, DisplayMetrics displayMetrics) { paint.setTextSize(TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_PX, size, displayMetrics)); StaticLayout layout = new StaticLayout(text, paint, (int)width, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, true); return layout.getLineCount(); } @Override protected void onTextChanged(final CharSequence text, final int start, final int lengthBefore, final int lengthAfter) { super.onTextChanged(text, start, lengthBefore, lengthAfter); refitText(); } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); if (w != oldw) { refitText(); } } }
apache-2.0
AldanisVigo/CPlusPlus
TriangleClass/Triangle.cpp
1277
/* //Class Declaration (Triangle.h) #ifndef TRIANGLE_H #define TRIANGLE_H #include <iostream> using namespace std; class Triangle{ private: double Hypotenuse; double Opposite; double Adjacent; public: Triangle(); //Overloaded Constructor Triangle(double,double,double); //Destructor ~Triangle(); void set_hypotenuse(double); void set_opposite(double); void set_adjacent(double); double get_hypotenuse(); double get_opposite(); double get_adjacent(); double sine(); double cosine(); double tangent(); }; #endif //TRIANGLE_H */ #include <iostream> #include "Triangle.h" using namespace std; Triangle::set_hypotenuse(double nH){ Hypotenuse = nH; } Triangle::set_adjacent(double nA){ Adjacent = nA; } Triangle::set_opposite(double nO){ Opposite = nO; } Triangle::Triangle(){ cout << "The triangle constructor was called!" << endl; } Triangle::Triangle(double nH, double nA, double nO){ Hypotenuse = nH; Adjacent = nA; Opposite = nO; cout << "The overloaded triangle constructor was called!" << endl; } ~Triangle(){ cout << "The triangle distructor was called!" << endl; } double Triangle::sine(){ return Opposite / Hypotenuse; } double Triangle::cosine(){ return Adjacent / Hypotenuse; } double Triangle::tangent(){ return Opposite / Adjacent; }
apache-2.0
data-integrations/zuora
src/main/java/io/cdap/plugin/zuora/objects/ProxyCreateTaxationItem.java
5441
/*
 * Copyright © 2019 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package io.cdap.plugin.zuora.objects;

import com.google.gson.annotations.SerializedName;
import io.cdap.cdap.api.data.schema.Schema;
import io.cdap.plugin.zuora.restobjects.annotations.ObjectDefinition;
import io.cdap.plugin.zuora.restobjects.annotations.ObjectFieldDefinition;
import io.cdap.plugin.zuora.restobjects.objects.BaseObject;
import javax.annotation.Nullable;

/**
 * Object name: ProxyCreateTaxationItem (ProxyCreateTaxationItem).
 * Related objects:
 *
 * Nested (non-top-level) Zuora API object holding taxation-item fields for a
 * create request. Note that fields typed as "number" in the Zuora API
 * (ExemptAmount, TaxAmount, TaxRate) are deliberately carried as String here,
 * mirroring the STRING schema declared in their {@code @ObjectFieldDefinition}.
 * All fields are optional ({@code @Nullable}) and registered with the base
 * object via {@link #addFields()}.
 **/
@SuppressWarnings("unused")
@ObjectDefinition(
  Name = "ProxyCreateTaxationItem",
  ObjectType = ObjectDefinition.ObjectDefinitionType.NESTED
)
public class ProxyCreateTaxationItem extends BaseObject {
  /**
  * Name: AccountingCode (AccountingCode), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("accountingCode")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String accountingCode;

  /**
  * Name: ExemptAmount (ExemptAmount), Type: number.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("exemptAmount")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String exemptAmount;

  /**
  * Name: InvoiceItemId (InvoiceItemId), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("invoiceItemId")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String invoiceItemId;

  /**
  * Name: Jurisdiction (Jurisdiction), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("jurisdiction")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String jurisdiction;

  /**
  * Name: LocationCode (LocationCode), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("locationCode")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String locationCode;

  /**
  * Name: Name (Name), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("name")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String name;

  /**
  * Name: TaxAmount (TaxAmount), Type: number.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("taxAmount")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String taxAmount;

  /**
  * Name: TaxCode (TaxCode), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("taxCode")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String taxCode;

  /**
  * Name: TaxCodeDescription (TaxCodeDescription), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("taxCodeDescription")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String taxCodeDescription;

  /**
  * Name: TaxDate (TaxDate), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("taxDate")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String taxDate;

  /**
  * Name: TaxRate (TaxRate), Type: number.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("taxRate")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String taxRate;

  /**
  * Name: TaxRateDescription (TaxRateDescription), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("taxRateDescription")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String taxRateDescription;

  /**
  * Name: TaxRateType (TaxRateType), Type: string.
  * Options (custom, update, select): false, false, false
  **/
  @Nullable
  @SerializedName("taxRateType")
  @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
  private String taxRateType;

  // Registers every field with BaseObject so it is exposed through the
  // generic field mechanism; each entry pairs the serialized name with the
  // current value and its Java type.
  @Override
  public void addFields() {
    addCustomField("accountingCode", accountingCode, String.class);
    addCustomField("exemptAmount", exemptAmount, String.class);
    addCustomField("invoiceItemId", invoiceItemId, String.class);
    addCustomField("jurisdiction", jurisdiction, String.class);
    addCustomField("locationCode", locationCode, String.class);
    addCustomField("name", name, String.class);
    addCustomField("taxAmount", taxAmount, String.class);
    addCustomField("taxCode", taxCode, String.class);
    addCustomField("taxCodeDescription", taxCodeDescription, String.class);
    addCustomField("taxDate", taxDate, String.class);
    addCustomField("taxRate", taxRate, String.class);
    addCustomField("taxRateDescription", taxRateDescription, String.class);
    addCustomField("taxRateType", taxRateType, String.class);
  }
}
apache-2.0
stuhood/cassandra-old
contrib/pig/src/java/org/apache/cassandra/hadoop/pig/CassandraStorage.java
6341
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.cassandra.hadoop.pig;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;

import org.apache.cassandra.db.Column;
import org.apache.cassandra.db.IColumn;
import org.apache.cassandra.db.SuperColumn;
import org.apache.cassandra.hadoop.*;
import org.apache.cassandra.thrift.SlicePredicate;
import org.apache.cassandra.thrift.SliceRange;
import org.apache.cassandra.utils.FBUtilities;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordReader;

import org.apache.pig.LoadFunc;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.data.DefaultDataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

/**
 * A LoadFunc wrapping ColumnFamilyInputFormat.
 *
 * A row from a standard CF will be returned as nested tuples: (key, ((name1, val1), (name2, val2))).
 */
public class CassandraStorage extends LoadFunc
{
    // system environment variables that can be set to configure connection info:
    // alternatively, Hadoop JobConf variables can be set using keys from ConfigHelper
    public final static String PIG_RPC_PORT = "PIG_RPC_PORT";
    public final static String PIG_INITIAL_ADDRESS = "PIG_INITIAL_ADDRESS";
    public final static String PIG_PARTITIONER = "PIG_PARTITIONER";

    // open-ended slice bound and the maximum number of columns fetched per row
    private final static ByteBuffer BOUND = FBUtilities.EMPTY_BYTE_BUFFER;
    private final static int LIMIT = 1024;

    private Configuration conf;
    private RecordReader reader;

    /**
     * Returns the next row as a (key, bag-of-(name, value)) tuple, or null
     * when the underlying reader is exhausted.
     */
    @Override
    public Tuple getNext() throws IOException
    {
        try
        {
            // load the next pair
            if (!reader.nextKeyValue())
                return null;
            ByteBuffer key = (ByteBuffer)reader.getCurrentKey();
            SortedMap<ByteBuffer,IColumn> cf = (SortedMap<ByteBuffer,IColumn>)reader.getCurrentValue();
            assert key != null && cf != null;
            // and wrap it in a tuple
            Tuple tuple = TupleFactory.getInstance().newTuple(2);
            ArrayList<Tuple> columns = new ArrayList<Tuple>();
            tuple.set(0, new DataByteArray(key.array(), key.position()+key.arrayOffset(), key.limit()+key.arrayOffset()));
            for (Map.Entry<ByteBuffer, IColumn> entry : cf.entrySet())
            {
                columns.add(columnToTuple(entry.getKey(), entry.getValue()));
            }
            tuple.set(1, new DefaultDataBag(columns));
            return tuple;
        }
        catch (InterruptedException e)
        {
            // FIX: chain the cause instead of flattening it to a message so
            // the original stack trace is not lost.
            throw new IOException(e);
        }
    }

    /**
     * Converts a column (or super column, recursively) into a
     * (name, value) or (name, bag-of-subcolumns) tuple.
     */
    private Tuple columnToTuple(ByteBuffer name, IColumn col) throws IOException
    {
        Tuple pair = TupleFactory.getInstance().newTuple(2);
        pair.set(0, new DataByteArray(name.array(), name.position()+name.arrayOffset(), name.limit()+name.arrayOffset()));
        if (col instanceof Column)
        {
            // standard
            pair.set(1, new DataByteArray(col.value().array(), col.value().position()+col.value().arrayOffset(), col.value().limit()+col.value().arrayOffset()));
            return pair;
        }

        // super
        ArrayList<Tuple> subcols = new ArrayList<Tuple>();
        for (IColumn subcol : ((SuperColumn)col).getSubColumns())
            subcols.add(columnToTuple(subcol.name(), subcol));
        pair.set(1, new DefaultDataBag(subcols));
        return pair;
    }

    @Override
    public InputFormat getInputFormat()
    {
        return new ColumnFamilyInputFormat();
    }

    @Override
    public void prepareToRead(RecordReader reader, PigSplit split)
    {
        this.reader = reader;
    }

    /**
     * Parses a "cassandra://&lt;keyspace&gt;/&lt;columnfamily&gt;" location
     * and configures the job's slice predicate and connection settings.
     */
    @Override
    public void setLocation(String location, Job job) throws IOException
    {
        // parse uri into keyspace and columnfamily
        String ksname, cfname;
        try
        {
            if (!location.startsWith("cassandra://"))
                throw new Exception("Bad scheme.");
            String[] parts = location.split("/+");
            ksname = parts[1];
            cfname = parts[2];
        }
        catch (Exception e)
        {
            // FIX: pass the cause through so parsing failures keep their stack trace.
            throw new IOException("Expected 'cassandra://<keyspace>/<columnfamily>': " + e.getMessage(), e);
        }

        // and configure
        SliceRange range = new SliceRange(BOUND, BOUND, false, LIMIT);
        SlicePredicate predicate = new SlicePredicate().setSlice_range(range);
        conf = job.getConfiguration();
        ConfigHelper.setInputSlicePredicate(conf, predicate);
        ConfigHelper.setInputColumnFamily(conf, ksname, cfname);

        // check the environment for connection information
        if (System.getenv(PIG_RPC_PORT) != null)
            ConfigHelper.setRpcPort(conf, System.getenv(PIG_RPC_PORT));
        if (System.getenv(PIG_INITIAL_ADDRESS) != null)
            ConfigHelper.setInitialAddress(conf, System.getenv(PIG_INITIAL_ADDRESS));
        if (System.getenv(PIG_PARTITIONER) != null)
            ConfigHelper.setPartitioner(conf, System.getenv(PIG_PARTITIONER));
    }

    @Override
    public String relativeToAbsolutePath(String location, Path curDir) throws IOException
    {
        return location;
    }
}
apache-2.0
nimbus/camlistore
pkg/importer/picasa/picasa_test.go
1829
/* Copyright 2014 The Camlistore Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package picasa import ( "net/http" "testing" "camlistore.org/pkg/httputil" "camlistore.org/third_party/github.com/tgulacsi/picago" ) func TestGetUserId(t *testing.T) { userID := "11047045264" responder := httputil.FileResponder("testdata/users-me-res.xml") cl := &http.Client{ Transport: httputil.NewFakeTransport(map[string]func() *http.Response{ "https://picasaweb.google.com/data/feed/api/user/default/contacts?kind=user": responder, "https://picasaweb.google.com/data/feed/api/user/" + userID + "/contacts?kind=user": responder, })} inf, err := picago.GetUser(cl, "default") if err != nil { t.Fatal(err) } want := picago.User{ ID: userID, URI: "https://picasaweb.google.com/" + userID, Name: "Tamás Gulácsi", Thumbnail: "https://lh4.googleusercontent.com/-qqove344/AAAAAAAAAAI/AAAAAAABcbg/TXl3f2K9dzI/s64-c/11047045264.jpg", } if inf != want { t.Errorf("user info = %+v; want %+v", inf, want) } } func TestMediaURLsEqual(t *testing.T) { if !mediaURLsEqual("https://lh1.googleusercontent.com/foo.jpg", "https://lh100.googleusercontent.com/foo.jpg") { t.Fatal("want equal") } if mediaURLsEqual("https://foo.com/foo.jpg", "https://bar.com/foo.jpg") { t.Fatal("want not equal") } }
apache-2.0
agileowl/tapestry-5
tapestry-core/src/main/java/org/apache/tapestry5/internal/services/PageElementFactory.java
2630
// Copyright 2006, 2007, 2008, 2009 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.apache.tapestry5.internal.services;

import org.apache.tapestry5.Binding;
import org.apache.tapestry5.ComponentResources;
import org.apache.tapestry5.internal.parser.AttributeToken;
import org.apache.tapestry5.internal.parser.ExpansionToken;
import org.apache.tapestry5.ioc.Location;
import org.apache.tapestry5.runtime.RenderCommand;
import org.apache.tapestry5.services.BindingSource;

/**
 * Used by the {@link org.apache.tapestry5.internal.services.PageLoader} to create particular page elements. This has
 * evolved and focused to mostly concern bindings and expansions.
 */
public interface PageElementFactory
{
    /**
     * Creates a RenderCommand for rendering an attribute, when the attribute contains expansions.
     *
     * @param componentResources identifies component
     * @param token              token containing value with expansions
     * @return render command to render the text with expansions expanded
     */
    RenderCommand newAttributeElement(ComponentResources componentResources, AttributeToken token);

    /**
     * Converts an expansion token into a command that renders the expanded value.
     *
     * @param componentResources identifies the component
     * @param token              contains expansion expression
     * @return command to render expansion
     */
    RenderCommand newExpansionElement(ComponentResources componentResources, ExpansionToken token);

    /**
     * Creates a new binding as with {@link BindingSource#newBinding(String, ComponentResources, ComponentResources,
     * String, String, Location)}. However, if the binding contains an expansion (i.e., <code>${...}</code>), then a
     * binding that returns the fully expanded expression will be returned.
     */
    Binding newBinding(String parameterName, ComponentResources loadingComponentResources,
                       ComponentResources embeddedComponentResources, String defaultBindingPrefix,
                       String expression, Location location);
}
apache-2.0
nince-wyj/jahhan
frameworkx/dubbo-common/src/main/java/com/frameworkx/annotation/Activate.java
2819
/* * Copyright 1999-2011 Alibaba Group. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.frameworkx.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Activate * <p /> * 对于可以被框架中自动激活加载扩展,此Annotation用于配置扩展被自动激活加载条件。 * 比如,过滤扩展,有多个实现,使用Activate Annotation的扩展可以根据条件被自动加载。 * <ol> * <li>{@link Activate#group()}生效的Group。具体的有哪些Group值由框架SPI给出。 * <li>{@link Activate#value()}在{@link com.alibaba.dubbo.common.URL}中Key集合中有,则生效。 * </ol> * * <p /> * 底层框架SPI提供者通过{@link com.alibaba.dubbo.common.extension.ExtensionLoader}的{@link ExtensionLoader#getActivateExtension}方法 * 获得条件的扩展。 * * @author william.liangf * @author ding.lid * @export * @see SPI * @see ExtensionLoader * @see ExtensionLoader#getActivateExtension(com.alibaba.dubbo.common.URL, String[], String) */ @Documented @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.TYPE, ElementType.METHOD}) public @interface Activate { /** * Group过滤条件。 * <br /> * 包含{@link ExtensionLoader#getActivateExtension}的group参数给的值,则返回扩展。 * <br /> * 如没有Group设置,则不过滤。 */ String[] group() default {}; /** * Key过滤条件。包含{@link ExtensionLoader#getActivateExtension}的URL的参数Key中有,则返回扩展。 * <p /> * 示例:<br/> * 注解的值 <code>@Activate("cache,validatioin")</code>, * 则{@link ExtensionLoader#getActivateExtension}的URL的参数有<code>cache</code>Key,或是<code>validatioin</code>则返回扩展。 * <br/> * 如没有设置,则不过滤。 */ String[] 
value() default {}; /** * 排序信息,可以不提供。 */ String[] before() default {}; /** * 排序信息,可以不提供。 */ String[] after() default {}; /** * 排序信息,可以不提供。 */ int order() default 0; }
apache-2.0
Nephilim84/contestparser
spring-boot-samples/spring-boot-sample-metrics-opentsdb/src/test/java/sample/metrics/opentsdb/SampleOpenTsdbExportApplicationTests.java
1416
/* * Copyright 2012-2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sample.metrics.opentsdb; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.boot.test.IntegrationTest; import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; /** * Basic integration tests for {@link SampleOpenTsdbExportApplication}. * * @author Dave Syer */ @RunWith(SpringJUnit4ClassRunner.class) @SpringApplicationConfiguration(SampleOpenTsdbExportApplication.class) @WebAppConfiguration @IntegrationTest("server.port=0") @DirtiesContext public class SampleOpenTsdbExportApplicationTests { @Test public void contextLoads() { } }
apache-2.0
wangzijian777/snmpTool
src/main/java/com/prince/snmp/tool/StartUp.java
379
package com.prince.snmp.tool; import java.io.IOException; /** * run this tool as the following format: * java -jar SnmpTool.jar simulator fileName * @author wangzijian * */ public class StartUp { public static void main(String[] args) { try { CommandFactory.getCommand(args).startUp(); } catch (IOException e) { e.printStackTrace(); } } }
apache-2.0
vetafi/vetafi-web
test/controllers/api/TwilioControllerTestContext.scala
3730
package controllers.api import java.time.Instant import java.util.UUID import com.google.inject.AbstractModule import com.mohiva.play.silhouette.api.LoginInfo import com.mohiva.play.silhouette.impl.providers.BasicAuthProvider import com.mohiva.play.silhouette.impl.util.SecureRandomIDGenerator import com.mohiva.play.silhouette.persistence.daos.{ DelegableAuthInfoDAO, MongoAuthInfoDAO } import com.typesafe.config.ConfigFactory import models.daos.{ ClaimDAO, FormDAO, TwilioFaxDAO } import models._ import net.codingwell.scalaguice.ScalaModule import org.mockito.Mockito import org.specs2.specification.Scope import play.api.inject.guice.GuiceApplicationBuilder import play.api.libs.json.JsValue import play.api.{ Application, Configuration } import services.documents.DocumentService import services.documents.pdf.PDFConcatenator import utils.auth.{ TwilioRequestValidator, TwilioRequestValidatorImpl } import utils.secrets.SecretsManager trait TwilioControllerTestContext extends Scope { val mockFormDao: FormDAO = Mockito.mock(classOf[FormDAO]) val mockDocumentService: DocumentService = Mockito.mock(classOf[DocumentService]) val mockBasicAuthProvider: BasicAuthProvider = Mockito.mock(classOf[BasicAuthProvider]) val mockTwilioUserDao: DelegableAuthInfoDAO[TwilioUser] = Mockito.mock(classOf[DelegableAuthInfoDAO[TwilioUser]]) val mockPdfConcatenator: PDFConcatenator = Mockito.mock(classOf[PDFConcatenator]) val mockSecureRandomIdGenerator: SecureRandomIDGenerator = Mockito.mock(classOf[SecureRandomIDGenerator]) val mockConfiguration: Configuration = Mockito.mock(classOf[Configuration]) val mockSecretsManager: SecretsManager = Mockito.mock(classOf[SecretsManager]) val mockClaimDao: ClaimDAO = Mockito.mock(classOf[ClaimDAO]) val mockTwilioFaxDao: TwilioFaxDAO = Mockito.mock(classOf[TwilioFaxDAO]) Mockito.when(mockConfiguration.get[String]("twilio.authTokenSecretName")) .thenReturn("fakeSecretName") Mockito.when(mockSecretsManager.getSecretUtf8("fakeSecretName")).thenReturn("12345") 
val requestValidator = new TwilioRequestValidatorImpl(mockConfiguration, mockSecretsManager) val userID: UUID = UUID.randomUUID() /** * An identity. */ var identity = User( userID = userID, loginInfo = LoginInfo("credentials", "user@website.com"), firstName = None, lastName = None, fullName = None, email = None, avatarURL = None, activated = true, contact = None) var testClaim = Claim( userID = identity.userID, claimID = UUID.randomUUID(), key = "fakeKey", state = Claim.State.INCOMPLETE, stateUpdatedAt = java.util.Date.from(Instant.now()), recipients = Seq( Recipient(Recipient.Type.FAX, "18005555555"), Recipient(Recipient.Type.EMAIL, "test@x.com"))) var testForm = ClaimForm("VBA-21-0966-ARE", Map.empty[String, JsValue], identity.userID, testClaim.claimID, 0, 0, 0, 0) class FakeModule extends AbstractModule with ScalaModule { def configure(): Unit = { bind[FormDAO].toInstance(mockFormDao) bind[ClaimDAO].toInstance(mockClaimDao) bind[TwilioFaxDAO].toInstance(mockTwilioFaxDao) bind[DocumentService].toInstance(mockDocumentService) bind[BasicAuthProvider].toInstance(mockBasicAuthProvider) bind[DelegableAuthInfoDAO[TwilioUser]].toInstance(mockTwilioUserDao) bind[PDFConcatenator].toInstance(mockPdfConcatenator) bind[SecureRandomIDGenerator].toInstance(mockSecureRandomIdGenerator) bind[TwilioRequestValidator].toInstance(requestValidator) } } val application: Application = GuiceApplicationBuilder() .configure(Configuration(ConfigFactory.load("application.test.conf"))) .overrides(new FakeModule) .build() }
apache-2.0
ExplorViz/explorviz-ui-frontend
tests/unit/models/clazz-test.ts
411
import { module, test } from 'qunit'; import { setupTest } from 'ember-qunit'; import { run } from '@ember/runloop'; module('Unit | Model | clazz', function(hooks) { setupTest(hooks); // Replace this with your real tests. test('it exists', function(assert) { let store = this.owner.lookup('service:store'); let model = run(() => store.createRecord('clazz', {})); assert.ok(model); }); });
apache-2.0
iamovrhere/CareerStack
src/com/ovrhere/android/careerstack/utils/UnitCheck.java
3992
/* * Copyright 2014 Jason J. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.ovrhere.android.careerstack.utils; import com.ovrhere.android.careerstack.R; import android.content.SharedPreferences; import android.content.res.Resources; /** Used abundantly throughout the application so figured do it once. * Only for use within CareerStack. * @author Jason J. * @version 0.1.0-20140922 */ public class UnitCheck { /** Returns strings for either * <code>R.string.careerstack_formatString_distanceValue_km</code> * or <code> R.string.careerstack_formatString_distanceValue_miles</code> * based on bool value of pref * <code>R.string.careerstack_pref_KEY_USE_METRIC</code> * @param prefs Preference handle * @param r Resources handle * @param formatArgs The arguments to insert into * {@link Resources#getString(int, Object...)} * @return string values */ static public String units(SharedPreferences prefs, Resources r, Object... 
formatArgs){ return r.getString( unitsId(prefs, r), formatArgs); } /** Returns strings for either * <code>R.string.careerstack_formatString_distanceValue_km</code> or * <code> R.string.careerstack_formatString_distanceValue_milesShort</code> * based on bool value of pref * <code>R.string.careerstack_pref_KEY_USE_METRIC</code> * @param prefs Preference handle * @param r Resources handle * @param formatArgs The arguments to insert into * {@link Resources#getString(int, Object...)} * @return String values */ static public String unitsShort(SharedPreferences prefs, Resources r, Object... formatArgs){ return r.getString( unitsIdShort(prefs, r), formatArgs); } /** Returns either * <code>R.string.careerstack_formatString_distanceValue_km</code> * or <code> R.string.careerstack_formatString_distanceValue_miles</code> * based on bool value of pref * <code>R.string.careerstack_pref_KEY_USE_METRIC</code> * @param prefs Preference handle * @param r Resources handle * @return <code>R.string.careerstack_formatString_distanceValue_km</code> * or <code> R.string.careerstack_formatString_distanceValue_miles</code> */ static public int unitsId(SharedPreferences prefs, Resources r){ if (useMetric(prefs, r)){ //if using metric return R.string.careerstack_formatString_distanceValue_km; } return R.string.careerstack_formatString_distanceValue_miles; } /** Returns either * <code>R.string.careerstack_formatString_distanceValue_km</code> * or <code> R.string.careerstack_formatString_distanceValue_milesShort</code> * based on bool value of pref * <code>R.string.careerstack_pref_KEY_USE_METRIC</code> * @param prefs Preference handle * @param r Resources handle * @return <code>R.string.careerstack_formatString_distanceValue_km</code> * or <code> R.string.careerstack_formatString_distanceValue_milesShort</code> */ static public int unitsIdShort(SharedPreferences prefs, Resources r){ if (useMetric(prefs, r)){ //if using metric return R.string.careerstack_formatString_distanceValue_km; } return 
R.string.careerstack_formatString_distanceValue_milesShort; } /** Returns <code>true</code> if set to metric, <code>false</code> if set * to imperial. * @param prefs * @param r */ public static boolean useMetric(SharedPreferences prefs, Resources r) { return prefs.getBoolean( r.getString(R.string.careerstack_pref_KEY_USE_MILES), false) == false; } }
apache-2.0
evertrue/s3_dir
spec/spec_helper.rb
258
# Encoding: utf-8 require 'coveralls' Coveralls.wear! require 'chefspec' require 'chefspec/berkshelf' RSpec.configure do |config| config.color = true config.formatter = :documentation config.platform = 'ubuntu' config.version = '16.04' end
apache-2.0
levackt/resilience-demo
hello-hystrix/src/main/java/io/fabric8/kubeflix/examples/hellohystrix/HelloHystrixServlet.java
1358
/* * Copyright (C) 2015 Red Hat, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.fabric8.kubeflix.examples.hellohystrix; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; public class HelloHystrixServlet extends HttpServlet { @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { // Set response content type resp.setContentType("text/html"); // Actual logic goes here. PrintWriter out = resp.getWriter(); HelloCommand helloCommand = new HelloCommand(); out.println("<h1>" + helloCommand.execute() + "</h1>"); } }
apache-2.0
AdrianAndroid/zuitangxuan
zuitangxuan/src/com/zuitangxuan/database/T008permissionGroup/InterfPermissionGroupKey.java
438
package com.zuitangxuan.database.T008permissionGroup; import com.zuitangxuan.database.InterfBaseKey; public interface InterfPermissionGroupKey extends InterfBaseKey { public static final String TABLE_NAME = "operationPermissions"; // public static final String ID = "id"; // /**分店*/ // public static final String SUBBRANCH = "subbranch"; /**权限组名称*/ public static final String PERMISSION_GROUP_NAME = "groupName"; }
apache-2.0
oMMuCo/HPTT-FT-UGM-Official-Website
protected/modules/article/controllers/LikeController.php
3901
<?php /** * LikeController * @var $this LikeController * @var $model ArticleLikes * @var $form CActiveForm * version: 0.0.1 * Reference start * * TOC : * Index * Up * Down * * LoadModel * performAjaxValidation * * @author Putra Sudaryanto <putra.sudaryanto@gmail.com> * @copyright Copyright (c) 2012 Ommu Platform (ommu.co) * @link https://github.com/oMMu/Ommu-Articles * @contect (+62)856-299-4114 * *---------------------------------------------------------------------------------------------------------- */ class LikeController extends Controller { /** * @var string the default layout for the views. Defaults to '//layouts/column2', meaning * using two-column layout. See 'protected/views/layouts/column2.php'. */ //public $layout='//layouts/column2'; public $defaultAction = 'index'; /** * Initialize admin page theme */ public function init() { if(ArticleSetting::getInfo('permission') == 1) { $arrThemes = Utility::getCurrentTemplate('public'); Yii::app()->theme = $arrThemes['folder']; $this->layout = $arrThemes['layout']; } else { $this->redirect(Yii::app()->createUrl('site/index')); } } /** * @return array action filters */ public function filters() { return array( 'accessControl', // perform access control for CRUD operations //'postOnly + delete', // we only allow deletion via POST request ); } /** * Specifies the access control rules. * This method is used by the 'accessControl' filter. 
* @return array access control rules */ public function accessRules() { return array( array('allow', // allow all users to perform 'index' and 'view' actions 'actions'=>array('index'), 'users'=>array('*'), ), array('allow', // allow authenticated user to perform 'create' and 'update' actions 'actions'=>array('up','down'), 'users'=>array('@'), 'expression'=>'isset(Yii::app()->user->level)', //'expression'=>'isset(Yii::app()->user->level) && (Yii::app()->user->level != 1)', ), array('allow', // allow admin user to perform 'admin' and 'delete' actions 'actions'=>array(), 'users'=>array('admin'), ), array('deny', // deny all users 'users'=>array('*'), ), ); } /** * Lists all models. */ public function actionIndex() { $this->redirect(Yii::app()->createUrl('site/index')); } /** * Creates a new model. * If creation is successful, the browser will be redirected to the 'view' page. */ public function actionUp($id=null) { if($id == null) { $this->redirect(array('site/index')); } else { $model=new ArticleLikes; $model->article_id = $id; if($model->save()) { $this->redirect(array('site/view','id'=>$model->article_id,'t'=>Utility::getUrlTitle($model->article->title))); } } } /** * Creates a new model. * If creation is successful, the browser will be redirected to the 'view' page. */ public function actionDown($id=null) { if($id == null) { $this->redirect(array('site/index')); } else { $model=$this->loadModel($id); if($model->delete()) { $this->redirect(array('site/view','id'=>$model->article_id,'t'=>Utility::getUrlTitle($model->article->title))); } } } /** * Returns the data model based on the primary key given in the GET variable. * If the data model is not found, an HTTP exception will be raised. * @param integer the ID of the model to be loaded */ public function loadModel($id) { $model = ArticleLikes::model()->findByPk($id); if($model===null) throw new CHttpException(404, Phrase::trans(193,0)); return $model; } /** * Performs the AJAX validation. 
* @param CModel the model to be validated */ protected function performAjaxValidation($model) { if(isset($_POST['ajax']) && $_POST['ajax']==='article-likes-form') { echo CActiveForm::validate($model); Yii::app()->end(); } } }
apache-2.0
99soft/guartz
src/test/java/org/nnsoft/guice/guartz/RepeatedSchedulingTestCase.java
1793
package org.nnsoft.guice.guartz; /* * Copyright 2009-2012 The 99 Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import com.google.inject.Inject; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.quartz.Scheduler; import static com.google.inject.Guice.createInjector; import static junit.framework.Assert.assertTrue; public class RepeatedSchedulingTestCase { @Inject private TimedTask timedTask; @Inject private Scheduler scheduler; @Before public void setUp() throws Exception { createInjector( new QuartzModule() { @Override protected void schedule() { scheduleJob( TimedTask.class ).updateExistingTrigger(); scheduleJob( TimedTask.class ).updateExistingTrigger(); } } ).getMembersInjector( RepeatedSchedulingTestCase.class ).injectMembers( this ); } @After public void tearDown() throws Exception { this.scheduler.shutdown(); } @Test public void minimalTest() throws Exception { Thread.sleep( 5000 ); assertTrue( this.timedTask.getInvocationsTimedTaskA() > 0 ); } }
apache-2.0
bhaecke/export-distro
src/main/java/org/edgexfoundry/serviceactivator/RESTOutboundServiceActivator.java
2876
/******************************************************************************* * Copyright 2017 Dell Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @microservice: export-distro * @author: Jim White, Dell * @version: 1.0.0 *******************************************************************************/ package org.edgexfoundry.serviceactivator; import org.edgexfoundry.domain.export.ExportString; import org.edgexfoundry.domain.meta.Addressable; import org.springframework.http.HttpMethod; import org.springframework.integration.annotation.MessageEndpoint; import org.springframework.integration.annotation.ServiceActivator; import org.springframework.integration.http.outbound.HttpRequestExecutingMessageHandler; import org.springframework.integration.support.MessageBuilder; import org.springframework.messaging.Message; /** * Send export data to REST channel of client's desire. 
* * @author jim_white * */ @MessageEndpoint public class RESTOutboundServiceActivator { // private static final Logger logger = // Logger.getLogger(RESTOutboundServiceActivator.class); // replace above logger with EdgeXLogger below private final static org.edgexfoundry.support.logging.client.EdgeXLogger logger = org.edgexfoundry.support.logging.client.EdgeXLoggerFactory .getEdgeXLogger(RESTOutboundServiceActivator.class); @ServiceActivator(inputChannel = "outbound-rest", outputChannel = "mark-outboud") public String restOutbound(Message<?> msg) { try { ExportString exportString = (ExportString) msg.getPayload(); logger.debug("message arrived at REST outbound sender: " + exportString.getEventId()); Addressable addressable = exportString.getRegistration().getAddressable(); String uri = addressable.getAddress() + ":" + addressable.getPort() + addressable.getPath(); HttpRequestExecutingMessageHandler handler = new HttpRequestExecutingMessageHandler(uri); handler.setHttpMethod(HttpMethod.POST); handler.setExpectReply(false); Message<String> message = MessageBuilder.withPayload(exportString.getEventString()).build(); handler.handleMessage(message); logger.info("message sent to REST address: " + uri + " : " + exportString.getEventId()); return exportString.getEventId(); } catch (Exception e) { logger.error("Problem with sending message via REST: " + e.getMessage()); return null; } } }
apache-2.0
youdonghai/intellij-community
plugins/InspectionGadgets/src/com/intellij/codeInspection/TrivialFunctionalExpressionUsageInspection.java
12221
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInspection; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Condition; import com.intellij.psi.*; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.MethodSignatureUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.refactoring.util.InlineUtil; import com.intellij.refactoring.util.LambdaRefactoringUtil; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import java.util.Collections; import java.util.List; public class TrivialFunctionalExpressionUsageInspection extends BaseJavaBatchLocalInspectionTool { @NotNull @Override public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, boolean isOnTheFly) { return new JavaElementVisitor() { @Override public void visitMethodReferenceExpression(final PsiMethodReferenceExpression expression) { doCheckMethodCallOnFunctionalExpression(expression, element -> expression.resolve() != null); } @Override public void visitLambdaExpression(final PsiLambdaExpression expression) { doCheckMethodCallOnFunctionalExpression(expression, ggParent -> { final PsiElement callParent = ggParent.getParent(); final PsiElement body = expression.getBody(); if (!(body instanceof PsiCodeBlock)) { return callParent instanceof PsiStatement || callParent instanceof PsiLocalVariable || 
expression.isValueCompatible(); } if (((PsiCodeBlock)body).getStatements().length == 1) { return callParent instanceof PsiStatement || callParent instanceof PsiLocalVariable || ((PsiCodeBlock)body).getStatements()[0] instanceof PsiReturnStatement && expression.isValueCompatible(); } final List<PsiExpression> returnExpressions = LambdaUtil.getReturnExpressions(expression); if (returnExpressions.size() > 1) { return false; } if (returnExpressions.isEmpty()) { return callParent instanceof PsiStatement; } return callParent instanceof PsiStatement || callParent instanceof PsiLocalVariable; }); } @Override public void visitAnonymousClass(final PsiAnonymousClass aClass) { if (AnonymousCanBeLambdaInspection.canBeConvertedToLambda(aClass, false, Collections.emptySet())) { final PsiElement newExpression = aClass.getParent(); doCheckMethodCallOnFunctionalExpression(ggParent -> { final PsiMethod method = aClass.getMethods()[0]; final PsiCodeBlock body = method.getBody(); final PsiReturnStatement[] returnStatements = PsiUtil.findReturnStatements(body); if (returnStatements.length > 1) { return false; } final PsiElement callParent = ggParent.getParent(); return callParent instanceof PsiStatement || callParent instanceof PsiLocalVariable; }, newExpression, aClass.getBaseClassType(), new ReplaceAnonymousWithLambdaBodyFix()); } } private void doCheckMethodCallOnFunctionalExpression(PsiElement expression, Condition<PsiElement> elementContainerCondition) { final PsiElement parent = PsiUtil.skipParenthesizedExprUp(expression.getParent()); if (parent instanceof PsiTypeCastExpression) { final PsiType interfaceType = ((PsiTypeCastExpression)parent).getType(); doCheckMethodCallOnFunctionalExpression(elementContainerCondition, parent, interfaceType, expression instanceof PsiLambdaExpression ? 
new ReplaceWithLambdaBodyFix() : new ReplaceWithMethodReferenceFix()); } } private void doCheckMethodCallOnFunctionalExpression(Condition<PsiElement> elementContainerCondition, PsiElement parent, PsiType interfaceType, LocalQuickFix fix) { final PsiElement gParent = PsiUtil.skipParenthesizedExprUp(parent.getParent()); if (gParent instanceof PsiReferenceExpression) { final PsiElement ggParent = gParent.getParent(); if (ggParent instanceof PsiMethodCallExpression) { final PsiMethod resolveMethod = ((PsiMethodCallExpression)ggParent).resolveMethod(); final PsiElement referenceNameElement = ((PsiMethodCallExpression)ggParent).getMethodExpression().getReferenceNameElement(); if (resolveMethod != null && !resolveMethod.isVarArgs() && ((PsiMethodCallExpression)ggParent).getArgumentList().getExpressions().length == resolveMethod.getParameterList().getParametersCount() && referenceNameElement != null && elementContainerCondition.value(ggParent)) { final PsiMethod interfaceMethod = LambdaUtil.getFunctionalInterfaceMethod(interfaceType); if (resolveMethod == interfaceMethod || interfaceMethod != null && MethodSignatureUtil.isSuperMethod(interfaceMethod, resolveMethod)) { holder.registerProblem(referenceNameElement, "Method call can be simplified", fix); } } } } } }; } private static void replaceWithLambdaBody(PsiMethodCallExpression callExpression, PsiLambdaExpression element) { inlineCallArguments(callExpression, element); final PsiElement body = element.getBody(); if (body instanceof PsiExpression) { callExpression.replace(body); } else if (body instanceof PsiCodeBlock) { final PsiElement parent = callExpression.getParent(); if (parent instanceof PsiStatement) { final PsiElement gParent = parent.getParent(); restoreComments(gParent, parent, body); for (PsiStatement statement : ((PsiCodeBlock)body).getStatements()) { PsiElement toInsert; if (statement instanceof PsiReturnStatement) { toInsert = ((PsiReturnStatement)statement).getReturnValue(); } else { toInsert = statement; 
} if (toInsert != null) { gParent.addBefore(toInsert, parent); } } parent.delete(); } else { final PsiStatement[] statements = ((PsiCodeBlock)body).getStatements(); if (statements.length > 0) { final PsiStatement anchor = PsiTreeUtil.getParentOfType(parent, PsiStatement.class); if (anchor != null) { final PsiElement gParent = anchor.getParent(); restoreComments(gParent, anchor, body); for (int i = 0; i < statements.length - 1; i++) { gParent.addBefore(statements[i], anchor); } } PsiStatement statement = statements[statements.length - 1]; final PsiExpression returnValue = ((PsiReturnStatement)statement).getReturnValue(); if (returnValue != null) { callExpression.replace(returnValue); } } } } } private static void restoreComments(PsiElement gParent, PsiElement parent, PsiElement body) { for (PsiElement comment : PsiTreeUtil.findChildrenOfType(body, PsiComment.class)) { gParent.addBefore(comment, parent); } } private static void inlineCallArguments(PsiMethodCallExpression callExpression, PsiLambdaExpression element) { final PsiExpression[] args = callExpression.getArgumentList().getExpressions(); final PsiParameter[] parameters = element.getParameterList().getParameters(); for (int i = 0; i < parameters.length; i++) { final PsiParameter parameter = parameters[i]; final PsiExpression initializer = args[i]; for (PsiReference reference : ReferencesSearch.search(parameter)) { final PsiElement referenceElement = reference.getElement(); if (referenceElement instanceof PsiJavaCodeReferenceElement) { InlineUtil.inlineVariable(parameter, initializer, (PsiJavaCodeReferenceElement)referenceElement); } } } } private static class ReplaceWithLambdaBodyFix extends ReplaceFix { @Nls @NotNull @Override public String getFamilyName() { return "Replace method call on lambda with lambda body"; } @Override protected void fixExpression(PsiMethodCallExpression callExpression, PsiExpression qualifierExpression) { if (qualifierExpression instanceof PsiTypeCastExpression) { final PsiExpression 
element = ((PsiTypeCastExpression)qualifierExpression).getOperand(); if (element instanceof PsiLambdaExpression) { replaceWithLambdaBody(callExpression, (PsiLambdaExpression)element); } } } } private static class ReplaceWithMethodReferenceFix extends ReplaceFix { @Nls @NotNull @Override public String getFamilyName() { return "Replace method call on method reference with corresponding method call"; } @Override protected void fixExpression(PsiMethodCallExpression callExpression, PsiExpression qualifierExpression) { if (qualifierExpression instanceof PsiTypeCastExpression) { final PsiExpression element = ((PsiTypeCastExpression)qualifierExpression).getOperand(); if (element instanceof PsiMethodReferenceExpression) { final PsiLambdaExpression lambdaExpression = LambdaRefactoringUtil.convertMethodReferenceToLambda((PsiMethodReferenceExpression)element, false, true); if (lambdaExpression != null) { replaceWithLambdaBody(callExpression, lambdaExpression); } } } } } private static class ReplaceAnonymousWithLambdaBodyFix extends ReplaceFix { @Nls @NotNull @Override public String getFamilyName() { return "Replace call with method body"; } @Override protected void fixExpression(PsiMethodCallExpression callExpression, PsiExpression qualifierExpression) { final PsiExpression cast = AnonymousCanBeLambdaInspection.replacePsiElementWithLambda(qualifierExpression, true, false); if (cast instanceof PsiTypeCastExpression) { final PsiExpression lambdaExpression = ((PsiTypeCastExpression)cast).getOperand(); if (lambdaExpression instanceof PsiLambdaExpression) { replaceWithLambdaBody(callExpression, (PsiLambdaExpression)lambdaExpression); } } } } private static abstract class ReplaceFix implements LocalQuickFix { @Nls @NotNull @Override public String getName() { return getFamilyName(); } @Override public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) { final PsiElement psiElement = descriptor.getPsiElement(); final PsiMethodCallExpression callExpression = 
PsiTreeUtil.getParentOfType(psiElement, PsiMethodCallExpression.class); if (callExpression != null) { fixExpression(callExpression, PsiUtil.skipParenthesizedExprDown(callExpression.getMethodExpression().getQualifierExpression())); } } protected abstract void fixExpression(PsiMethodCallExpression callExpression, PsiExpression qualifierExpression); } }
apache-2.0
huangchuchuan/Spider
BaiduZhidaoCommentSpider/baidu_zhidao_comment_spider.py
5584
# -*- coding: utf-8 -*- # @Author : Huangcc import requests import urllib from lxml import etree import re import datetime import codecs import time import random SLEEP = [0.5, 1, 1.5, 2, 2.5, 3] class BaiduZhidao(): search_url = 'https://zhidao.baidu.com/search?word={keyword}&ie=gbk&site=-1&sites=0&date=0&pn=PAGE' my_headers = { 'Host': 'zhidao.baidu.com', 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Language': 'en-US,en;q=0.5', 'Accept-Encoding': 'gzip, deflate, br', } comment_url = 'https://zhidao.baidu.com/question/{question_id}.html?sort=9&rn=5&pn=PAGE#wgt-answers' def __init__(self, keyword): self.session = requests.Session() self.keyword = keyword self.search_url = self.search_url.format(keyword=urllib.quote(keyword.decode('utf-8').encode('gbk'))) self.question_ids = [] self.filename = 'baidu_zhidao-comments-%s.csv' % (datetime.datetime.now().strftime('%Y%m%d-%H%M')) def set_keyword(self, keyword): self.keyword = keyword def reset_filename(self): self.filename = 'baidu_zhidao-comments-%s.csv' % (datetime.datetime.now().strftime('%Y%m%d-%H%M')) @staticmethod def extract_question_id(url): pattern = '/question/(\d+?)\.' 
result = re.findall(pattern, url) if result: return result[0] else: return None @staticmethod def html_filter(html_text): html_text = html_text.replace('\n', '').replace('\t', ' ') pattern = re.compile(r'<[^>]+>', re.S) no_html_text = pattern.sub('', html_text) return no_html_text def search(self, page=0): print '-*- start search with page %d -*-' % (page / 10 + 1) time.sleep(SLEEP[random.randint(0, len(SLEEP) - 1)]) resp = self.session.get(url=self.search_url.replace('PAGE', str(page)), headers=self.my_headers) if resp.status_code == 200: response = etree.HTML(resp.text) urls = response.xpath('//a[@class="ti"]/@href') self.question_ids.extend(filter(lambda x: True if x else False, map(self.extract_question_id, urls))) next_page = response.xpath('//a[@class="pager-next"]/@href') if next_page: next_page_number = re.findall('&pn=(\d+)$', next_page[0]) if next_page_number: next_page_number = int(next_page_number[0]) else: next_page_number = 0 self.search(page=next_page_number) # 递归调用直到没有下一页 else: print '=*= end search with page %d =*=' % (page / 10 + 1) else: print 'Error status code %d in getting search result with page %d' % (resp.status_code, (page / 10 + 1)) print resp.content def print_question_ids(self): print self.question_ids def find_comments(self): total = len(self.question_ids) for i, question_id in enumerate(self.question_ids): print '|*| start get content from question id %s - %d/%d |*|' % (question_id, i + 1, total) url = self.comment_url.format(question_id=question_id) self.comment(url) print '_*_ end get content from question id %s - %d/%d _*_' % (question_id, i + 1, total) def comment(self, url, page=0): print ' * start get comments with page %d *' % (page / 5 + 1) time.sleep(SLEEP[random.randint(0, len(SLEEP) - 1)]) resp = self.session.get(url.replace('PAGE', str(page)), headers=self.my_headers, allow_redirects=False) if resp.status_code != 200: print 'Error status code %d in getting comment result with page %d' % (resp.status_code, (page / 5 + 1)) 
print resp.content else: response = etree.HTML(resp.content) comment_nodes = response.xpath('//span[@class="con"]') comments = [] for node in comment_nodes: print node.xpath('string(.)') comments.append(node.xpath('string(.)').strip()) print ' | get %d comments | ' % len(comments) # 获取问题 ask_title = response.xpath('//title/text()') if ask_title: ask_title = ask_title[0] else: ask_title = "" if comments: comments = map(self.html_filter, comments) with codecs.open(self.filename, 'a', encoding='utf-8') as f: for data in comments: f.write(ask_title + '|' + data + '\n') next_page = response.xpath('//a[@class="pager-next"]/@href') if next_page: next_page_number = re.findall('&pn=(\d+)#', next_page[0]) if next_page_number: next_page_number = int(next_page_number[0]) else: next_page_number = 0 self.comment(url, next_page_number) # 递归调用直到没有下一页 else: print ' - end get comments with page %d -' % (page / 5 + 1) if __name__ == '__main__': baidu_zhidao = BaiduZhidao('美年大健康') baidu_zhidao.search() baidu_zhidao.find_comments()
apache-2.0
cowthan/JavaAyo
partyzoo/src-okhttputils/com/lzy/okhttputils/cookie/store/HasCookieStore.java
113
package com.lzy.okhttputils.cookie.store; public interface HasCookieStore { CookieStore getCookieStore(); }
apache-2.0
ambasta/aws-sdk-cpp
aws-cpp-sdk-cognito-sync/source/model/CognitoStreams.cpp
2209
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #include <aws/cognito-sync/model/CognitoStreams.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace CognitoSync { namespace Model { CognitoStreams::CognitoStreams() : m_streamNameHasBeenSet(false), m_roleArnHasBeenSet(false), m_streamingStatusHasBeenSet(false) { } CognitoStreams::CognitoStreams(const JsonValue& jsonValue) : m_streamNameHasBeenSet(false), m_roleArnHasBeenSet(false), m_streamingStatusHasBeenSet(false) { *this = jsonValue; } CognitoStreams& CognitoStreams::operator =(const JsonValue& jsonValue) { if(jsonValue.ValueExists("StreamName")) { m_streamName = jsonValue.GetString("StreamName"); m_streamNameHasBeenSet = true; } if(jsonValue.ValueExists("RoleArn")) { m_roleArn = jsonValue.GetString("RoleArn"); m_roleArnHasBeenSet = true; } if(jsonValue.ValueExists("StreamingStatus")) { m_streamingStatus = StreamingStatusMapper::GetStreamingStatusForName(jsonValue.GetString("StreamingStatus")); m_streamingStatusHasBeenSet = true; } return *this; } JsonValue CognitoStreams::Jsonize() const { JsonValue payload; if(m_streamNameHasBeenSet) { payload.WithString("StreamName", m_streamName); } if(m_roleArnHasBeenSet) { payload.WithString("RoleArn", m_roleArn); } if(m_streamingStatusHasBeenSet) { payload.WithString("StreamingStatus", StreamingStatusMapper::GetNameForStreamingStatus(m_streamingStatus)); } 
return payload; } } // namespace Model } // namespace CognitoSync } // namespace Aws
apache-2.0
uCloudCastle/TajiProject
app/src/main/utils/java/com/randal/aviana/StringUtils.java
5179
package com.randal.aviana; public final class StringUtils { private StringUtils() { throw new UnsupportedOperationException("DO NOT INSTANTIATE THIS CLASS"); } /** * 判断字符串是否为null或长度为0 * * @param s 待校验字符串 * @return {@code true}: 空<br> {@code false}: 不为空 */ public static boolean isEmpty(CharSequence s) { return s == null || s.length() == 0; } /** * 判断字符串是否为null或全为空格 * * @param s 待校验字符串 * @return {@code true}: null或全空格<br> {@code false}: 不为null且不全空格 */ public static boolean isTrimEmpty(String s) { return (s == null || s.trim().length() == 0); } /** * 判断字符串是否为null或全为空白字符 * * @param s 待校验字符串 * @return {@code true}: null或全空白字符<br> {@code false}: 不为null且不全空白字符 */ public static boolean isSpace(String s) { if (s == null) return true; for (int i = 0, len = s.length(); i < len; ++i) { if (!Character.isWhitespace(s.charAt(i))) { return false; } } return true; } /** * 判断两字符串是否相等 * * @param a 待校验字符串a * @param b 待校验字符串b * @return {@code true}: 相等<br>{@code false}: 不相等 */ public static boolean equals(CharSequence a, CharSequence b) { if (a == b) return true; int length; if (a != null && b != null && (length = a.length()) == b.length()) { if (a instanceof String && b instanceof String) { return a.equals(b); } else { for (int i = 0; i < length; i++) { if (a.charAt(i) != b.charAt(i)) return false; } return true; } } return false; } /** * 判断两字符串忽略大小写是否相等 * * @param a 待校验字符串a * @param b 待校验字符串b * @return {@code true}: 相等<br>{@code false}: 不相等 */ public static boolean equalsIgnoreCase(String a, String b) { return a == null ? b == null : a.equalsIgnoreCase(b); } /** * null转为长度为0的字符串 * * @param s 待转字符串 * @return s为null转为长度为0字符串,否则不改变 */ public static String null2Length0(String s) { return s == null ? "" : s; } /** * 返回字符串长度 * * @param s 字符串 * @return null返回0,其他返回自身长度 */ public static int length(CharSequence s) { return s == null ? 
0 : s.length(); } /** * 首字母大写 * * @param s 待转字符串 * @return 首字母大写字符串 */ public static String upperFirstLetter(String s) { if (isEmpty(s) || !Character.isLowerCase(s.charAt(0))) return s; return String.valueOf((char) (s.charAt(0) - 32)) + s.substring(1); } /** * 首字母小写 * * @param s 待转字符串 * @return 首字母小写字符串 */ public static String lowerFirstLetter(String s) { if (isEmpty(s) || !Character.isUpperCase(s.charAt(0))) return s; return String.valueOf((char) (s.charAt(0) + 32)) + s.substring(1); } /** * 反转字符串 * * @param s 待反转字符串 * @return 反转字符串 */ public static String reverse(String s) { int len = length(s); if (len <= 1) return s; int mid = len >> 1; char[] chars = s.toCharArray(); char c; for (int i = 0; i < mid; ++i) { c = chars[i]; chars[i] = chars[len - i - 1]; chars[len - i - 1] = c; } return new String(chars); } /** * 转化为半角字符 * * @param s 待转字符串 * @return 半角字符串 */ public static String toDBC(String s) { if (isEmpty(s)) return s; char[] chars = s.toCharArray(); for (int i = 0, len = chars.length; i < len; i++) { if (chars[i] == 12288) { chars[i] = ' '; } else if (65281 <= chars[i] && chars[i] <= 65374) { chars[i] = (char) (chars[i] - 65248); } else { chars[i] = chars[i]; } } return new String(chars); } /** * 转化为全角字符 * * @param s 待转字符串 * @return 全角字符串 */ public static String toSBC(String s) { if (isEmpty(s)) return s; char[] chars = s.toCharArray(); for (int i = 0, len = chars.length; i < len; i++) { if (chars[i] == ' ') { chars[i] = (char) 12288; } else if (33 <= chars[i] && chars[i] <= 126) { chars[i] = (char) (chars[i] + 65248); } else { chars[i] = chars[i]; } } return new String(chars); } }
apache-2.0
mdavid/SuperSocket
v1.4/Test/TestSession.cs
920
using System; using System.Collections.Generic; using System.Linq; using System.Text; using SuperSocket.SocketBase; using SuperSocket.SocketBase.Command; namespace SuperSocket.Test { public class TestSession : AppSession<TestSession> { public const string WelcomeMessageFormat = "Welcome to {0}"; public const string UnknownCommandMessageFormat = "Unknown command: {0}"; public override void StartSession() { if(AppServer.Config.Mode != SocketMode.Udp) SendResponse(string.Format(WelcomeMessageFormat, AppServer.Name)); } public override void HandleExceptionalError(Exception e) { } public override void HandleUnknownCommand(StringCommandInfo cmdInfo) { SendResponse(string.Format(UnknownCommandMessageFormat, cmdInfo.Key)); } } }
apache-2.0
fhanik/spring-security
web/src/test/java/org/springframework/security/web/access/channel/InsecureChannelProcessorTests.java
4646
/* * Copyright 2004, 2005, 2006 Acegi Technology Pty Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.web.access.channel; import javax.servlet.FilterChain; import org.junit.Test; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.security.access.SecurityConfig; import org.springframework.security.web.FilterInvocation; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.mockito.Mockito.mock; /** * Tests {@link InsecureChannelProcessor}. 
* * @author Ben Alex */ public class InsecureChannelProcessorTests { @Test public void testDecideDetectsAcceptableChannel() throws Exception { MockHttpServletRequest request = new MockHttpServletRequest(); request.setQueryString("info=true"); request.setServerName("localhost"); request.setContextPath("/bigapp"); request.setServletPath("/servlet"); request.setScheme("http"); request.setServerPort(8080); MockHttpServletResponse response = new MockHttpServletResponse(); FilterInvocation fi = new FilterInvocation(request, response, mock(FilterChain.class)); InsecureChannelProcessor processor = new InsecureChannelProcessor(); processor.decide(fi, SecurityConfig.createList("SOME_IGNORED_ATTRIBUTE", "REQUIRES_INSECURE_CHANNEL")); assertThat(fi.getResponse().isCommitted()).isFalse(); } @Test public void testDecideDetectsUnacceptableChannel() throws Exception { MockHttpServletRequest request = new MockHttpServletRequest(); request.setQueryString("info=true"); request.setServerName("localhost"); request.setContextPath("/bigapp"); request.setServletPath("/servlet"); request.setScheme("https"); request.setSecure(true); request.setServerPort(8443); MockHttpServletResponse response = new MockHttpServletResponse(); FilterInvocation fi = new FilterInvocation(request, response, mock(FilterChain.class)); InsecureChannelProcessor processor = new InsecureChannelProcessor(); processor.decide(fi, SecurityConfig.createList(new String[] { "SOME_IGNORED_ATTRIBUTE", "REQUIRES_INSECURE_CHANNEL" })); assertThat(fi.getResponse().isCommitted()).isTrue(); } @Test public void testDecideRejectsNulls() throws Exception { InsecureChannelProcessor processor = new InsecureChannelProcessor(); processor.afterPropertiesSet(); assertThatIllegalArgumentException().isThrownBy(() -> processor.decide(null, null)); } @Test public void testGettersSetters() { InsecureChannelProcessor processor = new InsecureChannelProcessor(); assertThat(processor.getInsecureKeyword()).isEqualTo("REQUIRES_INSECURE_CHANNEL"); 
processor.setInsecureKeyword("X"); assertThat(processor.getInsecureKeyword()).isEqualTo("X"); assertThat(processor.getEntryPoint() != null).isTrue(); processor.setEntryPoint(null); assertThat(processor.getEntryPoint() == null).isTrue(); } @Test public void testMissingEntryPoint() throws Exception { InsecureChannelProcessor processor = new InsecureChannelProcessor(); processor.setEntryPoint(null); assertThatIllegalArgumentException().isThrownBy(processor::afterPropertiesSet) .withMessage("entryPoint required"); } @Test public void testMissingSecureChannelKeyword() throws Exception { InsecureChannelProcessor processor = new InsecureChannelProcessor(); processor.setInsecureKeyword(null); assertThatIllegalArgumentException().isThrownBy(processor::afterPropertiesSet) .withMessage("insecureKeyword required"); processor.setInsecureKeyword(""); assertThatIllegalArgumentException().isThrownBy(processor::afterPropertiesSet) .withMessage("insecureKeyword required"); } @Test public void testSupports() { InsecureChannelProcessor processor = new InsecureChannelProcessor(); assertThat(processor.supports(new SecurityConfig("REQUIRES_INSECURE_CHANNEL"))).isTrue(); assertThat(processor.supports(null)).isFalse(); assertThat(processor.supports(new SecurityConfig("NOT_SUPPORTED"))).isFalse(); } }
apache-2.0
Deus0/Zeltexium
Assets/Plugins/DroneMachine/Examples/Scripts/ActivateObjectsOnTrigger.cs
1218
using UnityEngine; namespace DerelictComputer { [RequireComponent(typeof(BoxCollider))] public class ActivateObjectsOnTrigger : MonoBehaviour { public enum TriggerType { Enable, Trigger } [SerializeField] private TriggerType _triggerType = TriggerType.Trigger; [SerializeField] private GameObject[] _objectsToActivate; [SerializeField] private GameObject[] _objectsToDeactivate; private void OnEnable() { var c = GetComponent<BoxCollider>(); c.isTrigger = true; if (_triggerType != TriggerType.Enable) { return; } DoActivate(); } private void OnTriggerEnter() { if (_triggerType != TriggerType.Trigger) { return; } DoActivate(); } private void DoActivate() { foreach (var o in _objectsToActivate) { o.SetActive(true); } foreach (var o in _objectsToDeactivate) { o.SetActive(false); } } } }
apache-2.0
vjanmey/EpicMudfia
com/planet_ink/coffee_mud/Behaviors/NoCombatAssist.java
1985
package com.planet_ink.coffee_mud.Behaviors; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2014 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class NoCombatAssist extends StdBehavior { @Override public String ID(){return "NoCombatAssist";} int tickTocker=1; int tickTock=0; @Override public String accountForYourself() { return "unhelpful passiveness"; } @Override public boolean tick(Tickable ticking, int tickID) { super.tick(ticking,tickID); if(tickID!=Tickable.TICKID_MOB) return true; if(--tickTock>0) return true; ((MOB)ticking).setBitmap(CMath.setb(((MOB)ticking).getBitmap(),MOB.ATT_AUTOASSIST)); if((++tickTocker)==100) tickTocker=99; tickTock=tickTocker; return true; } }
apache-2.0
sarl/sarl
main/coreplugins/io.sarl.lang/src-gen/io/sarl/lang/codebuilder/builders/ISarlConstructorBuilder.java
3266
/* * $Id$ * * File is automatically generated by the Xtext language generator. * Do not change it. * * SARL is an general-purpose agent programming language. * More details on http://www.sarl.io * * Copyright (C) 2014-2021 the original authors or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sarl.lang.codebuilder.builders; import io.sarl.lang.sarl.SarlConstructor; import org.eclipse.emf.common.notify.Notifier; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.xtend.core.xtend.XtendTypeDeclaration; import org.eclipse.xtext.common.types.JvmParameterizedTypeReference; import org.eclipse.xtext.common.types.access.IJvmTypeProvider; import org.eclipse.xtext.xbase.lib.Pure; /** Builder of a Sarl SarlConstructor. */ @SuppressWarnings("all") public interface ISarlConstructorBuilder { /** Find the reference to the type with the given name. * @param typeName the fully qualified name of the type * @return the type reference. */ JvmParameterizedTypeReference newTypeRef(String typeName); /** Find the reference to the type with the given name. * @param context the context for the type reference use * @param typeName the fully qualified name of the type * @return the type reference. */ JvmParameterizedTypeReference newTypeRef(Notifier context, String typeName); /** Dispose the resource. */ void dispose(); /** Replies the context for type resolution. * @return the context or {@code null} if the Ecore object is the context. 
*/ IJvmTypeProvider getTypeResolutionContext(); /** Initialize the Ecore element. * @param container the container of the SarlConstructor. */ void eInit(XtendTypeDeclaration container, IJvmTypeProvider context); /** Replies the generated element. */ @Pure SarlConstructor getSarlConstructor(); /** Replies the resource. */ @Pure Resource eResource(); /** Change the documentation of the element. * * <p>The documentation will be displayed just before the element. * * @param doc the documentation. */ void setDocumentation(String doc); /** Add a formal parameter. * @param name the name of the formal parameter. */ IFormalParameterBuilder addParameter(String name); /** Add a throwable exception. * @param type the fully qualified name of the exception. */ void addException(String type); /** Create the block of code. * @return the block builder. */ IBlockExpressionBuilder getExpression(); /** Add a modifier. * @param modifier the modifier to add. */ void addModifier(String modifier); /** Add a type parameter. * @param name the simple name of the type parameter. * @return the builder of type parameter. */ ITypeParameterBuilder addTypeParameter(String name); }
apache-2.0
aajjbb/contest-files
URI/Enisvaldo.cpp
1270
#include <bits/stdc++.h> template<typename T> T gcd(T a, T b) { if(!b) return a; return gcd(b, a % b); } template<typename T> T lcm(T a, T b) { return a * b / gcd(a, b); } template<typename T> void chmin(T& a, T b) { a = (a > b) ? b : a; } template<typename T> void chmax(T& a, T b) { a = (a < b) ? b : a; } int in() { int x; scanf("%d", &x); return x; } using namespace std; #ifdef ONLINE_JUDGE #define debug(args...) #else #define debug(args...) fprintf(stderr,args) #endif typedef long long Int; typedef unsigned long long uInt; typedef unsigned uint; const int MAXN = 110; int T, N; vector<int> cnt[MAXN]; bool in(int v) { return v >= 10 && v <= 100; } int main(void) { cin >> T; while (T--) { cin >> N; for (int i = 0; i < MAXN; i++) { cnt[i].clear(); } int type, val; int ans = 0; for (int i = 0; i < N; i++) { cin >> type >> val; cnt[type].push_back(val); } for (int i = 1; i < MAXN; i++) { sort(cnt[i].begin(), cnt[i].end()); int best = 0; int M = (int) cnt[i].size(); if (M != 0) best = cnt[i][M - 1]; for (int j = M - 1; j >= 0; j--) { if (in(cnt[i][j])) { best = cnt[i][j]; break; } } ans += best; } cout << ans << endl; } return 0; }
apache-2.0
Anuragigts/igts_classified
application/views/classified/dec_rugs_carpetview.php
11707
<!DOCTYPE html> <html> <head> <title>New & Used Rugs For Sale UK | Buy & Sell Carpets Online | 99 Right Deals</title> <meta name="description" content="Shop for buy & sell carpets online in United Kingdom. And post free ads for new & used rugs for sale in UK on 99 Right Deals." /> <!-- xxx Head Content xxx --> <?php echo $this->load->view('common/head');?> <!-- xxx End xxx --> <link rel="stylesheet" href="<?php echo base_url(); ?>j-folder/css/j-forms.css" /> <link rel="stylesheet" href="<?php echo base_url(); ?>css/innerpagestyles.css" /> <link rel="stylesheet" href="<?php echo base_url();?>libs/slider.css"> <script type="text/javascript" src="<?php echo base_url();?>js/jssor.slider.min.js"></script> <link rel="stylesheet" href="<?php echo base_url();?>js/filter.css"> <script type="text/javascript"> $(document).ready(function() { $('.cd-filter-content').niceScroll({ autohidemode: 'false', cursorborderradius: '0px', background: '#f4f4f4', cursorwidth: '8px', cursorcolor: '#E95413' }); }); </script> <script type="text/javascript"> $(function(){ $(".loc_map").click(function(){ var val = $(this).attr("id"); var val1 = val.split(","); $(".map_show").html('<iframe src = "https://maps.google.com/maps?q='+val1[0]+','+val1[1]+'&hl=es;z=5&amp;output=embed" width="950px" height="300px"></iframe>'); }); }); </script> <?php foreach ($busconcount as $countval) { $allbustype = $countval->allbustype; $business = $countval->business; $consumer = $countval->consumer; } $urgentcnt = $deals_pck['urgentcount']; $platinumcnt = $deals_pck['platinumcount']; $goldcnt = $deals_pck['goldcount']; $freecnt = $deals_pck['freecount']; foreach ($public_adview as $publicview) { $left_ad1 = $publicview->sidead_one; $topad = $publicview->topad; $mid_ad = $publicview->mid_ad; } foreach ($sellerneededcount as $sncnt) { $seller = $sncnt->seller; $needed = $sncnt->needed; $charity = $sncnt->charity; } $kitchen_sub = $this->session->userdata('kitchen_search'); $seller_deals = 
$this->session->userdata('seller_deals'); $dealurgent = $this->session->userdata('dealurgent'); $dealtitle = $this->session->userdata('dealtitle'); $dealtitle = $this->session->userdata('dealtitle'); $dealprice = $this->session->userdata('dealprice'); $recentdays = $this->session->userdata('recentdays'); $search_bustype = $this->session->userdata('search_bustype'); $location = $this->session->userdata('location'); $latt = $this->session->userdata('latt'); $longg = $this->session->userdata('longg'); ?> <script type="text/javascript"> $(document).ready( function() { $("input:checkbox").change( function() { $("form.jforms").submit(); } ) $('.search_bustype').click(function() { $("form.jforms").submit(); } ) $('.dealtitle_sort').change(function() { $("form.jforms").submit(); } ) $('.price_sort').change(function() { $("form.jforms").submit(); } ) $('.recentdays_sort').change(function() { $("form.jforms").submit(); } ) $(".clear_location").click(function(){ $('#latt').val(''); $('#longg').val(''); $('#find_loc').val(''); $("form.jforms").submit(); }); } ); </script> </head> <body id="home"> <div class="preloader"><div class="status">&nbsp;</div></div> <div id="layout"> <!-- xxx tophead Content xxx --> <?php echo $this->load->view('common/tophead'); ?> <!-- xxx End tophead xxx --> <div class="section-title-01"> <div class="bg_parallax image_01_parallax"></div> </div> <section class="content-central"> <div class="semiboxshadow text-center"> <img src="<?php echo base_url(); ?>img/img-theme/shp-1090x457.png" class="img-responsive" alt="Shadow" title="Shadow view"> </div> <form id="j-forms2" action="<?php echo base_url(); ?>dec_rugs_carpetview/search_filters" class="j-forms jforms" method="post" style="background-color: rgb(255, 255, 255) !important;"> <div class="content_info"> <div class="paddings"> <div class="container pad_bott_50"> <div class="row"> <div class="col-md-10 col-sm-8 col-md-offset-1 add_top"> <?php echo $topad; ?> </div> </div> </div> <div class="container"> 
<div class="row"> <div class="col-md-3 col-sm-3"> <div class="container-by-widget-filter bg-dark color-white cloth_h3"> <a href="<?php echo base_url(); ?>home-kitchen-services-uk"><h3 class="title-widget">Home Kitchen Filter</h3></a> <h4 class="title-widget ">Rugs & Carpets</h4> <div class="cd-filter-block"> <h4 class="title-widget ">Seller Type</h4> <div class="cd-filter-content" > <div> <label class="checkbox"> <input type="checkbox" name="seller_deals[]" class='seller_deals' value="Seller" <?php if(isset($seller_deals) && in_array('Seller',$seller_deals)) echo 'checked = checked';?> > <i></i> Seller Deals (<?php echo $seller; ?>) </label> <label class="checkbox"> <input type="checkbox" name="seller_deals[]" class='seller_deals' value="Needed" <?php if(isset($seller_deals) && in_array('Needed',$seller_deals)) echo 'checked = checked';?> > <i></i> Needed Deals (<?php echo $needed; ?>) </label> <label class="checkbox"> <input type="checkbox" name="seller_deals[]" class='seller_deals' value="Charity" <?php if(isset($seller_deals) && in_array('Charity',$seller_deals)) echo 'checked = checked';?> > <i></i> Charity Deals (<?php echo $charity; ?>) </label> </div> </div> </div> <div class="cd-filter-block"> <h4 class="title-widget ">Deal Type</h4> <div class="cd-filter-content" > <div> <label class="radio"> <input type="radio" name="search_bustype" class="search_bustype" value="all" <?php if($search_bustype == 'all') echo 'checked = checked';?> checked > <i></i> All (<?php echo $allbustype; ?>) </label> <label class="radio"> <input type="radio" name="search_bustype" class="search_bustype" value="business" <?php if($search_bustype == 'business') echo 'checked = checked';?> > <i></i> Business (<?php echo $business; ?>) </label> <label class="radio"> <input type="radio" name="search_bustype" class="search_bustype" value="consumer" <?php if($search_bustype == 'consumer') echo 'checked = checked';?> > <i></i> Consumer (<?php echo $consumer; ?>) </label> </div> </div> </div> 
<div class="cd-filter-block"> <h4 class="title-widget ">Location</h4> <div class="cd-filter-content" > <div class="input"> <input type="text" placeholder="Enter Location" id="find_loc" class="find_loc_search" name="find_loc" value="<?php echo $location; ?>"> <input type='hidden' name='latt' id='latt' value='' > <input type='hidden' name='longg' id='longg' value='' > <button class="btn btn-primary sm-btn pull-right find_location" id='find_location' >Find</button> <button class="btn btn-primary sm-btn pull-right clear_location" id='clear_location' >Clear</button> </div> </div> </div> <div class="cd-filter-block"> <h4 class="title-widget">Search Only</h4> <div class="cd-filter-content"> <div> <label class="checkbox"> <input type="checkbox" name="dealurgent[]" class="dealurgent" value="0"<?php if(isset($dealurgent) && in_array('0',$dealurgent)){ echo 'checked = checked';}?> > <i></i> Urgent Deals (<?php echo $urgentcnt; ?>) </label> <label class="checkbox"> <input type="checkbox" name="dealurgent[]" class="dealurgent" value="6"<?php if(isset($dealurgent) && in_array('6',$dealurgent)){ echo 'checked = checked';}?> > <i></i> Significant Deals (<?php echo $platinumcnt; ?>) </label> <label class="checkbox"> <input type="checkbox" name="dealurgent[]" class="dealurgent" value="5"<?php if(isset($dealurgent) && in_array('5',$dealurgent)){ echo 'checked = checked';}?> > <i></i> Most Valued Deals (<?php echo $goldcnt; ?>) </label> <label class="checkbox"> <input type="checkbox" name="dealurgent[]" class="dealurgent" value="4" <?php if(isset($dealurgent) && in_array('4',$dealurgent)){ echo 'checked = checked';}?> > <i></i> Recent Deals (<?php echo $freecnt; ?>) </label> </div> </div> </div> </div> <div class="row top_20"> <div class="col-sm-12 add_left"> <?php echo $left_ad1; ?> </div> </div> </div> <div class="col-md-9 col-sm-9"> <div class="sort-by-container tooltip-hover"> <div class="row"> <div class="col-md-12"> <strong>Sort by:</strong> <ul> <li> <div class="top_bar_top"> 
<label class="input select"> <select name="dealtitle_sort" class="dealtitle_sort"> <option value="Any" <?php if($dealtitle == 'Any') echo 'selected = selected';?>>Title</option> <option value="atoz" <?php if($dealtitle == 'atoz') echo 'selected = selected';?>>A to Z</option> <option value="ztoa" <?php if($dealtitle == 'ztoa') echo 'selected = selected';?>>Z to A</option> </select> <i></i> </label> </div> </li> <li> <div class="top_bar_top"> <label class="input select"> <select name="price_sort" class="price_sort"> <option value="Any" <?php if($dealprice == 'Any') echo 'selected = selected';?>>Pricing</option> <option value="lowtohigh" <?php if($dealprice == 'lowtohigh') echo 'selected = selected';?>>Low to High</option> <option value="hightolow" <?php if($dealprice == 'hightolow') echo 'selected = selected';?>>High to Low</option> </select> <i></i> </label> </div> </li> <li> <div class="top_bar_top"> <label class="input select"> <select name="recentdays_sort" class="recentdays_sort"> <option value="Any" <?php if($recentdays == 'Any') echo 'selected = selected';?>>Posted On</option> <option value="last24hours" <?php if($recentdays == 'last24hours') echo 'selected = selected';?>>Last 24 Hours</option> <option value="last3days" <?php if($recentdays == 'last3days') echo 'selected = selected';?>>Last 3 Days</option> <option value="last7days" <?php if($recentdays == 'last7days') echo 'selected = selected';?>>Last 7 Days</option> <option value="last14days" <?php if($recentdays == 'last14days') echo 'selected = selected';?>>Last 14 Days</option> <option value="last1month" <?php if($recentdays == 'last1month') echo 'selected = selected';?>>Last 1 month</option> </select> <i></i> </label> </div> </li> </ul> </div> </div> </div> <div class="row list_view_searches kitchen_search_result"> <?php echo $this->load->view("classified/dec_rugs_carpetview_search"); ?> </div> </div> </div> </div> </div> </div> </form> </section> <!-- xxx footer Content xxx --> <?php echo 
$this->load->view('common/footer');?> <!-- xxx footer End xxx --> </div> <div class="modal fade" id="map_location" role="dialog"> <div class="modal-dialog"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal">&times;</button> <h2>Map Location</h2> </div> <div class="modal-body map_show"> </div> </div> </div> </div> <script src="<?php echo base_url();?>js/jquery.js"></script> <script src="<?php echo base_url();?>j-folder/js/jquery.validate.min.js"></script> <script src="<?php echo base_url();?>j-folder/js/jquery.maskedinput.min.js"></script> <script type="text/javascript" src="<?php echo base_url();?>libs/jquery.xuSlider.js"></script> <script> $('.xuSlider').xuSlider(); </script> <script> $(document).ready(function(){ $('#find_loc').autocomplete({ source: '<?php echo base_url(); ?>classified/search_autocomplete', minLength: 1, messages: { noResults:'No Data Found' } }); }); </script> <script src="<?php echo base_url();?>js/jquery.nicescroll.js"></script> <script src="<?php echo base_url();?>libs/jquery.mixitup.min.js"></script> <script src="<?php echo base_url();?>libs/main.js"></script> <!-- xxx footerscript Content xxx --> <?php echo $this->load->view('common/footerscript');?> <!-- xxx footerscript End xxx --> </body> </html>
apache-2.0
shaunrampersad/puppet-pg_monz
spec/spec_helper_acceptance.rb
672
require 'beaker-rspec/spec_helper' require 'beaker-rspec/helpers/serverspec' require 'beaker/puppet_install_helper' run_puppet_install_helper unless ENV['BEAKER_provision'] == 'no' RSpec.configure do |c| # Project root proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..')) # Readable test descriptions c.formatter = :documentation # Configure all nodes in nodeset c.before :suite do # Install module and dependencies puppet_module_install(:source => proj_root, :module_name => 'pg_monz') hosts.each do |host| on host, puppet('module', 'install', 'puppetlabs-stdlib'), { :acceptable_exit_codes => [0,1] } end end end
apache-2.0
Naeregwen/games-librarian
src/commons/enums/OnlineState.java
1299
/** * Copyright 2012-2014 Naeregwen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package commons.enums; /** * @author Naeregwen * */ public enum OnlineState { ONLINE ("online", "/images/icons/status_green.png"), INGAME ("in-game", "/images/icons/status_busy.png"), OFFLINE ("offline", "/images/icons/status_offline.png"), UNKNOWN ("unknown", "/images/icons/status_invisible.png"); String steamLabel; String iconPath; OnlineState(String steamLabel, String iconPath) { this.steamLabel = steamLabel; this.iconPath = iconPath; } /** * @return the steamLabel */ public String getSteamLabel() { return steamLabel; } /** * @return the iconPath */ public String getIconPath() { return iconPath; } }
apache-2.0
AndersonFCanel/JavaPassoAPasso
Basico Passo a passo/j_arrays/Exemplos/M_SomarVetores.java
1095
package j_arrays.Exemplos; import java.util.Scanner; public class M_SomarVetores { static void somarVetores(double[] a, double[] b, double[] c) { int i; for(i = 0; i < c.length; i++) c[i] = a[i] + b[i]; } static void ler(double[] v) { Scanner s; int i; s = new Scanner(System.in); for(i = 0; i < v.length; i++) { System.out.print("Digite um real: "); v[i] = s.nextDouble(); } } static void mostrar(double[] v) { int i; System.out.print("{ "); for(i = 0; i < v.length; i++) System.out.print(v[i] + " "); System.out.println("}"); } public static void main(String[] args) { double[] x, y, w; x = new double[3]; y = new double[3]; w = new double[3]; ler(x); mostrar(x); ler(y); mostrar(y); System.out.println("A soma dos vetores é:"); somarVetores(x, y, w); mostrar(w); } }
apache-2.0
chicagozer/rheosoft
camel-core/src/main/java/org/apache/camel/impl/DefaultShutdownStrategy.java
20051
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.apache.camel.CamelContext; import org.apache.camel.CamelContextAware; import org.apache.camel.Consumer; import org.apache.camel.Route; import org.apache.camel.ShutdownRoute; import org.apache.camel.ShutdownRunningTask; import org.apache.camel.SuspendableService; import org.apache.camel.spi.RouteStartupOrder; import org.apache.camel.spi.ShutdownAware; import org.apache.camel.spi.ShutdownStrategy; import org.apache.camel.util.EventHelper; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.ServiceHelper; import org.apache.camel.util.StopWatch; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Default {@link org.apache.camel.spi.ShutdownStrategy} which uses graceful shutdown. 
* <p/> * Graceful shutdown ensures that any inflight and pending messages will be taken into account * and it will wait until these exchanges has been completed. * <p/> * As this strategy will politely wait until all exchanges has been completed it can potential wait * for a long time, and hence why a timeout value can be set. When the timeout triggers you can also * specify whether the remainder consumers should be shutdown now or ignore. * <p/> * Will by default use a timeout of 300 seconds (5 minutes) by which it will shutdown now the remaining consumers. * This ensures that when shutting down Camel it at some point eventually will shutdown. * This behavior can of course be configured using the {@link #setTimeout(long)} and * {@link #setShutdownNowOnTimeout(boolean)} methods. * <p/> * Routes will by default be shutdown in the reverse order of which they where started. * You can customize this using the {@link #setShutdownRoutesInReverseOrder(boolean)} method. * * @version */ public class DefaultShutdownStrategy extends ServiceSupport implements ShutdownStrategy, CamelContextAware { private static final transient Logger LOG = LoggerFactory.getLogger(DefaultShutdownStrategy.class); private CamelContext camelContext; private ExecutorService executor; private long timeout = 5 * 60; private TimeUnit timeUnit = TimeUnit.SECONDS; private boolean shutdownNowOnTimeout = true; private boolean shutdownRoutesInReverseOrder = true; public DefaultShutdownStrategy() { } public DefaultShutdownStrategy(CamelContext camelContext) { this.camelContext = camelContext; } public void shutdown(CamelContext context, List<RouteStartupOrder> routes) throws Exception { shutdown(context, routes, getTimeout(), getTimeUnit()); } public void suspend(CamelContext context, List<RouteStartupOrder> routes) throws Exception { doShutdown(context, routes, getTimeout(), getTimeUnit(), true, false); } public void shutdown(CamelContext context, List<RouteStartupOrder> routes, long timeout, TimeUnit 
timeUnit) throws Exception { doShutdown(context, routes, timeout, timeUnit, false, false); } public boolean shutdown(CamelContext context, RouteStartupOrder route, long timeout, TimeUnit timeUnit, boolean abortAfterTimeout) throws Exception { List<RouteStartupOrder> routes = new ArrayList<RouteStartupOrder>(1); routes.add(route); return doShutdown(context, routes, timeout, timeUnit, false, abortAfterTimeout); } public void suspend(CamelContext context, List<RouteStartupOrder> routes, long timeout, TimeUnit timeUnit) throws Exception { doShutdown(context, routes, timeout, timeUnit, true, false); } protected boolean doShutdown(CamelContext context, List<RouteStartupOrder> routes, long timeout, TimeUnit timeUnit, boolean suspendOnly, boolean abortAfterTimeout) throws Exception { StopWatch watch = new StopWatch(); // at first sort according to route startup order List<RouteStartupOrder> routesOrdered = new ArrayList<RouteStartupOrder>(routes); Collections.sort(routesOrdered, new Comparator<RouteStartupOrder>() { public int compare(RouteStartupOrder o1, RouteStartupOrder o2) { return o1.getStartupOrder() - o2.getStartupOrder(); } }); if (shutdownRoutesInReverseOrder) { Collections.reverse(routesOrdered); } if (timeout > 0) { LOG.info("Starting to graceful shutdown " + routesOrdered.size() + " routes (timeout " + timeout + " " + timeUnit.toString().toLowerCase() + ")"); } else { LOG.info("Starting to graceful shutdown " + routesOrdered.size() + " routes (no timeout)"); } // use another thread to perform the shutdowns so we can support timeout Future future = getExecutorService().submit(new ShutdownTask(context, routesOrdered, suspendOnly, abortAfterTimeout)); try { if (timeout > 0) { future.get(timeout, timeUnit); } else { future.get(); } } catch (TimeoutException e) { // timeout then cancel the task future.cancel(true); // if set, stop processing and return false to indicate that the shutdown is aborting if (abortAfterTimeout) { LOG.warn("Timeout occurred. 
Aborting the shutdown now."); return false; } else { if (shutdownNowOnTimeout) { LOG.warn("Timeout occurred. Now forcing the routes to be shutdown now."); // force the routes to shutdown now shutdownRoutesNow(routesOrdered); } else { LOG.warn("Timeout occurred. Will ignore shutting down the remainder routes."); } } } catch (ExecutionException e) { // unwrap execution exception throw ObjectHelper.wrapRuntimeCamelException(e.getCause()); } // convert to seconds as its easier to read than a big milli seconds number long seconds = TimeUnit.SECONDS.convert(watch.stop(), TimeUnit.MILLISECONDS); LOG.info("Graceful shutdown of " + routesOrdered.size() + " routes completed in " + seconds + " seconds"); return true; } public void setTimeout(long timeout) { this.timeout = timeout; } public long getTimeout() { return timeout; } public void setTimeUnit(TimeUnit timeUnit) { this.timeUnit = timeUnit; } public TimeUnit getTimeUnit() { return timeUnit; } public void setShutdownNowOnTimeout(boolean shutdownNowOnTimeout) { this.shutdownNowOnTimeout = shutdownNowOnTimeout; } public boolean isShutdownNowOnTimeout() { return shutdownNowOnTimeout; } public boolean isShutdownRoutesInReverseOrder() { return shutdownRoutesInReverseOrder; } public void setShutdownRoutesInReverseOrder(boolean shutdownRoutesInReverseOrder) { this.shutdownRoutesInReverseOrder = shutdownRoutesInReverseOrder; } public CamelContext getCamelContext() { return camelContext; } public void setCamelContext(CamelContext camelContext) { this.camelContext = camelContext; } /** * Shutdown all the consumers immediately. 
* * @param routes the routes to shutdown */ protected void shutdownRoutesNow(List<RouteStartupOrder> routes) { for (RouteStartupOrder order : routes) { // set the route to shutdown as fast as possible by stopping after // it has completed its current task ShutdownRunningTask current = order.getRoute().getRouteContext().getShutdownRunningTask(); if (current != ShutdownRunningTask.CompleteCurrentTaskOnly) { LOG.debug("Changing shutdownRunningTask from {} to " + ShutdownRunningTask.CompleteCurrentTaskOnly + " on route {} to shutdown faster", current, order.getRoute().getId()); order.getRoute().getRouteContext().setShutdownRunningTask(ShutdownRunningTask.CompleteCurrentTaskOnly); } for (Consumer consumer : order.getInputs()) { shutdownNow(consumer); } } } /** * Shutdown all the consumers immediately. * * @param consumers the consumers to shutdown */ protected void shutdownNow(List<Consumer> consumers) { for (Consumer consumer : consumers) { shutdownNow(consumer); } } /** * Shutdown the consumer immediately. * * @param consumer the consumer to shutdown */ protected void shutdownNow(Consumer consumer) { LOG.trace("Shutting down: {}", consumer); // allow us to do custom work before delegating to service helper try { ServiceHelper.stopService(consumer); } catch (Throwable e) { LOG.warn("Error occurred while shutting down route: " + consumer + ". This exception will be ignored.", e); // fire event EventHelper.notifyServiceStopFailure(consumer.getEndpoint().getCamelContext(), consumer, e); } LOG.trace("Shutdown complete for: {}", consumer); } /** * Suspends/stops the consumer immediately. * * @param consumer the consumer to suspend */ protected void suspendNow(Consumer consumer) { LOG.trace("Suspending: {}", consumer); // allow us to do custom work before delegating to service helper try { ServiceHelper.suspendService(consumer); } catch (Throwable e) { LOG.warn("Error occurred while suspending route: " + consumer + ". 
This exception will be ignored.", e); // fire event EventHelper.notifyServiceStopFailure(consumer.getEndpoint().getCamelContext(), consumer, e); } LOG.trace("Suspend complete for: {}", consumer); } private ExecutorService getExecutorService() { if (executor == null) { executor = camelContext.getExecutorServiceStrategy().newSingleThreadExecutor(this, "ShutdownTask"); } return executor; } @Override protected void doStart() throws Exception { ObjectHelper.notNull(camelContext, "CamelContext"); } @Override protected void doStop() throws Exception { // noop } @Override protected void doShutdown() throws Exception { if (executor != null) { camelContext.getExecutorServiceStrategy().shutdownNow(executor); // should clear executor so we can restart by creating a new thread pool executor = null; } } class ShutdownDeferredConsumer { private final Route route; private final Consumer consumer; ShutdownDeferredConsumer(Route route, Consumer consumer) { this.route = route; this.consumer = consumer; } Route getRoute() { return route; } Consumer getConsumer() { return consumer; } } /** * Shutdown task which shutdown all the routes in a graceful manner. 
*/ class ShutdownTask implements Runnable { private final CamelContext context; private final List<RouteStartupOrder> routes; private final boolean suspendOnly; private final boolean abortAfterTimeout; public ShutdownTask(CamelContext context, List<RouteStartupOrder> routes, boolean suspendOnly, boolean abortAfterTimeout) { this.context = context; this.routes = routes; this.suspendOnly = suspendOnly; this.abortAfterTimeout = abortAfterTimeout; } public void run() { // the strategy in this run method is to // 1) go over the routes and shutdown those routes which can be shutdown asap // some routes will be deferred to shutdown at the end, as they are needed // by other routes so they can complete their tasks // 2) wait until all inflight and pending exchanges has been completed // 3) shutdown the deferred routes LOG.debug("There are {} routes to {}", routes.size(), suspendOnly ? "suspend" : "shutdown"); // list of deferred consumers to shutdown when all exchanges has been completed routed // and thus there are no more inflight exchanges so they can be safely shutdown at that time List<ShutdownDeferredConsumer> deferredConsumers = new ArrayList<ShutdownDeferredConsumer>(); for (RouteStartupOrder order : routes) { ShutdownRoute shutdownRoute = order.getRoute().getRouteContext().getShutdownRoute(); ShutdownRunningTask shutdownRunningTask = order.getRoute().getRouteContext().getShutdownRunningTask(); if (LOG.isTraceEnabled()) { LOG.trace("{}{} with options [{},{}]", new Object[]{suspendOnly ? 
"Suspending route: " : "Shutting down route: ", order.getRoute().getId(), shutdownRoute, shutdownRunningTask}); } for (Consumer consumer : order.getInputs()) { boolean suspend = false; // assume we should shutdown if we are not deferred boolean shutdown = shutdownRoute != ShutdownRoute.Defer; if (shutdown) { // if we are to shutdown then check whether we can suspend instead as its a more // gentle way to graceful shutdown // some consumers do not support shutting down so let them decide // if a consumer is suspendable then prefer to use that and then shutdown later if (consumer instanceof ShutdownAware) { shutdown = !((ShutdownAware) consumer).deferShutdown(shutdownRunningTask); } if (shutdown && consumer instanceof SuspendableService) { // we prefer to suspend over shutdown suspend = true; } } // log at info level when a route has been shutdown (otherwise log at debug level to not be too noisy) if (suspend) { // only suspend it and then later shutdown it suspendNow(consumer); // add it to the deferred list so the route will be shutdown later deferredConsumers.add(new ShutdownDeferredConsumer(order.getRoute(), consumer)); LOG.debug("Route: {} suspended and shutdown deferred, was consuming from: {}", order.getRoute().getId(), order.getRoute().getEndpoint()); } else if (shutdown) { shutdownNow(consumer); LOG.info("Route: {} shutdown complete, was consuming from: {}", order.getRoute().getId(), order.getRoute().getEndpoint()); } else { // we will stop it later, but for now it must run to be able to help all inflight messages // be safely completed deferredConsumers.add(new ShutdownDeferredConsumer(order.getRoute(), consumer)); LOG.debug("Route: " + order.getRoute().getId() + (suspendOnly ? " shutdown deferred." 
: " suspension deferred.")); } } } // wait till there are no more pending and inflight messages boolean done = false; long loopDelaySeconds = 1; long loopCount = 0; while (!done) { int size = 0; for (RouteStartupOrder order : routes) { for (Consumer consumer : order.getInputs()) { int inflight = context.getInflightRepository().size(consumer.getEndpoint()); // include any additional pending exchanges on some consumers which may have internal // memory queues such as seda if (consumer instanceof ShutdownAware) { inflight += ((ShutdownAware) consumer).getPendingExchangesSize(); } if (inflight > 0) { size += inflight; LOG.trace("{} inflight and pending exchanges for consumer: {}", inflight, consumer); } } } if (size > 0) { try { LOG.info("Waiting as there are still " + size + " inflight and pending exchanges to complete, timeout in " + (TimeUnit.SECONDS.convert(getTimeout(), getTimeUnit()) - (loopCount++ * loopDelaySeconds)) + " seconds."); Thread.sleep(loopDelaySeconds * 1000); } catch (InterruptedException e) { if (abortAfterTimeout) { LOG.warn("Interrupted while waiting during graceful shutdown, will abort."); return; } else { LOG.warn("Interrupted while waiting during graceful shutdown, will force shutdown now."); break; } } } else { done = true; } } // prepare for shutdown for (ShutdownDeferredConsumer deferred : deferredConsumers) { Consumer consumer = deferred.getConsumer(); if (consumer instanceof ShutdownAware) { LOG.trace("Route: {} preparing to shutdown.", deferred.getRoute().getId()); ((ShutdownAware) consumer).prepareShutdown(); LOG.debug("Route: {} preparing to shutdown complete.", deferred.getRoute().getId()); } } // now all messages has been completed then stop the deferred consumers for (ShutdownDeferredConsumer deferred : deferredConsumers) { Consumer consumer = deferred.getConsumer(); if (suspendOnly) { suspendNow(consumer); LOG.info("Route: {} suspend complete, was consuming from: {}", deferred.getRoute().getId(), 
deferred.getConsumer().getEndpoint()); } else { shutdownNow(consumer); LOG.info("Route: {} shutdown complete, was consuming from: {}", deferred.getRoute().getId(), deferred.getConsumer().getEndpoint()); } } } } }
apache-2.0
consulo/consulo-android
android/android/src/org/jetbrains/android/dom/attrs/StyleableDefinition.java
331
package org.jetbrains.android.dom.attrs; import org.jetbrains.annotations.NotNull; import java.util.List; /** * @author Eugene.Kudelevsky */ public interface StyleableDefinition { @NotNull List<StyleableDefinition> getChildren(); @NotNull String getName(); @NotNull List<AttributeDefinition> getAttributes(); }
apache-2.0
hank/litecoinj-new
core/src/test/java/com/google/litecoin/core/ChainSplitTest.java
32172
/* * Copyright 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.litecoin.core; import com.google.litecoin.core.TransactionConfidence.ConfidenceType; import com.google.litecoin.params.UnitTestParams; import com.google.litecoin.store.MemoryBlockStore; import com.google.litecoin.utils.BriefLogFormatter; import com.google.litecoin.utils.TestUtils; import com.google.litecoin.utils.Threading; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.math.BigInteger; import java.net.InetAddress; import java.util.ArrayList; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static com.google.common.base.Preconditions.checkNotNull; import static org.junit.Assert.*; public class ChainSplitTest { private static final Logger log = LoggerFactory.getLogger(ChainSplitTest.class); private NetworkParameters unitTestParams; private Wallet wallet; private BlockChain chain; private Address coinsTo; private Address coinsTo2; private Address someOtherGuy; private MemoryBlockStore blockStore; @Before public void setUp() throws Exception { BriefLogFormatter.init(); Wallet.SendRequest.DEFAULT_FEE_PER_KB = BigInteger.ZERO; unitTestParams = UnitTestParams.get(); wallet = new Wallet(unitTestParams); wallet.addKey(new ECKey()); wallet.addKey(new ECKey()); blockStore = new 
MemoryBlockStore(unitTestParams); chain = new BlockChain(unitTestParams, wallet, blockStore); coinsTo = wallet.getKeys().get(0).toAddress(unitTestParams); coinsTo2 = wallet.getKeys().get(1).toAddress(unitTestParams); someOtherGuy = new ECKey().toAddress(unitTestParams); } @Test public void testForking1() throws Exception { // Check that if the block chain forks, we end up using the right chain. Only tests inbound transactions // (receiving coins). Checking that we understand reversed spends is in testForking2. final AtomicBoolean reorgHappened = new AtomicBoolean(); final AtomicInteger walletChanged = new AtomicInteger(); wallet.addEventListener(new AbstractWalletEventListener() { @Override public void onReorganize(Wallet wallet) { reorgHappened.set(true); } @Override public void onWalletChanged(Wallet wallet) { walletChanged.incrementAndGet(); } }); // Start by building a couple of blocks on top of the genesis block. Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); Block b2 = b1.createNextBlock(coinsTo); assertTrue(chain.add(b1)); assertTrue(chain.add(b2)); Threading.waitForUserCode(); assertFalse(reorgHappened.get()); assertEquals(2, walletChanged.get()); // We got two blocks which sent 50 coins each to us. assertEquals("100.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); // We now have the following chain: // genesis -> b1 -> b2 // // so fork like this: // // genesis -> b1 -> b2 // \-> b3 // // Nothing should happen at this point. We saw b2 first so it takes priority. Block b3 = b1.createNextBlock(someOtherGuy); assertTrue(chain.add(b3)); Threading.waitForUserCode(); assertFalse(reorgHappened.get()); // No re-org took place. assertEquals(2, walletChanged.get()); assertEquals("100.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); // Check we can handle multi-way splits: this is almost certainly going to be extremely rare, but we have to // handle it anyway. 
The same transaction appears in b7/b8 (side chain) but not b2 or b3. // genesis -> b1--> b2 // |-> b3 // |-> b7 (x) // \-> b8 (x) Block b7 = b1.createNextBlock(coinsTo); assertTrue(chain.add(b7)); Block b8 = b1.createNextBlock(coinsTo); final Transaction t = b7.getTransactions().get(1); final Sha256Hash tHash = t.getHash(); b8.addTransaction(t); b8.solve(); assertTrue(chain.add(roundtrip(b8))); Threading.waitForUserCode(); assertEquals(2, wallet.getTransaction(tHash).getAppearsInHashes().size()); assertFalse(reorgHappened.get()); // No re-org took place. assertEquals(5, walletChanged.get()); assertEquals("100.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); // Now we add another block to make the alternative chain longer. assertTrue(chain.add(b3.createNextBlock(someOtherGuy))); Threading.waitForUserCode(); assertTrue(reorgHappened.get()); // Re-org took place. assertEquals(6, walletChanged.get()); reorgHappened.set(false); // // genesis -> b1 -> b2 // \-> b3 -> b4 // We lost some coins! b2 is no longer a part of the best chain so our available balance should drop to 50. // It's now pending reconfirmation. assertEquals("50.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); // ... and back to the first chain. Block b5 = b2.createNextBlock(coinsTo); Block b6 = b5.createNextBlock(coinsTo); assertTrue(chain.add(b5)); assertTrue(chain.add(b6)); // // genesis -> b1 -> b2 -> b5 -> b6 // \-> b3 -> b4 // Threading.waitForUserCode(); assertTrue(reorgHappened.get()); assertEquals(9, walletChanged.get()); assertEquals("200.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); } @Test public void testForking2() throws Exception { // Check that if the chain forks and new coins are received in the alternate chain our balance goes up // after the re-org takes place. 
Block b1 = unitTestParams.getGenesisBlock().createNextBlock(someOtherGuy); Block b2 = b1.createNextBlock(someOtherGuy); assertTrue(chain.add(b1)); assertTrue(chain.add(b2)); // genesis -> b1 -> b2 // \-> b3 -> b4 assertEquals(BigInteger.ZERO, wallet.getBalance()); Block b3 = b1.createNextBlock(coinsTo); Block b4 = b3.createNextBlock(someOtherGuy); assertTrue(chain.add(b3)); assertEquals(BigInteger.ZERO, wallet.getBalance()); assertTrue(chain.add(b4)); assertEquals("50.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); } @Test public void testForking3() throws Exception { // Check that we can handle our own spends being rolled back by a fork. Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); chain.add(b1); assertEquals("50.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); Address dest = new ECKey().toAddress(unitTestParams); Transaction spend = wallet.createSend(dest, Utils.toNanoCoins(10, 0)); wallet.commitTx(spend); // Waiting for confirmation ... make it eligible for selection. assertEquals(BigInteger.ZERO, wallet.getBalance()); spend.getConfidence().markBroadcastBy(new PeerAddress(InetAddress.getByAddress(new byte[]{1, 2, 3, 4}))); spend.getConfidence().markBroadcastBy(new PeerAddress(InetAddress.getByAddress(new byte[]{5,6,7,8}))); assertEquals(ConfidenceType.PENDING, spend.getConfidence().getConfidenceType()); assertEquals(Utils.toNanoCoins(40, 0), wallet.getBalance()); Block b2 = b1.createNextBlock(someOtherGuy); b2.addTransaction(spend); b2.solve(); chain.add(roundtrip(b2)); // We have 40 coins in change. assertEquals(ConfidenceType.BUILDING, spend.getConfidence().getConfidenceType()); // genesis -> b1 (receive coins) -> b2 (spend coins) // \-> b3 -> b4 Block b3 = b1.createNextBlock(someOtherGuy); Block b4 = b3.createNextBlock(someOtherGuy); chain.add(b3); chain.add(b4); // b4 causes a re-org that should make our spend go pending again. 
assertEquals(Utils.toNanoCoins(40, 0), wallet.getBalance()); assertEquals(ConfidenceType.PENDING, spend.getConfidence().getConfidenceType()); } @Test public void testForking4() throws Exception { // Check that we can handle external spends on an inactive chain becoming active. An external spend is where // we see a transaction that spends our own coins but we did not broadcast it ourselves. This happens when // keys are being shared between wallets. Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); chain.add(b1); assertEquals("50.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); Address dest = new ECKey().toAddress(unitTestParams); Transaction spend = wallet.createSend(dest, Utils.toNanoCoins(50, 0)); // We do NOT confirm the spend here. That means it's not considered to be pending because createSend is // stateless. For our purposes it is as if some other program with our keys created the tx. // // genesis -> b1 (receive 50) --> b2 // \-> b3 (external spend) -> b4 Block b2 = b1.createNextBlock(someOtherGuy); chain.add(b2); Block b3 = b1.createNextBlock(someOtherGuy); b3.addTransaction(spend); b3.solve(); chain.add(roundtrip(b3)); // The external spend is now pending. assertEquals(Utils.toNanoCoins(0, 0), wallet.getBalance()); Transaction tx = wallet.getTransaction(spend.getHash()); assertEquals(ConfidenceType.PENDING, tx.getConfidence().getConfidenceType()); Block b4 = b3.createNextBlock(someOtherGuy); chain.add(b4); // The external spend is now active. assertEquals(Utils.toNanoCoins(0, 0), wallet.getBalance()); assertEquals(ConfidenceType.BUILDING, tx.getConfidence().getConfidenceType()); } @Test public void testForking5() throws Exception { // Test the standard case in which a block containing identical transactions appears on a side chain. 
Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); chain.add(b1); final Transaction t = b1.transactions.get(1); assertEquals("50.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); // genesis -> b1 // -> b2 Block b2 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); Transaction b2coinbase = b2.transactions.get(0); b2.transactions.clear(); b2.addTransaction(b2coinbase); b2.addTransaction(t); b2.solve(); chain.add(roundtrip(b2)); assertEquals("50.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); assertTrue(wallet.isConsistent()); assertEquals(2, wallet.getTransaction(t.getHash()).getAppearsInHashes().size()); // -> b2 -> b3 Block b3 = b2.createNextBlock(someOtherGuy); chain.add(b3); assertEquals("50.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); } private Block roundtrip(Block b2) throws ProtocolException { return new Block(unitTestParams, b2.bitcoinSerialize()); } @Test public void testForking6() throws Exception { // Test the case in which a side chain block contains a tx, and then it appears in the main chain too. Block b1 = unitTestParams.getGenesisBlock().createNextBlock(someOtherGuy); chain.add(b1); // genesis -> b1 // -> b2 Block b2 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); chain.add(b2); assertEquals(BigInteger.ZERO, wallet.getBalance()); // genesis -> b1 -> b3 // -> b2 Block b3 = b1.createNextBlock(someOtherGuy); b3.addTransaction(b2.transactions.get(1)); b3.solve(); chain.add(roundtrip(b3)); assertEquals("50.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); } @Test public void testDoubleSpendOnFork() throws Exception { // Check what happens when a re-org happens and one of our confirmed transactions becomes invalidated by a // double spend on the new best chain. 
final boolean[] eventCalled = new boolean[1]; wallet.addEventListener(new AbstractWalletEventListener() { @Override public void onTransactionConfidenceChanged(Wallet wallet, Transaction tx) { super.onTransactionConfidenceChanged(wallet, tx); if (tx.getConfidence().getConfidenceType() == TransactionConfidence.ConfidenceType.DEAD) eventCalled[0] = true; } }); Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); chain.add(b1); Transaction t1 = wallet.createSend(someOtherGuy, Utils.toNanoCoins(10, 0)); Address yetAnotherGuy = new ECKey().toAddress(unitTestParams); Transaction t2 = wallet.createSend(yetAnotherGuy, Utils.toNanoCoins(20, 0)); wallet.commitTx(t1); // Receive t1 as confirmed by the network. Block b2 = b1.createNextBlock(new ECKey().toAddress(unitTestParams)); b2.addTransaction(t1); b2.solve(); chain.add(roundtrip(b2)); // Now we make a double spend become active after a re-org. Block b3 = b1.createNextBlock(new ECKey().toAddress(unitTestParams)); b3.addTransaction(t2); b3.solve(); chain.add(roundtrip(b3)); // Side chain. Block b4 = b3.createNextBlock(new ECKey().toAddress(unitTestParams)); chain.add(b4); // New best chain. Threading.waitForUserCode(); // Should have seen a double spend. assertTrue(eventCalled[0]); assertEquals(Utils.toNanoCoins(30, 0), wallet.getBalance()); } @Test public void testDoubleSpendOnForkPending() throws Exception { // Check what happens when a re-org happens and one of our unconfirmed transactions becomes invalidated by a // double spend on the new best chain. 
final Transaction[] eventDead = new Transaction[1]; final Transaction[] eventReplacement = new Transaction[1]; wallet.addEventListener(new AbstractWalletEventListener() { @Override public void onTransactionConfidenceChanged(Wallet wallet, Transaction tx) { super.onTransactionConfidenceChanged(wallet, tx); if (tx.getConfidence().getConfidenceType() == TransactionConfidence.ConfidenceType.DEAD) { eventDead[0] = tx; eventReplacement[0] = tx.getConfidence().getOverridingTransaction(); } } }); // Start with 50 coins. Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); chain.add(b1); Transaction t1 = checkNotNull(wallet.createSend(someOtherGuy, Utils.toNanoCoins(10, 0))); Address yetAnotherGuy = new ECKey().toAddress(unitTestParams); Transaction t2 = checkNotNull(wallet.createSend(yetAnotherGuy, Utils.toNanoCoins(20, 0))); wallet.commitTx(t1); // t1 is still pending ... Block b2 = b1.createNextBlock(new ECKey().toAddress(unitTestParams)); chain.add(b2); assertEquals(Utils.toNanoCoins(0, 0), wallet.getBalance()); assertEquals(Utils.toNanoCoins(40, 0), wallet.getBalance(Wallet.BalanceType.ESTIMATED)); // Now we make a double spend become active after a re-org. // genesis -> b1 -> b2 [t1 pending] // \-> b3 (t2) -> b4 Block b3 = b1.createNextBlock(new ECKey().toAddress(unitTestParams)); b3.addTransaction(t2); b3.solve(); chain.add(roundtrip(b3)); // Side chain. Block b4 = b3.createNextBlock(new ECKey().toAddress(unitTestParams)); chain.add(b4); // New best chain. Threading.waitForUserCode(); // Should have seen a double spend against the pending pool. // genesis -> b1 -> b2 [t1 dead and exited the miners mempools] // \-> b3 (t2) -> b4 assertEquals(t1, eventDead[0]); assertEquals(t2, eventReplacement[0]); assertEquals(Utils.toNanoCoins(30, 0), wallet.getBalance()); // ... and back to our own parallel universe. 
Block b5 = b2.createNextBlock(new ECKey().toAddress(unitTestParams)); chain.add(b5); Block b6 = b5.createNextBlock(new ECKey().toAddress(unitTestParams)); chain.add(b6); // genesis -> b1 -> b2 -> b5 -> b6 [t1 still dead] // \-> b3 [t2 resurrected and now pending] -> b4 assertEquals(Utils.toNanoCoins(0, 0), wallet.getBalance()); // t2 is pending - resurrected double spends take precedence over our dead transactions (which are in nobodies // mempool by this point). t1 = checkNotNull(wallet.getTransaction(t1.getHash())); t2 = checkNotNull(wallet.getTransaction(t2.getHash())); assertEquals(ConfidenceType.DEAD, t1.getConfidence().getConfidenceType()); assertEquals(ConfidenceType.PENDING, t2.getConfidence().getConfidenceType()); } @Test public void txConfidenceLevels() throws Exception { // Check that as the chain forks and re-orgs, the confidence data associated with each transaction is // maintained correctly. final ArrayList<Transaction> txns = new ArrayList<Transaction>(3); wallet.addEventListener(new AbstractWalletEventListener() { @Override public void onCoinsReceived(Wallet wallet, Transaction tx, BigInteger prevBalance, BigInteger newBalance) { txns.add(tx); } }); // Start by building three blocks on top of the genesis block. All send to us. Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); BigInteger work1 = b1.getWork(); Block b2 = b1.createNextBlock(coinsTo2); BigInteger work2 = b2.getWork(); Block b3 = b2.createNextBlock(coinsTo2); BigInteger work3 = b3.getWork(); assertTrue(chain.add(b1)); assertTrue(chain.add(b2)); assertTrue(chain.add(b3)); Threading.waitForUserCode(); // Check the transaction confidence levels are correct. 
assertEquals(3, txns.size()); assertEquals(1, txns.get(0).getConfidence().getAppearedAtChainHeight()); assertEquals(2, txns.get(1).getConfidence().getAppearedAtChainHeight()); assertEquals(3, txns.get(2).getConfidence().getAppearedAtChainHeight()); assertEquals(3, txns.get(0).getConfidence().getDepthInBlocks()); assertEquals(2, txns.get(1).getConfidence().getDepthInBlocks()); assertEquals(1, txns.get(2).getConfidence().getDepthInBlocks()); assertEquals(work1.add(work2).add(work3), txns.get(0).getConfidence().getWorkDone()); assertEquals(work2.add(work3), txns.get(1).getConfidence().getWorkDone()); assertEquals(work3, txns.get(2).getConfidence().getWorkDone()); // We now have the following chain: // genesis -> b1 -> b2 -> b3 // // so fork like this: // // genesis -> b1 -> b2 -> b3 // \-> b4 -> b5 // // Nothing should happen at this point. We saw b2 and b3 first so it takes priority. Block b4 = b1.createNextBlock(someOtherGuy); BigInteger work4 = b4.getWork(); Block b5 = b4.createNextBlock(someOtherGuy); BigInteger work5 = b5.getWork(); assertTrue(chain.add(b4)); assertTrue(chain.add(b5)); Threading.waitForUserCode(); assertEquals(3, txns.size()); assertEquals(1, txns.get(0).getConfidence().getAppearedAtChainHeight()); assertEquals(2, txns.get(1).getConfidence().getAppearedAtChainHeight()); assertEquals(3, txns.get(2).getConfidence().getAppearedAtChainHeight()); assertEquals(3, txns.get(0).getConfidence().getDepthInBlocks()); assertEquals(2, txns.get(1).getConfidence().getDepthInBlocks()); assertEquals(1, txns.get(2).getConfidence().getDepthInBlocks()); assertEquals(work1.add(work2).add(work3), txns.get(0).getConfidence().getWorkDone()); assertEquals(work2.add(work3), txns.get(1).getConfidence().getWorkDone()); assertEquals(work3, txns.get(2).getConfidence().getWorkDone()); // Now we add another block to make the alternative chain longer. 
Block b6 = b5.createNextBlock(someOtherGuy); BigInteger work6 = b6.getWork(); assertTrue(chain.add(b6)); // // genesis -> b1 -> b2 -> b3 // \-> b4 -> b5 -> b6 // assertEquals(3, txns.size()); assertEquals(1, txns.get(0).getConfidence().getAppearedAtChainHeight()); assertEquals(4, txns.get(0).getConfidence().getDepthInBlocks()); assertEquals(work1.add(work4).add(work5).add(work6), txns.get(0).getConfidence().getWorkDone()); // Transaction 1 (in block b2) is now on a side chain, so it goes pending (not see in chain). assertEquals(ConfidenceType.PENDING, txns.get(1).getConfidence().getConfidenceType()); try { txns.get(1).getConfidence().getAppearedAtChainHeight(); fail(); } catch (IllegalStateException e) {} assertEquals(0, txns.get(1).getConfidence().getDepthInBlocks()); assertEquals(BigInteger.ZERO, txns.get(1).getConfidence().getWorkDone()); // ... and back to the first chain. Block b7 = b3.createNextBlock(coinsTo); BigInteger work7 = b7.getWork(); Block b8 = b7.createNextBlock(coinsTo); BigInteger work8 = b7.getWork(); assertTrue(chain.add(b7)); assertTrue(chain.add(b8)); // // genesis -> b1 -> b2 -> b3 -> b7 -> b8 // \-> b4 -> b5 -> b6 // // This should be enabled, once we figure out the best way to inform the user of how the wallet is changing // during the re-org. 
//assertEquals(5, txns.size()); assertEquals(1, txns.get(0).getConfidence().getAppearedAtChainHeight()); assertEquals(2, txns.get(1).getConfidence().getAppearedAtChainHeight()); assertEquals(3, txns.get(2).getConfidence().getAppearedAtChainHeight()); assertEquals(5, txns.get(0).getConfidence().getDepthInBlocks()); assertEquals(4, txns.get(1).getConfidence().getDepthInBlocks()); assertEquals(3, txns.get(2).getConfidence().getDepthInBlocks()); BigInteger newWork1 = work1.add(work2).add(work3).add(work7).add(work8); assertEquals(newWork1, txns.get(0).getConfidence().getWorkDone()); BigInteger newWork2 = work2.add(work3).add(work7).add(work8); assertEquals(newWork2, txns.get(1).getConfidence().getWorkDone()); BigInteger newWork3 = work3.add(work7).add(work8); assertEquals(newWork3, txns.get(2).getConfidence().getWorkDone()); assertEquals("250.00", Utils.bitcoinValueToFriendlyString(wallet.getBalance())); // Now add two more blocks that don't send coins to us. Despite being irrelevant the wallet should still update. Block b9 = b8.createNextBlock(someOtherGuy); Block b10 = b9.createNextBlock(someOtherGuy); chain.add(b9); chain.add(b10); BigInteger extraWork = b9.getWork().add(b10.getWork()); assertEquals(7, txns.get(0).getConfidence().getDepthInBlocks()); assertEquals(6, txns.get(1).getConfidence().getDepthInBlocks()); assertEquals(5, txns.get(2).getConfidence().getDepthInBlocks()); assertEquals(newWork1.add(extraWork), txns.get(0).getConfidence().getWorkDone()); assertEquals(newWork2.add(extraWork), txns.get(1).getConfidence().getWorkDone()); assertEquals(newWork3.add(extraWork), txns.get(2).getConfidence().getWorkDone()); } @Test public void orderingInsideBlock() throws Exception { // Test that transactions received in the same block have their ordering preserved when reorganising. // This covers issue 468. // Receive some money to the wallet. 
Transaction t1 = TestUtils.createFakeTx(unitTestParams, Utils.COIN, coinsTo); final Block b1 = TestUtils.makeSolvedTestBlock(unitTestParams.genesisBlock, t1); chain.add(b1); // Send a couple of payments one after the other (so the second depends on the change output of the first). wallet.allowSpendingUnconfirmedTransactions(); Transaction t2 = checkNotNull(wallet.createSend(new ECKey().toAddress(unitTestParams), Utils.CENT)); wallet.commitTx(t2); Transaction t3 = checkNotNull(wallet.createSend(new ECKey().toAddress(unitTestParams), Utils.CENT)); wallet.commitTx(t3); chain.add(TestUtils.makeSolvedTestBlock(b1, t2, t3)); final BigInteger coins0point98 = Utils.COIN.subtract(Utils.CENT).subtract(Utils.CENT); assertEquals(coins0point98, wallet.getBalance()); // Now round trip the wallet and force a re-org. ByteArrayOutputStream bos = new ByteArrayOutputStream(); wallet.saveToFileStream(bos); wallet = Wallet.loadFromFileStream(new ByteArrayInputStream(bos.toByteArray())); final Block b2 = TestUtils.makeSolvedTestBlock(b1, t2, t3); final Block b3 = TestUtils.makeSolvedTestBlock(b2); chain.add(b2); chain.add(b3); // And verify that the balance is as expected. Because signatures are currently non-deterministic if the order // isn't being stored correctly this should fail 50% of the time. assertEquals(coins0point98, wallet.getBalance()); } @Test public void coinbaseDeath() throws Exception { // Check that a coinbase tx is marked as dead after a reorg rather than pending as normal non-double-spent // transactions would be. Also check that a dead coinbase on a sidechain is resurrected if the sidechain // becomes the best chain once more. final ArrayList<Transaction> txns = new ArrayList<Transaction>(3); wallet.addEventListener(new AbstractWalletEventListener() { @Override public void onCoinsReceived(Wallet wallet, Transaction tx, BigInteger prevBalance, BigInteger newBalance) { txns.add(tx); } }); // Start by building three blocks on top of the genesis block. 
// The first block contains a normal transaction that spends to coinTo. // The second block contains a coinbase transaction that spends to coinTo2. // The third block contains a normal transaction that spends to coinTo. Block b1 = unitTestParams.getGenesisBlock().createNextBlock(coinsTo); Block b2 = b1.createNextBlockWithCoinbase(wallet.getKeys().get(1).getPubKey()); Block b3 = b2.createNextBlock(coinsTo); log.debug("Adding block b1"); assertTrue(chain.add(b1)); log.debug("Adding block b2"); assertTrue(chain.add(b2)); log.debug("Adding block b3"); assertTrue(chain.add(b3)); // We now have the following chain: // genesis -> b1 -> b2 -> b3 // // Check we have seen the three transactions. Threading.waitForUserCode(); assertEquals(3, txns.size()); // Check the coinbase transaction is building and in the unspent pool only. assertEquals(ConfidenceType.BUILDING, txns.get(1).getConfidence().getConfidenceType()); assertTrue(!wallet.pending.containsKey(txns.get(1).getHash())); assertTrue(wallet.unspent.containsKey(txns.get(1).getHash())); assertTrue(!wallet.spent.containsKey(txns.get(1).getHash())); assertTrue(!wallet.dead.containsKey(txns.get(1).getHash())); // Fork like this: // // genesis -> b1 -> b2 -> b3 // \-> b4 -> b5 -> b6 // // The b4/ b5/ b6 is now the best chain Block b4 = b1.createNextBlock(someOtherGuy); Block b5 = b4.createNextBlock(someOtherGuy); Block b6 = b5.createNextBlock(someOtherGuy); log.debug("Adding block b4"); assertTrue(chain.add(b4)); log.debug("Adding block b5"); assertTrue(chain.add(b5)); log.debug("Adding block b6"); assertTrue(chain.add(b6)); Threading.waitForUserCode(); // Transaction 1 (in block b2) is now on a side chain and should have confidence type of dead and be in the dead pool only assertEquals(TransactionConfidence.ConfidenceType.DEAD, txns.get(1).getConfidence().getConfidenceType()); assertTrue(!wallet.pending.containsKey(txns.get(1).getHash())); assertTrue(!wallet.unspent.containsKey(txns.get(1).getHash())); 
assertTrue(!wallet.spent.containsKey(txns.get(1).getHash())); assertTrue(wallet.dead.containsKey(txns.get(1).getHash())); // ... and back to the first chain. Block b7 = b3.createNextBlock(coinsTo); Block b8 = b7.createNextBlock(coinsTo); log.debug("Adding block b7"); assertTrue(chain.add(b7)); log.debug("Adding block b8"); assertTrue(chain.add(b8)); Threading.waitForUserCode(); // // genesis -> b1 -> b2 -> b3 -> b7 -> b8 // \-> b4 -> b5 -> b6 // // The coinbase transaction should now have confidence type of building once more and in the unspent pool only. assertEquals(TransactionConfidence.ConfidenceType.BUILDING, txns.get(1).getConfidence().getConfidenceType()); assertTrue(!wallet.pending.containsKey(txns.get(1).getHash())); assertTrue(wallet.unspent.containsKey(txns.get(1).getHash())); assertTrue(!wallet.spent.containsKey(txns.get(1).getHash())); assertTrue(!wallet.dead.containsKey(txns.get(1).getHash())); // ... make the side chain dominant again. Block b9 = b6.createNextBlock(coinsTo); Block b10 = b9.createNextBlock(coinsTo); log.debug("Adding block b9"); assertTrue(chain.add(b9)); log.debug("Adding block b10"); assertTrue(chain.add(b10)); Threading.waitForUserCode(); // // genesis -> b1 -> b2 -> b3 -> b7 -> b8 // \-> b4 -> b5 -> b6 -> b9 -> b10 // // The coinbase transaction should now have the confidence type of dead and be in the dead pool only. assertEquals(TransactionConfidence.ConfidenceType.DEAD, txns.get(1).getConfidence().getConfidenceType()); assertTrue(!wallet.pending.containsKey(txns.get(1).getHash())); assertTrue(!wallet.unspent.containsKey(txns.get(1).getHash())); assertTrue(!wallet.spent.containsKey(txns.get(1).getHash())); assertTrue(wallet.dead.containsKey(txns.get(1).getHash())); } }
apache-2.0
hanks-zyh/FlyWoo
app/src/main/java/com/zjk/wifiproject/socket/udp/UDPMessageListener.java
25556
package com.zjk.wifiproject.socket.udp; import android.content.Context; import android.content.Intent; import android.os.Handler; import com.orhanobut.logger.Logger; import com.zjk.wifiproject.BaseApplication; import com.zjk.wifiproject.config.ConfigBroadcast; import com.zjk.wifiproject.config.ConfigIntent; import com.zjk.wifiproject.entity.Message; import com.zjk.wifiproject.entity.Users; import com.zjk.wifiproject.socket.tcp.TcpClient; import com.zjk.wifiproject.socket.tcp.TcpService; import com.zjk.wifiproject.sql.SqlDBOperate; import com.zjk.wifiproject.util.GsonUtils; import com.zjk.wifiproject.util.L; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.SocketException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; public class UDPMessageListener implements Runnable { private static final String TAG = "SZU_UDPMessageListener"; private static final int POOL_SIZE = 5; // 单个CPU线程池大小 private static final int BUFFERLENGTH = 1024; // 缓冲大小 private static byte[] sendBuffer = new byte[BUFFERLENGTH]; private static byte[] receiveBuffer = new byte[BUFFERLENGTH]; private HashMap<String, String> mLastMsgCache; // 最后一条消息缓存,以IMEI为KEY private ArrayList<Users> mUnReadPeopleList; // 未读消息的用户队列 private HashMap<String, Users> mOnlineUsers; // 在线用户集合,以IMEI为KEY private String BROADCASTIP; private Thread receiveUDPThread; private boolean isThreadRunning; private List<OnNewMsgListener> mListenerList; private Users mLocalUser; // 本机用户对象 private SqlDBOperate mDBOperate; private static ExecutorService executor; private static DatagramSocket UDPSocket; private static DatagramPacket sendDatagramPacket; private DatagramPacket receiveDatagramPacket; private static Context mContext; private static 
UDPMessageListener instance; private Handler mHanlder; private UDPMessageListener() { BROADCASTIP = "255.255.255.255"; // BROADCASTIP = WifiUtils.getBroadcastAddress(); mDBOperate = new SqlDBOperate(mContext); mListenerList = new ArrayList<OnNewMsgListener>(); mOnlineUsers = new LinkedHashMap<String, Users>(); mLastMsgCache = new HashMap<String, String>(); mUnReadPeopleList = new ArrayList<Users>(); int cpuNums = Runtime.getRuntime().availableProcessors(); executor = Executors.newFixedThreadPool(cpuNums * POOL_SIZE); // 根据CPU数目初始化线程池 } /** * <p/> * 获取UDPSocketThread实例 * <p/> * 单例模式,返回唯一实例 * * @return instance */ public static UDPMessageListener getInstance(Context context) { if (instance == null) { mContext = context; instance = new UDPMessageListener(); } return instance; } @Override public void run() { while (isThreadRunning) { try { //这是一个阻塞的方法 UDPSocket.receive(receiveDatagramPacket); } catch (IOException e) { isThreadRunning = false; receiveDatagramPacket = null; if (UDPSocket != null) { UDPSocket.close(); UDPSocket = null; } receiveUDPThread = null; L.e(TAG, "UDP数据包接收失败!线程停止"); showToast("UDP数据包接收失败!线程停止"); e.printStackTrace(); break; } if (receiveDatagramPacket.getLength() == 0) { L.e(TAG, "无法接收UDP数据或者接收到的UDP数据为空"); showToast("UDP数据包接收失败!线程停止"); continue; } String resStr = ""; //接收到的字符串 try { resStr = new String(receiveBuffer, 0, receiveDatagramPacket.getLength(), "gbk"); } catch (UnsupportedEncodingException e) { L.e(TAG, "系统不支持GBK编码"); } //打印一下 // showToast(resStr); Logger.i("接收到" + resStr); String senderIp = receiveDatagramPacket.getAddress().getHostAddress(); //将json串转成IPMSGProtocol对象 IPMSGProtocol ipmsgRes = GsonUtils.jsonToBean(resStr, IPMSGProtocol.class); processMessage(ipmsgRes, senderIp); // 每次接收完UDP数据后,重置长度。否则可能会导致下次收到数据包被截断。 if (receiveDatagramPacket != null) { receiveDatagramPacket.setLength(BUFFERLENGTH); } }//while receiveDatagramPacket = null; if (UDPSocket != null) { UDPSocket.close(); UDPSocket = null; } receiveUDPThread = null; } /** * 
处理接收到的UDP数据 * * @param ipmsgRes * @param senderIp */ public void processMessage(IPMSGProtocol ipmsgRes, String senderIp) { int commandNo = ipmsgRes.commandNo; Logger.i("处理来自:" + senderIp + "命令:" + commandNo); TcpService tcpService = TcpService.getInstance(mContext); tcpService.setHandler(mHanlder); TcpClient tcpClient = TcpClient.getInstance(mContext); tcpClient.setHandler(mHanlder); switch (commandNo) { /*-------------------服务器------------------------------*/ case IPMSGConst.NO_CONNECT_SUCCESS: { //接收到客户端连接成功 // showToast("收到上线通知"); //确认指令 sendUDPdata(getConfirmCommand(IPMSGConst.AN_CONNECT_SUCCESS, ipmsgRes.targetIP, senderIp)); L.i(TAG, "成功发送上线应答"); // showToast("成功发送上线应答"); } break; case IPMSGConst.NO_SEND_TXT: { //客户端发来文本消息 Logger.i("客户端发来文本消息"); //新消息广播 Message textMsg = ipmsgRes.addObject; Intent intent = new Intent(ConfigBroadcast.ACTION_NEW_MSG); intent.putExtra(ConfigIntent.EXTRA_NEW_MSG_TYPE, ConfigIntent.NEW_MSG_TYPE_TXT); intent.putExtra(ConfigIntent.EXTRA_NEW_MSG_CONTENT, textMsg.getMsgContent()); mContext.sendBroadcast(intent); sendUDPdata(getConfirmCommand(IPMSGConst.AN_SEND_TXT, ipmsgRes.targetIP, senderIp)); } break; case IPMSGConst.NO_SEND_IMAGE: { //客户端发来图片 Logger.i("客户端发来图片请求"); // showToast("客户端发来图片请求"); tcpService.setSavePath(BaseApplication.IMAG_PATH); tcpService.startReceive(); IPMSGProtocol command = getConfirmCommand(IPMSGConst.AN_SEND_IMAGE, ipmsgRes.targetIP, senderIp); command.addObject = ipmsgRes.addObject; sendUDPdata(command); } break; case IPMSGConst.NO_SEND_VOICE: { //客户端发来语音 Logger.i("客户端发来语音请求"); // showToast("客户端发来语音请求"); tcpService.setSavePath(BaseApplication.VOICE_PATH); tcpService.startReceive(); IPMSGProtocol command = getConfirmCommand(IPMSGConst.AN_SEND_VOICE, ipmsgRes.targetIP, senderIp); command.addObject = ipmsgRes.addObject; sendUDPdata(command); } break; case IPMSGConst.NO_SEND_VEDIO: { //发送视频 Logger.i("客户端发送视频请求"); // showToast("客户端发送视频请求"); tcpService.setSavePath(BaseApplication.VEDIO_PATH); 
tcpService.startReceive(); IPMSGProtocol command = getConfirmCommand(IPMSGConst.AN_SEND_VEDIO, ipmsgRes.targetIP, senderIp); command.addObject = ipmsgRes.addObject; sendUDPdata(command); Intent intent = new Intent(ConfigBroadcast.ACTION_NEW_MSG); intent.putExtra(ConfigIntent.EXTRA_NEW_MSG_TYPE, ConfigIntent.NEW_MSG_TYPE_VEDIO); intent.putExtra(ConfigIntent.EXTRA_NEW_MSG_CONTENT, ipmsgRes.addObject.getMsgContent()); mContext.sendBroadcast(intent); } break; case IPMSGConst.NO_SEND_MUSIC: { //发送音乐 Logger.i("客户端发送音乐请求"); // showToast("客户端发送音乐请求"); tcpService.setSavePath(BaseApplication.MUSIC_PATH); tcpService.startReceive(); IPMSGProtocol command = getConfirmCommand(IPMSGConst.AN_SEND_MUSIC, ipmsgRes.targetIP, senderIp); command.addObject = ipmsgRes.addObject; sendUDPdata(command); Intent intent = new Intent(ConfigBroadcast.ACTION_NEW_MSG); intent.putExtra(ConfigIntent.EXTRA_NEW_MSG_TYPE, ConfigIntent.NEW_MSG_TYPE_MUSIC); intent.putExtra(ConfigIntent.EXTRA_NEW_MSG_CONTENT, ipmsgRes.addObject.getMsgContent()); mContext.sendBroadcast(intent); } break; case IPMSGConst.NO_SEND_FILE: { //发送文件 Logger.i("客户端发送文件请求"); // showToast("客户端发送文件请求"); tcpService.setSavePath(BaseApplication.FILE_PATH); tcpService.startReceive(); IPMSGProtocol command = getConfirmCommand(IPMSGConst.AN_SEND_FILE, ipmsgRes.targetIP, senderIp); command.addObject = ipmsgRes.addObject; sendUDPdata(command); Intent intent = new Intent(ConfigBroadcast.ACTION_NEW_MSG); intent.putExtra(ConfigIntent.EXTRA_NEW_MSG_TYPE, ConfigIntent.NEW_MSG_TYPE_FILE); intent.putExtra(ConfigIntent.EXTRA_NEW_MSG_CONTENT, ipmsgRes.addObject.getMsgContent()); mContext.sendBroadcast(intent); } break; /*-------------------客户端------------------------------*/ case IPMSGConst.AN_CONNECT_SUCCESS: { //服务器确认连接成功 } break; case IPMSGConst.AN_SEND_TXT: { //服务器确认成功接收文本消息 } break; case IPMSGConst.AN_SEND_IMAGE: { //服务器确认成功接收图片 Message textMsg = ipmsgRes.addObject; Logger.d("接收方确认图片请求,发送的文件为" + textMsg.getMsgContent()); // showToast("开始发送图片"); 
tcpClient.startSend(); tcpClient.sendFile(textMsg.getMsgContent(), senderIp, Message.CONTENT_TYPE.IMAGE); } break; case IPMSGConst.AN_SEND_VOICE: { //服务器确认成功接收图片 Message textMsg = ipmsgRes.addObject; Logger.d("接收方确认语音请求,发送的文件为" + textMsg.getMsgContent()); // showToast("开始发送语音"); tcpClient.startSend(); tcpClient.sendFile(textMsg.getMsgContent(), senderIp, Message.CONTENT_TYPE.VOICE); } break; case IPMSGConst.AN_SEND_VEDIO: { //服务器确认接收视频 Message textMsg = ipmsgRes.addObject; Logger.d("接收方确认文件请求,发送的文件为" + textMsg.getMsgContent()); // showToast("开始发送文件"); tcpClient.startSend(); tcpClient.sendFile(textMsg.getMsgContent(), senderIp, Message.CONTENT_TYPE.VEDIO); } break; case IPMSGConst.AN_SEND_MUSIC: { //服务器确认接收音乐 Message textMsg = ipmsgRes.addObject; Logger.d("接收方确认文件请求,发送的文件为" + textMsg.getMsgContent()); // showToast("开始发送文件"); tcpClient.startSend(); tcpClient.sendFile(textMsg.getMsgContent(), senderIp, Message.CONTENT_TYPE.MUSIC); } break; case IPMSGConst.AN_SEND_FILE: { //服务器确认接收文件 Message textMsg = ipmsgRes.addObject; Logger.d("接收方确认文件请求,发送的文件为" + textMsg.getMsgContent()); // showToast("开始发送文件"); tcpClient.startSend(); tcpClient.sendFile(textMsg.getMsgContent(), senderIp, Message.CONTENT_TYPE.FILE); } break; /* // 收到上线数据包,添加用户,并回送IPMSG_ANSENTRY应答。 case IPMSGConst.IPMSG_BR_ENTRY: { L.i(TAG, "收到上线通知"); showToast("收到上线通知"); // addUser(ipmsgRes); sendUDPdata(IPMSGConst.IPMSG_ANSENTRY, senderIp, mLocalUser); L.i(TAG, "成功发送上线应答"); showToast("成功发送上线应答"); } break; // 收到上线应答,更新在线用户列表 case IPMSGConst.IPMSG_ANSENTRY: { L.i(TAG, "收到上线应答"); showToast("收到上线应答"); // addUser(ipmsgRes); } break; // 收到下线广播 case IPMSGConst.IPMSG_BR_EXIT: { removeOnlineUser(senderIMEI, 1); L.i(TAG, "成功删除imei为" + senderIMEI + "的用户"); } break; case IPMSGConst.IPMSG_REQUEST_IMAGE_DATA: L.i(TAG, "收到IMAGE发送请求"); tcpService = TcpService.getInstance(mContext); tcpService.setSavePath(BaseApplication.IMAG_PATH); tcpService.startReceive(); sendUDPdata(IPMSGConst.IPMSG_CONFIRM_IMAGE_DATA, senderIp); break; case 
IPMSGConst.IPMSG_REQUEST_VOICE_DATA: L.i(TAG, "收到VOICE发送请求"); tcpService = TcpService.getInstance(mContext); tcpService.setSavePath(BaseApplication.VOICE_PATH); tcpService.startReceive(); sendUDPdata(IPMSGConst.IPMSG_CONFIRM_VOICE_DATA, senderIp); break; case IPMSGConst.IPMSG_SENDMSG: { L.i(TAG, "收到MSG消息"); Message msg = (Message) ipmsgRes.getAddObject(); switch (msg.getContentType()) { case TEXT: Intent intent = new Intent(ConfigBroadcast.ACTION_NEW_MSG); intent.putExtra("msg", msg.getMsgContent()); mContext.sendBroadcast(intent); sendUDPdata(IPMSGConst.IPMSG_RECVMSG, senderIp, ipmsgRes.getPacketNo()); break; case IMAGE: L.i(TAG, "收到图片信息"); msg.setMsgContent(BaseApplication.IMAG_PATH + File.separator + msg.getSenderIMEI() + File.separator + msg.getMsgContent()); String THUMBNAIL_PATH = BaseApplication.THUMBNAIL_PATH + File.separator + msg.getSenderIMEI(); L.d(TAG, "缩略图文件夹路径:" + THUMBNAIL_PATH); L.d(TAG, "图片文件路径:" + msg.getMsgContent()); ImageUtils.createThumbnail(mContext, msg.getMsgContent(), THUMBNAIL_PATH + File.separator); break; case VOICE: L.i(TAG, "收到录音信息"); msg.setMsgContent(BaseApplication.VOICE_PATH + File.separator + msg.getSenderIMEI() + File.separator + msg.getMsgContent()); L.d(TAG, "文件路径:" + msg.getMsgContent()); break; case FILE: L.i(TAG, "收到文件 发送请求"); tcpService = TcpService.getInstance(mContext); tcpService.setSavePath(BaseApplication.FILE_PATH); tcpService.startReceive(); sendUDPdata(IPMSGConst.IPMSG_CONFIRM_FILE_DATA, senderIp); msg.setMsgContent(BaseApplication.FILE_PATH + File.separator + msg.getSenderIMEI() + File.separator + msg.getMsgContent()); L.d(TAG, "文件路径:" + msg.getMsgContent()); break; } // 加入数据库 // mDBOperate.addChattingInfo(senderIMEI, SessionUtils.getIMEI(), msg.getSendTime(), // msg.getMsgContent(), msg.getContentType()); // 加入未读消息列表 android.os.Message pMessage = new android.os.Message(); pMessage.what = commandNo; pMessage.obj = msg; // ChatActivity v = ActivitiesManager.getChatActivity(); // if (v == null) { // 
addUnReadPeople(getOnlineUser(senderIMEI)); // 添加到未读用户列表 showToast("listenerSize=" + mListenerList.size()); for (int i = 0; i < mListenerList.size(); i++) { android.os.Message pMsg = new android.os.Message(); pMsg.what = commandNo; mListenerList.get(i).processMessage(pMsg); } // } // else { // v.processMessage(pMessage); // } // addLastMsgCache(senderIMEI, msg); // 添加到消息缓存 BaseApplication.playNotification(); } break; default: L.i(TAG, "收到命令:" + commandNo); android.os.Message pMessage = new android.os.Message(); pMessage.what = commandNo; // ChatActivity v = ActivitiesManager.getChatActivity(); // if (v != null) { //// v.processMessage(pMessage); // } break; */ } // End of switch //回调处理 callBack(ipmsgRes); } /** * 创建一个新的指令 * * @param commandNo * @param senderIp * @param targetIP */ private IPMSGProtocol getConfirmCommand(int commandNo, String senderIp, String targetIP) { IPMSGProtocol command = new IPMSGProtocol(); command.commandNo = commandNo; command.senderIP = senderIp; command.targetIP = targetIP; command.packetNo = new Date().getTime() + ""; return command; } /** * 回调给Listener * * @param ipmsgRes */ private void callBack(IPMSGProtocol ipmsgRes) { // showToast("listenerSize=" + mListenerList.size()); for (int i = 0; i < mListenerList.size(); i++) { mListenerList.get(i).processMessage(ipmsgRes); } } /** * 建立Socket连接 * * 绑定监听的端口,初始化一个数据报包用来接收 */ public void connectUDPSocket() { try { // 绑定端口 if (UDPSocket == null) UDPSocket = new DatagramSocket(IPMSGConst.PORT); L.i(TAG, "connectUDPSocket() 绑定端口成功"); // 创建数据接受包 if (receiveDatagramPacket == null) receiveDatagramPacket = new DatagramPacket(receiveBuffer, BUFFERLENGTH); startUDPSocketThread(); } catch (SocketException e) { e.printStackTrace(); } } /** * 开始监听线程 * */ public void startUDPSocketThread() { if (receiveUDPThread == null) { //将本runnable的实现放进thread receiveUDPThread = new Thread(this); receiveUDPThread.start(); //执行run方法 } isThreadRunning = true; L.i(TAG, "startUDPSocketThread() 线程启动成功"); } /** * 暂停监听线程 * */ 
public void stopUDPSocketThread() { isThreadRunning = false; if (receiveUDPThread != null) receiveUDPThread.interrupt(); receiveUDPThread = null; instance = null; // 置空, 消除静态变量引用 L.i(TAG, "stopUDPSocketThread() 线程停止成功"); } public void addMsgListener(OnNewMsgListener listener) { //等会要发送消息的对象列表 this.mListenerList.add(listener); } public void removeMsgListener(OnNewMsgListener listener) { this.mListenerList.remove(listener); } /** * 发送UDP数据包 * * @param ipmsgProtocol 附加的Json指令 */ public static void sendUDPdata(final IPMSGProtocol ipmsgProtocol) { final String targetIP = ipmsgProtocol.targetIP; executor.execute(new Runnable() { @Override public void run() { try { Logger.i(targetIP); InetAddress targetAddr = InetAddress.getByName(targetIP); // 目的地址 sendBuffer = GsonUtils.beanToJson(ipmsgProtocol).getBytes("gbk"); sendDatagramPacket = new DatagramPacket(sendBuffer, sendBuffer.length, targetAddr, IPMSGConst.PORT); UDPSocket.send(sendDatagramPacket); L.i(TAG, "sendUDPdata() 数据发送成功"); } catch (Exception e) { e.printStackTrace(); L.e(TAG, "sendUDPdata() 发送UDP数据包失败"); } } }); } public Users getOnlineUser(String paramIMEI) { return mOnlineUsers.get(paramIMEI); } public HashMap<String, Users> getOnlineUserMap() { return mOnlineUsers; } /** * 新增用户缓存 * * @param paramIMEI 新增记录的对应用户IMEI * @param msg 需要缓存的消息对象 */ public void addLastMsgCache(String paramIMEI, Message msg) { StringBuffer content = new StringBuffer(); switch (msg.getContentType()) { case FILE: content.append("<FILE>: ").append(msg.getMsgContent()); break; case IMAGE: content.append("<IMAGE>: ").append(msg.getMsgContent()); break; case VOICE: content.append("<VOICE>: ").append(msg.getMsgContent()); break; default: content.append(msg.getMsgContent()); break; } if (msg.getMsgContent().isEmpty()) { content.append(" "); } mLastMsgCache.put(paramIMEI, content.toString()); } /** * 获取消息缓存 * * @param paramIMEI 需要获取消息缓存记录的用户IMEI * @return */ public String getLastMsgCache(String paramIMEI) { return mLastMsgCache.get(paramIMEI); } 
/**
 * Removes the cached last message for the given device IMEI.
 *
 * @param paramIMEI IMEI whose cache entry should be dropped
 */
public void removeLastMsgCache(String paramIMEI) {
    mLastMsgCache.remove(paramIMEI);
}

/** Clears every cached "last message" entry. */
public void clearMsgCache() {
    mLastMsgCache.clear();
}

/** Empties the unread-sender queue. */
public void clearUnReadMessages() {
    mUnReadPeopleList.clear();
}

/**
 * Adds a user to the unread-message queue, keeping the queue duplicate-free.
 *
 * @param people sender who has unread messages
 */
public void addUnReadPeople(Users people) {
    boolean alreadyQueued = mUnReadPeopleList.contains(people);
    if (!alreadyQueued) {
        mUnReadPeopleList.add(people);
    }
}

/**
 * @return the queue of users with unread messages
 */
public ArrayList<Users> getUnReadPeopleList() {
    return mUnReadPeopleList;
}

/**
 * @return how many users currently have unread messages
 */
public int getUnReadPeopleSize() {
    return mUnReadPeopleList.size();
}

/**
 * Drops a user from the unread-message queue if present.
 *
 * @param people user to remove
 */
public void removeUnReadPeople(Users people) {
    // List.remove(Object) is a no-op when the element is absent,
    // so no contains() guard is required.
    mUnReadPeopleList.remove(people);
}

/** Stores the handler that is later handed to the TCP send/receive helpers. */
public void setHandler(Handler mHandler) {
    this.mHanlder = mHandler;
}

/** Callback notified for every protocol message this listener processes. */
public interface OnNewMsgListener {
    void processMessage(IPMSGProtocol pMsg);
}

/**
 * Toast display is intentionally disabled; the method is kept as a no-op
 * debugging hook so call sites do not have to change.
 */
public void showToast(final String s) {
    // no-op: the original toast code is disabled
}
}
apache-2.0
edgars/wso2-hacks
internetbank/src/main/java/daoImpl/Admin_Login_DaoImpl.java
4001
package daoImpl; import java.sql.Timestamp; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.struts2.interceptor.SessionAware; import org.hibernate.classic.Session; import org.hibernate.*; import com.opensymphony.xwork2.ActionContext; import com.opensymphony.xwork2.ModelDriven; import actionForm.Admin_Login; import actionForm.Admin_LoginMan; import util.HibernateUtil; /** * @author VS60001724 * */ public class Admin_Login_DaoImpl extends HibernateUtil implements ModelDriven<Object>, SessionAware { private Map<String, Object> usersession; /** * @param login Admin Login Method * @return */ public Admin_Login checkLogin(Admin_Login login) { usersession = ActionContext.getContext().getSession(); Session session = HibernateUtil.getSessionFactory().getCurrentSession(); String userName = null; String password = null; String bank_id = null; userName = login.getUserName(); password = login.getPassword(); bank_id = login.getBank_id(); String SQL_QUERY = "SELECT login FROM Admin_Login login WHERE login.userName = '" + userName + "' AND login.password = '" + password + "' AND login.bank_id = '" + bank_id + "'"; try { System.out.println(SQL_QUERY); session.beginTransaction(); Query query = session.createQuery(SQL_QUERY); @SuppressWarnings("rawtypes") Iterator it = query.iterate(); if (it.hasNext()) { login = (Admin_Login) it.next(); Admin_LoginMan rr = new Admin_LoginMan(); rr.setBank_id(bank_id); System.out.println("From DA:" + rr.getBank_id()); java.util.Date date = new java.util.Date(); date = new Timestamp(date.getTime()); rr.setCreated(date); session.save(rr); //Saving Last Login String SQL_QUERY1 = "SELECT depo.created FROM Admin_LoginMan depo WHERE depo.bank_id ='" + bank_id + "' ORDER BY depo.id DESC"; Query query1 = session.createQuery(SQL_QUERY1); @SuppressWarnings("rawtypes") List results = query1.list(); try { String se = results.get(1).toString(); usersession.put("user2", se); } catch(Exception e) { 
System.out.println(e.getMessage()); } } else { login.setBank_id(null); } } catch (Exception e) { System.out.println(e.getMessage()); } session.getTransaction().commit(); return login; } /** * @param chpw Admin Change own password Method * @return */ public Admin_Login changepw(Admin_Login chpw) { usersession = ActionContext.getContext().getSession(); String abcd = (String) usersession.get("user1"); System.out.println("From DAOIMPL Class:" + abcd); String test = null; test = chpw.getOldpw(); String test2 = null; test2 = chpw.getNewpw(); Session session = HibernateUtil.getSessionFactory().getCurrentSession(); session.beginTransaction(); String SQL_QUERY = "SELECT chpw.password FROM Admin_Login chpw WHERE chpw.bank_id ='" + abcd + "' AND chpw.password='" + test + "' "; try { Query query = session.createQuery(SQL_QUERY); @SuppressWarnings("rawtypes") List results = query.list(); String f_amount = (String) results.get(0); System.out.println(f_amount); if (f_amount != null) { chpw.setTest("good"); String SQL_QUERY2 = "UPDATE Admin_Login set password = :password"; Query query2 = session.createQuery(SQL_QUERY2); query2.setParameter("password", test2); int result = query2.executeUpdate(); System.out.println("Rows affected: " + result); } else { chpw.setTest("not"); } } catch (Exception e) { chpw.setTest("not"); System.out.println(e.getMessage()); } session.getTransaction().commit(); return chpw; } public void setSession(Map<String, Object> arg0) { } public Object getModel() { return null; } }
apache-2.0
kjanosz/stock-market-sherlock
news-parser/src/main/scala/sms/news/package.scala
311
package sms import scala.concurrent.ExecutionContext import sms.core.Akka package object news { object ec { implicit lazy val context: ExecutionContext = Akka.dispatcher("akka.actor.news-parsing-dispatcher") } object withURL { def apply[T](path: String)(block: String => T) = block(path) } }
apache-2.0
LimitPointSystems/SheafSystem
tools/viewer/event/MouseEnteredEvent.java
1257
// // Copyright (c) 2014 Limit Point Systems, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package tools.viewer.event; import tools.viewer.render.*; import java.awt.event.*; /** * * Implementation <code>RenderThreadEvent</code> for * processing a mouse entered event. * */ public class MouseEnteredEvent extends RenderThreadEvent { /** * The <code>MouseEvent</code> */ protected MouseEvent mouseEvent; /** * The constructor */ public MouseEnteredEvent(MouseEvent xmouseEvent) { mouseEvent = xmouseEvent; } /** * Handle this <code>MouseEnteredEvent</code> */ public void handle(RenderThread xrenderThread) { if(abort) return; xrenderThread.mouseEntered(mouseEvent); } }
apache-2.0
NakedObjectsGroup/NakedObjectsFramework
Programming Model/NakedFunctions/NakedFunctions.ProgrammingModel/Attributes/PresentationHintAttribute.cs
1154
// Copyright Naked Objects Group Ltd, 45 Station Road, Henley on Thames, UK, RG9 1AT // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. // You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. // Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and limitations under the License. using System; using NakedFramework; namespace NakedFunctions { /// <summary> /// A hint added to the associated display element. For example to be rendered as a class on the html, and picked up by /// the CSS. /// </summary> [AttributeUsage(AttributeTargets.Class | AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Parameter)] public class PresentationHintAttribute : AbstractPresentationHintAttribute { public PresentationHintAttribute(string s) : base(s) { } } }
apache-2.0
ericsoderberg/pbc-web
ui/js/pages/domain/DomainAdd.js
278
import Add from '../../components/Add'; import DomainFormContents from './DomainFormContents'; export default class DomainAdd extends Add {} DomainAdd.defaultProps = { ...Add.defaultProps, category: 'domains', FormContents: DomainFormContents, title: 'Add Domain', };
apache-2.0
transcendent-ai-labs/DynaML
dynaml-tensorflow/src/main/scala/io/github/mandar2812/dynaml/tensorflow/data/DataApi.scala
4337
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied.  See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.tensorflow.data

import os.Path
import com.sksamuel.scrimage.Image
import io.github.mandar2812.dynaml.pipes.{DataPipe, StreamDataPipe}
import io.github.mandar2812.dynaml.tensorflow.api.Api
import org.platanios.tensorflow.api._
import org.platanios.tensorflow.api.core.types.UByte

private[tensorflow] object DataApi {

  val dataset: DataSet.type = DataSet

  val supervised_dataset: SupervisedDataSet.type = SupervisedDataSet

  val tf_dataset: TFDataSet.type = TFDataSet

  /**
    * Create a tensor from a collection of image data,
    * in a buffered manner.
    *
    * @param buff_size The size of the buffer (in number of images to load at once)
    * @param image_to_bytes Pipe converting a loaded image into its raw byte content.
    * @param image_height The height, in pixels, of the image.
    * @param image_width The width, in pixels, of the image.
    * @param num_channels The number of channels in the image data.
    * @param coll The collection which holds the data for each image.
    * @param size The number of elements in the collection
    * */
  def create_image_tensor_buffered(
    buff_size: Int,
    image_to_bytes: DataPipe[Image, Array[Byte]],
    image_height: Int,
    image_width: Int,
    num_channels: Int)(
    coll: Iterable[Path],
    size: Int): Tensor[UByte] = {

    // Load each image from disk and convert it to raw bytes.
    val load_image = StreamDataPipe(DataPipe((p: Path) => Image.fromPath(p.toNIO)) > image_to_bytes)

    println()
    val tensor_splits = coll.grouped(buff_size).toIterable.zipWithIndex.map(splitAndIndex => {

      val split_seq = splitAndIndex._1.toStream

      // Floating-point division (10d) keeps one decimal digit of progress.
      val progress = math.round(10*splitAndIndex._2*buff_size*100.0/size)/10d

      print("Progress %:\t")
      pprint.pprintln(progress)

      Api.tensor_from_buffer[UByte](
        split_seq.length, image_height, image_width, num_channels)(
        load_image(split_seq).flatten.toArray)
    })

    Api.concatenate(tensor_splits.toSeq, axis = 0)
  }

  /**
    * Create a tensor from a multi-source collection of image data,
    * in a buffered manner.
    *
    * @param buff_size The size of the buffer (in number of images to load at once)
    * @param image_sources The ordered sequence of sources to read per record.
    * @param image_process Per-source preprocessing applied to each loaded image.
    * @param images_to_bytes Pipe converting the processed images of one source into bytes.
    * @param image_height The height, in pixels, of the image.
    * @param image_width The width, in pixels, of the image.
    * @param num_channels The number of channels in the image data.
    * @param coll The collection which holds the data for each image.
    * @param size The number of elements in the collection
    * */
  def create_image_tensor_buffered[Source](
    buff_size: Int,
    image_sources: Seq[Source],
    image_process: Map[Source, DataPipe[Image, Image]],
    images_to_bytes: DataPipe[Seq[Image], Array[Byte]],
    image_height: Int,
    image_width: Int,
    num_channels: Int)(
    coll: Iterable[Map[Source, Seq[Path]]],
    size: Int): Tensor[UByte] = {

    // For every record, load and preprocess each source's images,
    // then flatten the per-source byte blocks into one array.
    val load_image = StreamDataPipe(DataPipe((images_map: Map[Source, Seq[Path]]) => {
      image_sources.map(source => {
        val images_for_source = images_map(source).map(p => image_process(source)(Image.fromPath(p.toNIO)))
        images_to_bytes(images_for_source)
      }).toArray.flatten
    }))

    println()
    val tensor_splits = coll.grouped(buff_size).toIterable.zipWithIndex.map(splitAndIndex => {

      val split_seq = splitAndIndex._1.toStream

      // Bug fix: the original divided by 10 (integer division), truncating
      // the fractional progress; use 10d to match the other overload.
      val progress = math.round(10*splitAndIndex._2*buff_size*100.0/size)/10d

      print("Progress %:\t")
      pprint.pprintln(progress)

      Api.tensor_from_buffer[UByte](
        split_seq.length, image_height, image_width, num_channels)(
        load_image(split_seq).flatten.toArray)
    })

    Api.concatenate(tensor_splits.toSeq, axis = 0)
  }
}
apache-2.0
dreamchenzhou/MouseRace
src/com/dreamchen/useful/mouserace/view/sliding/AbHorizontalScrollView.java
3579
/* * Copyright (C) 2012 www.amsoft.cn * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dreamchen.useful.mouserace.view.sliding; import android.content.Context; import android.graphics.Rect; import android.os.Handler; import android.util.AttributeSet; import android.widget.HorizontalScrollView; // TODO: Auto-generated Javadoc /** * © 2012 amsoft.cn * 名称:AbHorizontalScrollView1.java * 描述:有滚动事件监听的HorizontalScrollView * * @author 还如一梦中 * @version v1.0 * @date:2013-11-20 下午3:00:53 */ public class AbHorizontalScrollView extends HorizontalScrollView { /** The intit position. */ private int intitPosition; /** The child width. */ private int childWidth = 0; /** The on scroll listner. */ private AbOnScrollListener onScrollListner; /** * Instantiates a new ab horizontal scroll view. * * @param context the context */ public AbHorizontalScrollView(Context context) { super(context); } /** * Instantiates a new ab horizontal scroll view. 
* * @param context the context * @param attrs the attrs */ public AbHorizontalScrollView(Context context, AttributeSet attrs) { super(context, attrs); } /* (non-Javadoc) * @see android.view.View#onScrollChanged(int, int, int, int) */ @Override protected void onScrollChanged(int l, int t, int oldl, int oldt) { int newPosition = getScrollX(); if (intitPosition - newPosition == 0) { if (onScrollListner == null) { return; } onScrollListner.onScrollStoped(); new Handler().postDelayed(new Runnable(){ @Override public void run(){ Rect outRect = new Rect(); getDrawingRect(outRect); if (getScrollX() == 0) { onScrollListner.onScroll(0); onScrollListner.onScrollToLeft(); } else if (childWidth + getPaddingLeft() + getPaddingRight() == outRect.right) { onScrollListner.onScroll(getScrollX()); onScrollListner.onScrollToRight(); } else { onScrollListner.onScroll(getScrollX()); } } },200); } else { intitPosition = getScrollX(); checkTotalWidth(); } super.onScrollChanged(l, t, oldl, oldt); } /** * 描述:设置监听器. * * @param listner the new on scroll listener */ public void setOnScrollListener(AbOnScrollListener listner) { onScrollListner = listner; } /** * 计算总宽. */ private void checkTotalWidth() { if (childWidth > 0) { return; } for (int i = 0; i < getChildCount(); i++) { childWidth += getChildAt(i).getWidth(); } } /** * 滚动. * * @see AbOnScrollEvent */ public interface AbOnScrollListener { /** * 滚动. * @param arg1 返回参数 */ public void onScroll(int arg1); /** * 滚动停止. */ public void onScrollStoped(); /** * 滚到了最左边. */ public void onScrollToLeft(); /** * 滚到了最右边. */ public void onScrollToRight(); } }
apache-2.0
HiroyasuNishiyama/node-red
packages/node_modules/@node-red/runtime/lib/storage/localfilesystem/projects/git/index.js
23652
/** * Copyright JS Foundation and other contributors, http://js.foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ var exec = require("../../../../exec"); var authResponseServer = require('./authServer').ResponseServer; var sshResponseServer = require('./authServer').ResponseSSHServer; var clone = require('clone'); var path = require("path"); var gitCommand = "git"; var gitVersion; var log = require("@node-red/util").log; function runGitCommand(args,cwd,env,emit) { log.trace(gitCommand + JSON.stringify(args)); args.unshift("credential.helper=") args.unshift("-c"); return exec.run(gitCommand, args, {cwd:cwd, detached:true, env:env}, emit).then(result => { return result.stdout; }).catch(result => { var stdout = result.stdout; var stderr = result.stderr; var err = new Error(stderr); err.stdout = stdout; err.stderr = stderr; if (/Connection refused/i.test(stderr)) { err.code = "git_connection_failed"; } else if (/Connection timed out/i.test(stderr)) { err.code = "git_connection_failed"; } else if (/fatal: could not read/i.test(stderr)) { // Username/Password err.code = "git_auth_failed"; } else if(/HTTP Basic: Access denied/i.test(stderr)) { err.code = "git_auth_failed"; } else if(/Permission denied \(publickey\)/i.test(stderr)) { err.code = "git_auth_failed"; } else if(/Host key verification failed/i.test(stderr)) { // TODO: handle host key verification errors separately err.code = "git_auth_failed"; } else if (/commit your changes or stash/i.test(stderr)) { err.code = 
"git_local_overwrite"; } else if (/CONFLICT/.test(err.stdout)) { err.code = "git_pull_merge_conflict"; } else if (/not fully merged/i.test(stderr)) { err.code = "git_delete_branch_unmerged"; } else if (/remote .* already exists/i.test(stderr)) { err.code = "git_remote_already_exists"; } else if (/does not appear to be a git repository/i.test(stderr)) { err.code = "git_not_a_repository"; } else if (/Repository not found/i.test(stderr)) { err.code = "git_repository_not_found"; } else if (/repository '.*' does not exist/i.test(stderr)) { err.code = "git_repository_not_found"; } else if (/refusing to merge unrelated histories/i.test(stderr)) { err.code = "git_pull_unrelated_history" } else if (/Please tell me who you are/i.test(stderr)) { err.code = "git_missing_user"; } else if (/name consists only of disallowed characters/i.test(stderr)) { err.code = "git_missing_user"; } throw err; }) } function runGitCommandWithAuth(args,cwd,auth,emit) { log.trace("runGitCommandWithAuth "+JSON.stringify(auth).replace(/("pass.*?"\s*:\s*").+?"/g,'$1[hidden]"')); return authResponseServer(auth).then(function(rs) { var commandEnv = clone(process.env); commandEnv.GIT_ASKPASS = path.join(__dirname,"node-red-ask-pass.sh"); commandEnv.NODE_RED_GIT_NODE_PATH = process.execPath; commandEnv.NODE_RED_GIT_SOCK_PATH = rs.path; commandEnv.NODE_RED_GIT_ASKPASS_PATH = path.join(__dirname,"authWriter.js"); return runGitCommand(args,cwd,commandEnv,emit).then( result => { rs.close(); return result; }).catch(err => { rs.close(); throw err; }); }) } function runGitCommandWithSSHCommand(args,cwd,auth,emit) { log.trace("runGitCommandWithSSHCommand "+JSON.stringify(auth).replace(/("pass.*?"\s*:\s*").+?"/g,'$1[hidden]"')); return sshResponseServer(auth).then(function(rs) { var commandEnv = clone(process.env); commandEnv.SSH_ASKPASS = path.join(__dirname,"node-red-ask-pass.sh"); commandEnv.DISPLAY = "dummy:0"; commandEnv.NODE_RED_GIT_NODE_PATH = process.execPath; commandEnv.NODE_RED_GIT_SOCK_PATH = rs.path; 
commandEnv.NODE_RED_GIT_ASKPASS_PATH = path.join(__dirname,"authWriter.js"); // For git < 2.3.0 commandEnv.GIT_SSH = path.join(__dirname,"node-red-ssh.sh"); commandEnv.NODE_RED_KEY_FILE=auth.key_path; // GIT_SSH_COMMAND - added in git 2.3.0 commandEnv.GIT_SSH_COMMAND = "ssh -i " + auth.key_path + " -F /dev/null"; // console.log('commandEnv:', commandEnv); return runGitCommand(args,cwd,commandEnv,emit).then( result => { rs.close(); return result; }).catch(err => { rs.close(); throw err; }); }) } function cleanFilename(name) { if (name[0] !== '"') { return name; } return name.substring(1,name.length-1); } function parseFilenames(name) { var re = /([^ "]+|(".*?"))($| -> ([^ ]+|(".*"))$)/; var m = re.exec(name); var result = []; if (m) { result.push(cleanFilename(m[1])); if (m[4]) { result.push(cleanFilename(m[4])); } } return result; } // function getBranchInfo(localRepo) { // return runGitCommand(["status","--porcelain","-b"],localRepo).then(function(output) { // var lines = output.split("\n"); // var unknownDirs = []; // var branchLineRE = /^## (No commits yet on )?(.+?)($|\.\.\.(.+?)($| \[(ahead (\d+))?.*?(behind (\d+))?\]))/m; // console.log(output); // console.log(lines); // var m = branchLineRE.exec(output); // console.log(m); // var result = {}; //commits:{}}; // if (m) { // if (m[1]) { // result.empty = true; // } // result.local = m[2]; // if (m[4]) { // result.remote = m[4]; // } // } // return result; // }); // } function getStatus(localRepo) { // parseFilename('"test with space"'); // parseFilename('"test with space" -> knownFile.txt'); // parseFilename('"test with space" -> "un -> knownFile.txt"'); var result = { files: {}, commits: {}, branches: {} } return runGitCommand(['rev-list', 'HEAD', '--count'],localRepo).then(function(count) { result.commits.total = parseInt(count); }).catch(function(err) { if (/ambiguous argument/i.test(err.message)) { result.commits.total = 0; } else { throw err; } }).then(function() { return 
runGitCommand(["ls-files","--cached","--others","--exclude-standard"],localRepo).then(function(output) { var lines = output.split("\n"); lines.forEach(function(l) { if (l==="") { return; } var fullName = cleanFilename(l); // parseFilename(l); var parts = fullName.split("/"); var p = result.files; var name; for (var i = 0;i<parts.length-1;i++) { var name = parts.slice(0,i+1).join("/")+"/"; if (!p.hasOwnProperty(name)) { p[name] = { type:"d" } } } result.files[fullName] = { type: /\/$/.test(fullName)?"d":"f" } }) return runGitCommand(["status","--porcelain","-b"],localRepo).then(function(output) { var lines = output.split("\n"); var unknownDirs = []; var branchLineRE = /^## (?:(?:No commits yet on )|(?:Initial commit on))?(.+?)(?:$|\.\.\.(.+?)(?:$| \[(?:(?:ahead (\d+)(?:,\s*)?)?(?:behind (\d+))?|(gone))\]))/; lines.forEach(function(line) { if (line==="") { return; } if (line[0] === "#") { var m = branchLineRE.exec(line); if (m) { result.branches.local = m[1]; if (m[2]) { result.branches.remote = m[2]; result.commits.ahead = 0; result.commits.behind = 0; } if (m[3] !== undefined) { result.commits.ahead = parseInt(m[3]); } if (m[4] !== undefined) { result.commits.behind = parseInt(m[4]); } if (m[5] !== undefined) { result.commits.ahead = result.commits.total; result.branches.remoteError = { code: "git_remote_gone" } } } return; } var status = line.substring(0,2); var fileName; var names; if (status !== '??') { names = parseFilenames(line.substring(3)); } else { names = [cleanFilename(line.substring(3))]; } fileName = names[0]; if (names.length > 1) { fileName = names[1]; } // parseFilename(fileName); if (fileName.charCodeAt(0) === 34) { fileName = fileName.substring(1,fileName.length-1); } if (result.files.hasOwnProperty(fileName)) { result.files[fileName].status = status; } else { result.files[fileName] = { type: "f", status: status }; } if (names.length > 1) { result.files[fileName].oldName = names[0]; } if (status === "??" 
&& fileName[fileName.length-1] === '/') { unknownDirs.push(fileName); } }) var allFilenames = Object.keys(result.files); allFilenames.forEach(function(f) { var entry = result.files[f]; if (!entry.hasOwnProperty('status')) { unknownDirs.forEach(function(uf) { if (f.startsWith(uf)) { entry.status = "??" } }); } }) // console.log(files); return result; }) }) }) } function parseLog(log) { var lines = log.split("\n"); var currentCommit = {}; var commits = []; lines.forEach(function(l) { if (l === "-----") { commits.push(currentCommit); currentCommit = {} return; } var m = /^(.*): (.*)$/.exec(l); if (m) { // git 2.1.4 (Debian Stable) doesn't support %D for refs - so filter out if (m[1] === 'refs' && m[2]) { if (m[2] !== '%D') { currentCommit[m[1]] = m[2].split(",").map(function(v) { return v.trim() }); } else { currentCommit[m[1]] = []; } } else { if (m[1] === 'parents') { currentCommit[m[1]] = m[2].split(" "); } else { currentCommit[m[1]] = m[2]; } } } }); return commits; } function getRemotes(cwd) { return runGitCommand(['remote','-v'],cwd).then(function(output) { var result; if (output.length > 0) { result = {}; var remoteRE = /^(.+)\t(.+) \((.+)\)$/gm; var m; while ((m = remoteRE.exec(output)) !== null) { result[m[1]] = result[m[1]]||{}; result[m[1]][m[3]] = m[2]; } } return result; }) } function getBranches(cwd, remote) { var args = ['branch','-vv','--no-color']; if (remote) { args.push('-r'); } var branchRE = /^([ \*] )(\S+) +(\S+)(?: \[(\S+?)(?:: (?:ahead (\d+)(?:, )?)?(?:behind (\d+))?)?\])? 
(.*)$/; return runGitCommand(args,cwd).then(function(output) { var branches = []; var lines = output.split("\n"); branches = lines.map(function(l) { var m = branchRE.exec(l); var branch = null; if (m) { branch = { name: m[2], remote: m[4], status: { ahead: m[5]||0, behind: m[6]||0, }, commit: { sha: m[3], subject: m[7] } } if (m[1] === '* ') { branch.current = true; } } return branch; }).filter(function(v) { return !!v && v.commit.sha !== '->' }); return {branches:branches}; }) } function getBranchStatus(cwd,remoteBranch) { var commands = [ // #commits master ahead runGitCommand(['rev-list', 'HEAD','^'+remoteBranch, '--count'],cwd), // #commits master behind runGitCommand(['rev-list', '^HEAD',remoteBranch, '--count'],cwd) ]; return Promise.all(commands).then(function(results) { return { commits: { ahead: parseInt(results[0]), behind: parseInt(results[1]) } } }) } function addRemote(cwd,name,options) { var args = ["remote","add",name,options.url] return runGitCommand(args,cwd); } function removeRemote(cwd,name) { var args = ["remote","remove",name]; return runGitCommand(args,cwd); } module.exports = { init: function(_settings) { return new Promise(function(resolve,reject) { Promise.all([ runGitCommand(["--version"]), runGitCommand(["config","--global","user.name"]).catch(err=>""), runGitCommand(["config","--global","user.email"]).catch(err=>"") ]).then(function(output) { var m = / (\d\S+)/.exec(output[0]); gitVersion = m[1]; var globalUserName = output[1].trim(); var globalUserEmail = output[2].trim(); var result = { version: gitVersion }; if (globalUserName && globalUserEmail) { result.user = { name: globalUserName, email: globalUserEmail } } log.trace("git init: "+JSON.stringify(result)); resolve(result); }).catch(function(err) { log.trace("git init: git not found"); resolve(null); }); }); }, initRepo: function(cwd) { return runGitCommand(["init"],cwd); }, setUpstream: function(cwd,remoteBranch) { var args = ["branch","--set-upstream-to",remoteBranch]; return 
runGitCommand(args,cwd); }, pull: function(cwd,remote,branch,allowUnrelatedHistories,auth,gitUser) { var args = ["pull"]; if (remote && branch) { args.push(remote); args.push(branch); } if (gitUser && gitUser['name'] && gitUser['email']) { args.unshift('user.name="'+gitUser['name']+'"'); args.unshift('-c'); args.unshift('user.email="'+gitUser['email']+'"'); args.unshift('-c'); } if (allowUnrelatedHistories) { args.push("--allow-unrelated-histories"); } var promise; if (auth) { if ( auth.key_path ) { promise = runGitCommandWithSSHCommand(args,cwd,auth,true); } else { promise = runGitCommandWithAuth(args,cwd,auth,true); } } else { promise = runGitCommand(args,cwd,undefined,true) } return promise; // .catch(function(err) { // if (/CONFLICT/.test(err.stdout)) { // var e = new Error("pull failed - merge conflict"); // e.code = "git_pull_merge_conflict"; // throw e; // } else if (/Please commit your changes or stash/i.test(err.message)) { // var e = new Error("Pull failed - local changes would be overwritten"); // e.code = "git_pull_overwrite"; // throw e; // } // throw err; // }); }, push: function(cwd,remote,branch,setUpstream, auth) { var args = ["push"]; if (branch) { if (setUpstream) { args.push("-u"); } args.push(remote); args.push("HEAD:"+branch); } else { args.push(remote); } args.push("--porcelain"); var promise; if (auth) { if ( auth.key_path ) { promise = runGitCommandWithSSHCommand(args,cwd,auth,true); } else { promise = runGitCommandWithAuth(args,cwd,auth,true); } } else { promise = runGitCommand(args,cwd,undefined,true) } return promise.catch(function(err) { if (err.code === 'git_error') { if (/^!.*non-fast-forward/m.test(err.stdout)) { err.code = 'git_push_failed'; } throw err; } else { throw err; } }); }, clone: function(remote, auth, cwd) { var args = ["clone",remote.url]; if (remote.name) { args.push("-o"); args.push(remote.name); } if (remote.branch) { args.push("-b"); args.push(remote.branch); } args.push("."); if (auth) { if ( auth.key_path ) { 
return runGitCommandWithSSHCommand(args,cwd,auth,true); } else { return runGitCommandWithAuth(args,cwd,auth,true); } } else { return runGitCommand(args,cwd,undefined,true); } }, getStatus: getStatus, getFile: function(cwd, filePath, treeish) { var args = ["show",treeish+":"+filePath]; return runGitCommand(args,cwd); }, getFiles: function(cwd) { return getStatus(cwd).then(function(status) { return status.files; }) }, revertFile: function(cwd, filePath) { var args = ["checkout",filePath]; return runGitCommand(args,cwd); }, stageFile: function(cwd,file) { var args = ["add"]; if (Array.isArray(file)) { args = args.concat(file); } else { args.push(file); } return runGitCommand(args,cwd); }, unstageFile: function(cwd, file) { var args = ["reset","--"]; if (file) { args.push(file); } return runGitCommand(args,cwd); }, commit: function(cwd, message, gitUser) { var args = ["commit","-m",message]; var env; if (gitUser && gitUser['name'] && gitUser['email']) { args.unshift('user.name="'+gitUser['name']+'"'); args.unshift('-c'); args.unshift('user.email="'+gitUser['email']+'"'); args.unshift('-c'); } return runGitCommand(args,cwd,env); }, getFileDiff(cwd,file,type) { var args = ["diff","-w"]; if (type === "tree") { // nothing else to do } else if (type === "index") { args.push("--cached"); } args.push(file); return runGitCommand(args,cwd); }, fetch: function(cwd,remote,auth) { var args = ["fetch",remote]; if (auth) { if ( auth.key_path ) { return runGitCommandWithSSHCommand(args,cwd,auth); } else { return runGitCommandWithAuth(args,cwd,auth); } } else { return runGitCommand(args,cwd); } }, getCommits: function(cwd,options) { var args = ["log", "--format=sha: %H%nparents: %p%nrefs: %D%nauthor: %an%ndate: %ct%nsubject: %s%n-----"]; var limit = parseInt(options.limit) || 20; args.push("-n "+limit); var before = options.before; if (before) { args.push(before); } var commands = [ runGitCommand(['rev-list', 'HEAD', '--count'],cwd), runGitCommand(args,cwd).then(parseLog) ]; return 
Promise.all(commands).then(function(results) { var result = results[0]; result.count = results[1].length; result.before = before; result.commits = results[1]; return { count: results[1].length, commits: results[1], before: before, total: parseInt(results[0]) }; }) }, getCommit: function(cwd,sha) { var args = ["show",sha]; return runGitCommand(args,cwd); }, abortMerge: function(cwd) { return runGitCommand(['merge','--abort'],cwd); }, getRemotes: getRemotes, getRemoteBranch: function(cwd) { return runGitCommand(['rev-parse','--abbrev-ref','--symbolic-full-name','@{u}'],cwd).catch(function(err) { if (/no upstream configured for branch/i.test(err.message)) { return null; } throw err; }) }, getBranches: getBranches, // getBranchInfo: getBranchInfo, checkoutBranch: function(cwd, branchName, isCreate) { var args = ['checkout']; if (isCreate) { args.push('-b'); } args.push(branchName); return runGitCommand(args,cwd); }, deleteBranch: function(cwd, branchName, isRemote, force) { if (isRemote) { throw new Error("Deleting remote branches not supported"); } var args = ['branch']; if (force) { args.push('-D'); } else { args.push('-d'); } args.push(branchName); return runGitCommand(args, cwd); }, getBranchStatus: getBranchStatus, addRemote: addRemote, removeRemote: removeRemote }
apache-2.0
freedot/tstolua
tests/cases/compiler/genericFunduleInModule2.ts
146
// Compiler test fixture: declaration merging of a generic exported function
// `A.B<T>` with an exported module `A.B` across two `module A` blocks.
// `var b: A.B` then uses the merged name in a type position and `A.B(1)`
// in a call position. NOTE(review): likely exercises an error/diagnostic
// baseline — the code itself must stay byte-identical; confirm against the
// recorded compiler baseline before changing anything here.
module A { export function B<T>(x: T) { return x; } } module A { export module B { export var x = 1; } } var b: A.B; A.B(1);
apache-2.0
nicevoice/qoophp-blog
application/Controllers/Frontend/SearchController.php
2015
<?php
/**
 * Front-end full-text search controller backed by a Sphinx index.
 *
 * @author: Rogee<rogeeyang@gmail.com>
 */
namespace Controllers;

use Common\FrontControllerBase;
use Library\Paginator\Pager;
use Models\Article;

// Relies on the bundled Sphinx client class (\Library\SphinxClient).
class SearchController extends FrontControllerBase
{
    public function initialize()
    {
        // Localized page title: "搜索" ("search"). Runtime string — do not translate.
        $this->setTitle('搜索');
        parent::initialize();
    }

    /**
     * Runs the full-text query from the `q` request parameter against the
     * Sphinx index and renders a paginated result list, or picks the
     * `search/noitem` view when there is nothing to show (empty keyword,
     * zero hits, or a Sphinx failure).
     */
    public function indexAction()
    {
        $this->view->setTemplateAfter('columns');

        $keyword = $this->request->get('q');
        if (trim($keyword) == '') {
            return $this->view->pick('search/noitem');
        }
        $this->setVar('key', $keyword);

        // Results per page; drives BOTH the Sphinx LIMIT and the pager math below.
        $page_cnt = 10;
        $current = $this->request->get('page', 'int', 1);
        $page_start = ($current - 1) * $page_cnt;

        $cl = new \Library\SphinxClient();
        $cl->SphinxClient();
        $cl->SetServer('127.0.0.1', 9312);
        // Return matches as plain arrays rather than objects keyed by doc id.
        $cl->SetArrayResult(true);
        // Equivalent of SQL `LIMIT $page_start, $page_cnt`.
        $cl->SetLimits($page_start, $page_cnt);

        // Fields without a sql_attr_* declaration in the index config are
        // searchable as full text. Query() returns false on connection or
        // query failure, so degrade to the "no results" view instead of
        // indexing into a boolean (previously an unguarded access).
        $res = $cl->Query($keyword, "main_qoo_blog");
        if ($res === false) {
            return $this->view->pick('search/noitem');
        }

        $total = $res['total_found'];
        if ($total == 0) {
            return $this->view->pick('search/noitem');
        }

        $in_array = array();
        foreach ($res['matches'] as $item) {
            $in_array[] = $item['id'];
        }

        // Load the full article rows for the matched document ids.
        $data = Article::getSearchResult($in_array);
        $this->setVar('data', $data);

        // BUG FIX: total_pages previously used a hard-coded 10; keep it in
        // sync with the page size actually used for the Sphinx LIMIT so the
        // pager cannot drift if $page_cnt changes.
        $pager = new \Library\Paginator\Adapter\Search(array(
            'total_pages' => ceil($total / $page_cnt),
            'current' => $current
        ));
        $this->setVar('pages', $pager);
    }
}
apache-2.0
huysentruitw/projecto
src/Projecto.Autofac/ProjectorBuilderExtensions.cs
3621
/*
 * Copyright 2017 Wouter Huysentruit
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using Autofac;

namespace Projecto.Autofac
{
    /// <summary>
    /// Autofac-specific extension methods for <see cref="ProjectorBuilder{TProjectionKey, TMessageEnvelope}"/>.
    /// </summary>
    public static class ProjectorBuilderExtensions
    {
        /// <summary>
        /// Registers every <see cref="IProjection{TProjectionKey, TMessageEnvelope}"/> implementation
        /// known to the Autofac container with the given builder.
        /// </summary>
        /// <typeparam name="TProjectionKey">The type of the key that uniquely identifies a projection.</typeparam>
        /// <typeparam name="TMessageEnvelope">The type of the message envelope used to pass the message including custom information to the handler.</typeparam>
        /// <param name="builder">The builder.</param>
        /// <param name="componentContext">The Autofac component context.</param>
        /// <returns>The same <see cref="ProjectorBuilder{TProjectionKey, TMessageEnvelope}"/> so calls can be chained.</returns>
        public static ProjectorBuilder<TProjectionKey, TMessageEnvelope> RegisterProjectionsFromAutofac<TProjectionKey, TMessageEnvelope>(
            this ProjectorBuilder<TProjectionKey, TMessageEnvelope> builder,
            IComponentContext componentContext)
            where TProjectionKey : IEquatable<TProjectionKey>
            where TMessageEnvelope : MessageEnvelope
        {
            // Resolve the full set of projections in one go and hand them to the builder.
            builder.Register(componentContext.Resolve<IEnumerable<IProjection<TProjectionKey, TMessageEnvelope>>>());
            return builder;
        }

        /// <summary>
        /// Configures the builder so that per-message dependency scopes are created as
        /// child Autofac lifetime scopes of the container's root scope.
        /// </summary>
        /// <typeparam name="TProjectionKey">The type of the key that uniquely identifies a projection.</typeparam>
        /// <typeparam name="TMessageEnvelope">The type of the message envelope used to pass the message including custom information to the handler.</typeparam>
        /// <param name="builder">The builder.</param>
        /// <param name="componentContext">The Autofac component context.</param>
        /// <returns>The same <see cref="ProjectorBuilder{TProjectionKey, TMessageEnvelope}"/> so calls can be chained.</returns>
        public static ProjectorBuilder<TProjectionKey, TMessageEnvelope> UseAutofacDependencyLifetimeScopeFactory<TProjectionKey, TMessageEnvelope>(
            this ProjectorBuilder<TProjectionKey, TMessageEnvelope> builder,
            IComponentContext componentContext)
            where TProjectionKey : IEquatable<TProjectionKey>
            where TMessageEnvelope : MessageEnvelope
        {
            // Each invocation of the factory opens a fresh child scope off the root scope.
            var rootScopeFactory = componentContext.Resolve<Func<ILifetimeScope>>();
            builder.SetDependencyLifetimeScopeFactory(
                new AutofacDependencyLifetimeScopeFactory(() => rootScopeFactory().BeginLifetimeScope()));
            return builder;
        }
    }
}
apache-2.0
kabassociates/aribaweb
src/util/ariba/util/core/SystemUtil.java
50214
/* Copyright 1996-2012 Ariba, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. $Id: //ariba/platform/util/core/ariba/util/core/SystemUtil.java#38 $ */ package ariba.util.core; import ariba.util.log.Log; import ariba.util.shutdown.ShutdownManager; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.net.InetAddress; import java.net.URL; import java.net.UnknownHostException; import java.util.Comparator; import java.util.List; import java.util.Map; /** System Utilities. These are helper functions for dealing with system functions. @aribaapi documented */ public final class SystemUtil { /* No constructor since no non-static methods. */ private SystemUtil () { } /* Static initialization of the ariba, config, install, and internal directories */ static String SystemDirectoryString; private static File SystemDirectory; static String ConfigDirectoryString; private static File ConfigDirectory; private static URL ConfigURL; private static File LocalTempDirectory; private static File SharedTempDirectory; private static String InternalDirectoryString = "internal"; private static File InternalDirectory; private static File InstallDirectory; private static boolean IsDevelopment; static { // setSystemDirectory and setConfigDirectory calls were moved // from here to avoid logging initialization recursion. 
// See assureSystemDirectory and assureConfigDirectory. InstallDirectory = getCwdFile(); InternalDirectory = new File(InstallDirectory, InternalDirectoryString); IsDevelopment = hasInternalDirectory(); } public static void sleep (long sleepTime) { if (sleepTime <= 0) { return; } try { Thread.sleep(sleepTime); } catch (InterruptedException e) { consumeInterruptedException(e); } } /** Check if two objects are equal in a null safe manner. @param one the first of two objects to compare @param two the second of two objects to compare @return <b>true</b> if the two objects <B>one</B> and <B>two</B> are equals according to the equals() method of <B>one</B> or if they're both null. <b>false</b> is returned otherwise. @aribaapi documented */ public static boolean equal (Object one, Object two) { if (one != null) { return one.equals(two); } return (two == null); } /** Convenience method that returns the <code>hashCode()</code> of <code>object</code>. The case of a <code>null</code> <code>object</code> is handled; zero is returned. <p/> This method is complementary to the method {@link #equal} which tests two objects for equality in a <code>null</code> safe manner. <p/> @param object the object for which to return the hashcode @return the hashcode value or <code>0</code> if <code>object</code> is <code>null</code> @aribaapi ariba */ public static int hashCode (Object object) { return object != null ? object.hashCode() : 0; } /** Check if two objects are equal using a deeper way to determine their equality. 
The &quot;deeper way &quot; is using ListUtil.listEquals for list objects, MapUtil.mapEquals for map objects, and ArrayUtil.arrayEquals for array objects @param one the first of two objects to compare @param two the second of two objects to compare @return <b>true</b> if the two objects are equal as defined above @aribaapi private */ static boolean objectEquals (Object one, Object two) { if (one == null && two == null) { return true; } if (one == null || two == null) { return false; } if ((one instanceof Map) && (two instanceof Map)) { return MapUtil.mapEquals((Map)one,(Map)two); } else if ((one instanceof List) && (two instanceof List)) { return ListUtil.listEquals((List)one, (List)two); } else if ((one instanceof Object[]) && (two instanceof Object[])) { return ArrayUtil.arrayEquals((Object[])one, (Object[])two); } else { return one.equals(two); } } /** Compares <code>first</code> to <code>second</code> and returns a negative, zero or postive integer as <code>first</code> is less than, equal to or greater than <code>second</code>, respectively. <p> <code>first</code> and <code>second</code> may be <code>null</code>. A <code>null</code> object is always considered to be less than any non-<code>null</code> object. <p> @param first the first <code>Comparable</code> to compare, may be <code>null</code> @param second the first <code>Comparable</code> to compare, may be <code>null</code> @return the result of the comparison of <code>first</code> and <code>second</code> @aribaapi ariba */ public static int compare (Comparable first, Comparable second) { return compare(first, second, false); } /** Compares <code>first</code> to <code>second</code> and returns a negative, zero or postive integer as <code>first</code> is less than, equal to or greater than <code>second</code>, respectively. <p> <code>first</code> and <code>second</code> may be <code>null</code>. A <code>null</code> object is always considered to be less than any non-<code>null</code> object. 
<p> @param first the first <code>Comparable</code> to compare, may be <code>null</code> @param second the first <code>Comparable</code> to compare, may be <code>null</code> @return the result of the comparison of <code>first</code> and <code>second</code> @aribaapi ariba */ public static int compare ( Comparable first, Comparable second, boolean nullComparesGreaterThanNonNull ) { int result = 0; if (first != null) { if (second != null) { return first.compareTo(second); } result = +1; } else if (second != null) { result = -1; } return nullComparesGreaterThanNonNull ? -result : result; } /** Compares <code>first</code> to <code>second</code> using the supplied <code>comparator </code>and returns a negative, zero or postive integer as <code>first</code> is less than, equal to or greater than <code>second</code>, respectively. <p> <code>first</code> and <code>second</code> may be <code>null</code>. A <code>null</code> object is always considered to be less than any non-<code>null</code> object. <p> @param first the first <code>Comparable</code> to compare, may be <code>null</code> @param second the first <code>Comparable</code> to compare, may be <code>null</code> @param comparator the <code>Comparator</code> to use when comparing, may not be <code>null</code> @return the result of the comparison of <code>first</code> and <code>second</code> @aribaapi ariba */ public static int compare (Object first, Object second, Comparator comparator) { return compare(first, second, comparator, false); } /** Compares <code>first</code> to <code>second</code> using the supplied <code>comparator </code>and returns a negative, zero or postive integer as <code>first</code> is less than, equal to or greater than <code>second</code>, respectively. <p> <code>first</code> and <code>second</code> may be <code>null</code>. A <code>null</code> object is always considered to be less than any non-<code>null</code> object. 
<p> @param first the first <code>Comparable</code> to compare, may be <code>null</code> @param second the first <code>Comparable</code> to compare, may be <code>null</code> @param comparator the <code>Comparator</code> to use when comparing @return the result of the comparison of <code>first</code> and <code>second</code> @aribaapi ariba */ public static int compare ( Object first, Object second, Comparator comparator, boolean nullComparesGreaterThanNonNull ) { int result = 0; if (first != null) { if (second != null) { if (comparator != null) { return comparator.compare(first, second); } return ((Comparable)first).compareTo(second); } result = +1; } else if (second != null) { result = -1; } return nullComparesGreaterThanNonNull ? -result : result; } /** Compares <code>first</code> and <code>second</code> and returns the result. <p/> If <code>falseLessThanTrue == true</code> then <code>-1</code> is returned if <code>!first && second</code> and <code>+1</code> is returned if <code>first && !second</code> and <code>0</code> when <code>first == second</code>. <p/> If <code>falseLessThanTrue == false</code> the result is the opposite. <p/> @param first the first of the two values to compare @param second the second of the two values to compare @param falseLessThanTrue whether or not <code>false</code> should be considered less than <code>true</code> @return the result @aribaapi ariba */ public static int compare ( boolean first, boolean second, boolean falseLessThanTrue ) { int result = (first == second) ? 0 : (second ? -1 : +1); return falseLessThanTrue ? result : -result; } /** Convenience function that calls {@link #compare(boolean,boolean,boolean)} with <code>falseLessThanTrue == true</code>. <p/> @aribaapi ariba */ public static int compare (boolean first, boolean second) { return compare(first, second, true); } /** If ExitException is true, SystemUtil.exit will throw an ExitException instead of calling System's exit() method. 
This is currently for the uses of the TestHarness which doesn't want the process to really exit. @aribaapi private @see ariba.util.core.SystemUtil#setExitException */ private static boolean ExitException; /** Used for unit tests to cause SystemUtil.exit() to thrown an exception rather than bring down the process. @aribaapi private */ public static void setExitException (boolean value) { ExitException = value; } /** Used for unit tests to check if an exception will be thrown when SystemUtil.exit() is calld. @aribaapi private */ public static boolean getExitException () { return ExitException; } /** All our unit test classes starts with this prefix. */ private static final String EligibleClassPrefix = "test.ariba."; /** Checks to make sure we are the specified class and method are invoked from unit tests. If not, will Assert. This method is useful if there are methods that should be called by our unit tests. It makes sure no other code can call the given class/method. @param className the class name @param methodName the method name @aribaapi ariba */ public static final void checkCalledFromUnitTest (String className, String methodName) { StackTraceElement[] callStack = (new Throwable()).getStackTrace(); for (int i=0; i<callStack.length; i++) { if (callStack[i].getClassName().startsWith(EligibleClassPrefix)) { return; } } if (StringUtil.nullOrEmptyOrBlankString(methodName)) { Assert.that(false, "You are not allowed to invoke %s", className); } else { Assert.that(false, "You are not allowed to invoke %s.%s", className, methodName); } } /** Terminate the java process. All shutdown hooks which are registered by components in the server will be run. 
@param code the exit code to pass to System.exit() @aribaapi documented */ public static void exit (int code) { if (getExitException()) { try { flushOutput(); } catch (Throwable e) { // OK // don't want to allow problem in flushOutput to // cause a stack unwind } throw new ExitException(code); } else { ShutdownManager.forceShutdown(code); } } /** Compares <B>len</B> bytes in array <B>a</B> starting with byte <B>aIndex</B> with <B>len</B> bytes in array <B>b</B> starting with byte <B>bIndex</B>. @param a array of memory to compare with b @param aIndex where to start compare in array a @param b array of memory to compare with a @param bIndex where to start compare in array b @param len the number of bytes to compare. @return <b>true</b> if each byte compared is equal, <b>false</b> otherwise @aribaapi documented */ public static boolean memoryCompare (byte[] a, int aIndex, byte[] b, int bIndex, int len) { Assert.that((aIndex >= 0) && (bIndex >= 0), "both indexes must be greater or equal to 0."); if ((a.length - aIndex < len) || (b.length - bIndex < len)) { return false; } for (int aTotal = aIndex + len; aIndex < aTotal; aIndex++, bIndex++) { if (a [aIndex] != b [bIndex]) { return false; } } return true; } /** Compute the local host name and cache it. We don't initialize 'hostname' in the declaration since it can trigger security exceptions in the client. It should only be initialized as needed. */ private static String HOST_NAME = null; /** Get the hostname of this machine. If you are in the server, you should use Server.hostname() which consults the parameters in case they override the name the OS returns @return the hostname for this machine @see ariba.rpc.server.Server#hostname() @aribaapi private */ public static String getHostname () { if (HOST_NAME == null) { HOST_NAME = getHost().getHostName(); } return HOST_NAME; } private static InetAddress HOST = null; /** Get the InetAddress for this machine. 
@return the InetAddress for this machine @aribaapi documented */ public static InetAddress getHost () { if (HOST == null) { HOST = setupHost(); } return HOST; } private static InetAddress setupHost () { try { return InetAddress.getLocalHost(); } catch (UnknownHostException e1) { Log.util.error(2767, e1); try { // should return 127.0.0.1 w/o exception... return InetAddress.getByName(null); } catch (UnknownHostException e2) { Log.util.error(2768, e2); return null; } } } /** helper function to convert hostname into int for database storage @aribaapi private */ public static int hostAsInt () { InetAddress tmpHost = getHost(); byte [] hostAsBytes = tmpHost.getAddress(); int bytes = hostAsBytes[0]; bytes = bytes << 8; bytes = bytes + hostAsBytes[1]; bytes = bytes << 8; bytes = bytes + hostAsBytes[2]; bytes = bytes << 8; bytes = bytes + hostAsBytes[3]; return bytes; } /** Find the current working directory. @return the present working directory of this VM @aribaapi documented */ public static String getPwd () { return System.getProperty("user.dir"); } /** Get the current working directory as a String. @return current working directory as a String @aribaapi documented */ public static String getCwd () { return getCwdFile().getAbsolutePath(); } /** Get the current working directory as a File @return current working directory as a File @aribaapi documented */ public static File getCwdFile () { //return new File("./"); String installDir = System.getProperty("ariba.server.home"); return new File(installDir != null ? StringUtil.strcat(installDir,"/./") : "./"); } /** Return the default file encoding of this VM @return the default file encoding of this VM @aribaapi documented */ public static String getFileEncoding () { return System.getProperty("file.encoding"); } /** Return the class path of this VM. 
See also bootClassPath() @return the class path of this VM @aribaapi documented */ public static String getClassPath () { return System.getProperty("java.class.path"); } /** Returns the boot classpath of this VM. This is only really relevant for the Sun VM. There is a switch you can use to start the VM that sets the bootClassPath instead of the classpath. @return the boot classpath of this VM @aribaapi documented */ public static String getBootClassPath () { return System.getProperty("sun.boot.class.path"); } /** Returns the complete classpath used by this VM. It contains the boot classpath as well as the class path. */ public static String getCompleteClassPath () { String bootClassPath = SystemUtil.getBootClassPath(); String classPath = SystemUtil.getClassPath(); if (StringUtil.nullOrEmptyString(classPath)) { return bootClassPath; } if (StringUtil.nullOrEmptyString(bootClassPath)) { return classPath; } return StringUtil.strcat( bootClassPath, SystemUtil.getPathSeparator(), classPath); } /** Return the path separator of this VM. That is : for Unix and ; for Windows. @return the path separator of this VM. That is : for Unix and ; for Windows. @aribaapi documented */ public static String getPathSeparator () { return System.getProperty("path.separator"); } /** Return the path separator of this VM. That is : for Unix and ; for Windows. @return the path separator of this VM. That is : for Unix and ; for Windows. @aribaapi documented */ public static char getPathSeparatorChar () { return getPathSeparator().charAt(0); } /** Return the OS architecture of this VM @return the OS architecture of this VM @aribaapi documented */ public static String getArchitecture () { return System.getProperty("os.arch"); } /** Return the OS name of this VM @return the OS name of this VM @aribaapi documented */ public static String getOperatingSystem () { return System.getProperty("os.name"); } /** Return the OS type of this VM. 
It will return either Win32, SunOS, AIX, HP-UX, or whatever getOperatingSystem returns if unknown. @return the OS type of this VM @aribaapi documented */ public static String getOperatingSystemType () { String os = getOperatingSystem(); if (os.indexOf("Windows") != -1) { return "Win32"; } else if (os.indexOf("Solaris") != -1 || os.indexOf("SunOS") != -1) { return "SunOS"; } else if (os.indexOf("AIX") != -1) { return "AIX"; } else if (os.indexOf("HP") != -1) { return "HP-UX"; } else if (os.indexOf("Linux") != -1) { return "Linux"; } else { return os; } } /** Return this VM's vendor @return the vendor of this VM @aribaapi documented */ public static String getJavaVendor () { return System.getProperty("java.vendor"); } /** Return the current user name @return the current user name @aribaapi documented */ public static String getUserName () { return System.getProperty("user.name"); } /** Determine if the system is a Windows based system. @return <b>true</b> if the operating system is Windows 95 or NT or 2000, <b>false</b> otherwise @aribaapi documented */ public static final boolean isWin32 () { return (getOperatingSystem().indexOf("Windows") != -1); } /** Determine if the system is a Sun based system. @return <b>true</b> if the operating system is Sun OS <b>false</b> otherwise @aribaapi documented */ public static final boolean isSunOS () { return (getOperatingSystem().indexOf("Solaris") != -1) || (getOperatingSystem().indexOf("SunOS") != -1); } /** Determine if the system is a HP based system. @return <b>true</b> if the operating system is HP-UX <b>false</b> otherwise @aribaapi documented */ public static final boolean isHP () { return (getOperatingSystem().indexOf("HP") != -1); } /** Determine if the system is a AIX based system. 
@return <b>true</b> if the operating system is AIX <b>false</b> otherwise @aribaapi documented */ public static final boolean isAIX () { return (getOperatingSystem().indexOf("AIX") != -1); } /** Determine if the system is a Linux based system. @return <b>true</b> if the operating system is Linux <b>false</b> otherwise @aribaapi documented */ public static final boolean isLinux () { return (getOperatingSystem().indexOf("Linux") != -1); } /** @aribaapi private */ private static BufferedReader IN; /** @aribaapi private */ private static PrintWriter OUT; /** @aribaapi private */ private static PrintWriter ERR; /** Get a Reader version of System.in @return a Reader version of System.in @aribaapi documented */ public static BufferedReader in () { if (IN == null) { try { IN = IOUtil.bufferedReader(System.in, IOUtil.getDefaultSystemEncoding()); } catch (UnsupportedEncodingException ex) { Assert.that(false, "Unable to use default system encoding %s", ex); } } return IN; } /** Get a Writer version of System.out @return a Writer version of System.out @aribaapi documented */ public static PrintWriter out () { if (OUT == null) { try { OUT = IOUtil.printWriter(System.out, IOUtil.getDefaultSystemEncoding()); } catch (UnsupportedEncodingException ex) { Assert.that(false, "Unable to use default system encoding %s", ex); } } return OUT; } /** Get a Writer version of System.err @return a Writer version of System.err @aribaapi documented */ public static PrintWriter err () { if (ERR == null) { try { ERR = IOUtil.printWriter(System.err, IOUtil.getDefaultSystemEncoding(), true); } catch (UnsupportedEncodingException ex) { Assert.that(false, "Unable to use default system encoding %s", ex); } } return ERR; } /** Set the output stream returned by SystemUtil.out(); @aribaapi private */ public static void setOut (PrintWriter pw) { OUT = pw; } /** Set the output stream returned by SystemUtil.out(); @aribaapi private */ public static void setErr (PrintWriter pw) { ERR = pw; } /** Flush system 
out and system error. @aribaapi documented */ public static void flushOutput () { if (OUT != null) { OUT.flush(); } if (ERR != null) { ERR.flush(); } } /** Prevent a compiler warning when you don't want to do something in a catch block. The arguments are passed so that at a later point in time we could add some kind of logging to see where in our code we use this call. @param reason text reason for why you don't want to do anything. @param t throwable (usally exception) that was thrown. */ public static final void consumeException (String reason, Throwable t) { return; } /** Provide proper handling of InterruptedException from wait() and sleep() methods. @param e exception that was thrown. */ public static final void consumeInterruptedException (InterruptedException e) { Log.util.info(2897, stackTrace(e)); // should this assert? Probably should. } /** Get a string which represents the current call stack. @return a string which represents the current call stack @aribaapi documented */ public static String stackTrace () { return stackTrace(new Exception("Stack trace")); } /** Returns String stackTrace for current thread, reduced to focus on most relevant stack frames for analyzing application execution. It removes the tail of the stackTrace starting with the ariba.ui.servletadaptor frame, because the details of servlet dispatch are boring and not helpful. It elides (replaces with ...) all ariba.ui frames (except AWKeyPathBinding), because they are boring and voluminous, and almost never helpful for understanding application performance. It elides the details of reflexive method invokation under FieldValue_Object.getFieldValue. Etc. @aribaapi private */ public static String stackTraceCodePath () { return stackTraceCodePath(stackTrace()); } /** Returns String stackTrace reduced to focus on most relevant stack frames for analyzing application execution, given a String stackTrace. 
It removes the tail of the stackTrace starting with the ariba.ui.servletadaptor frame, because the details of servlet dispatch are boring and not helpful. It elides (replaces with ...) all ariba.ui frames (except AWKeyPathBinding), because they are boring and voluminous, and almost never helpful for understanding application performance. It elides the details of reflexive method invokation under FieldValue_Object.getFieldValue. @aribaapi private */ public static String stackTraceCodePath (String st) { // Discard everything through last SystemUtil.stackTrace frame st = st.replaceAll( "(?s)^.*\tat ariba\\.util\\.core\\.SystemUtil\\.stackTrace.*?\n", ""); // Discard everything starting with first servletadapter stack frame, boring. // It is never interesting to see the internals of servlet dispatching. st = st.replaceAll("(?s)at ariba\\.ui\\.servletadaptor\\..*$", ""); // Discard everything starting with first rpc.server stack frame, boring. st = st.replaceAll("(?s)at ariba\\.rpc\\.server\\..*$", ""); // Discard everything starting with first ScheduledTask.run stack frame, boring. st = st.replaceAll( "(?s)at ariba\\.util\\.scheduler\\.ScheduledTask\\.run.*$", ""); // Discard java.lang.Thread.run frame. st = st.replaceAll("\tat java\\.lang\\.Thread\\.run.*\n", ""); // Protect AWKeyPathBinding from removal, good clue of AWL binding code path. st = st.replaceAll( "ariba\\.ui\\.aribaweb\\.core\\.AWKeyPathBinding", "ariba\\.UI\\.aribaweb\\.core\\.AWKeyPathBinding"); // Elide all other contiguous ariba.ui stack frames, only aribaweb developers can // get much from them, and there are often hundreds of them. Focus on app frames. st = st.replaceAll("(?m)(^\\s+at ariba\\.ui\\..*?$)+", "\t...\n"); // Elide all fieldsui ARPPage frames, don't really help much. st = st.replaceAll("\tat ariba\\.htmlui\\.fieldsui\\.ARPPage\\..*\n", "\t...\n"); // Restore protected ariba.ui... stack frames. 
st = st.replaceAll("ariba\\.UI\\.", "ariba\\.ui\\."); // Elide seven stack frame block associated with reflexive method invokation under // FieldValue_Object.getFieldValue. st = st.replaceAll( "(?m)(^\\s+at (sun\\.reflect\\.|java\\.lang\\.reflect\\..|" + "ariba\\.util\\.fieldvalue\\.ReflectionMethodGetter\\.|" + "ariba\\.util\\.fieldvalue\\.FieldPath\\.getFieldValue|" + "ariba\\.util\\.fieldvalue\\.FieldValue_Object\\.).*?$)+", "\t...\n"); // Elide all contiguous javascript frames until last one, mozilla and ariba. st = st.replaceAll( "(\tat org\\.mozilla\\.javascript\\..*?\n)" + "(?:\tat (?:org\\.mozilla|ariba\\.util)\\.javascript\\..*?\n)+" + "(\tat ariba\\.util\\.javascript\\..*?\n)", "\t...\n$2"); // ***** Final cleanups ***** // Keep only the first of repeated calls to the same method path, maybe // interleaved with ellipsis. st = st.replaceAll( "(\tat .*?)\\((.*?):(\\d+)\\)\\s*\n" + //1 1 (2 2 3 3 ) "(?:(?:\t\\.\\.\\.\\s*\n)*\\1\\(\\2:\\d+\\)\\s*\n)+", //a b b ( ) a "$1($2:$3)\n\t...\n"); // If we put in two or more successive ellipsises, compress to one. st = st.replaceAll("(\t\\.\\.\\.\\s*\n){2,}", "\t...\n"); // Get rid of dangling ellipsis at the end. st = st.replaceAll("\t\\.\\.\\.\\s*\n\\s*$", ""); // Get rid of blank lines. st = st.replaceAll("\n\\s*\n", "\n"); // Move ellipsis ... to the end of preceding frame line for final format. st = st.replaceAll("(\\s*at .*?)\\s*\n\t\\.\\.\\.\\s*\n", "$1 ...\n"); // Put a blank line at the beginning to set off the stacktrace. st = StringUtil.strcat("\n", st); return st; } /** Get a string which represents the Throwable's call stack. 
@param t the Throwable to get the stack trace from @return a string which represents the Throwable's call stack @aribaapi documented */ public static String stackTrace (Throwable t) { StringWriter stringWriter = new StringWriter(); PrintWriter printWriter = new PrintWriter(stringWriter); t.printStackTrace(printWriter); printWriter.close(); try { stringWriter.close(); } catch (IOException e) { // Sun changed StringWriter to throw IOException in // JDK 1.2. Thank you. Assert.that(false, "IOException in SystemUtil.stackTrace"); } return stringWriter.toString(); } /** Returns the root cause that cause that causes this exception. @param t the Throwable instance whose root cause is to be returned. If this is null, then <code>null</code> is returned. @return the root cause that cause that causes this exception, can be <code>null</code> @aribaapi documented */ public static Throwable getRootCause (Throwable t) { if (t == null) { return null; } Throwable cause = t.getCause(); while (cause != null) { t = cause; cause = t.getCause(); } return t; } private static Map _environment; /** For internal use only @return the complete current environment @aribaapi ariba */ public static synchronized Map getenv () { if (_environment == null) { try { _environment = MapUtil.map(); String[] cmdArray; if (isWin32()) { cmdArray = new String[] { "cmd", "/c", "set"}; } else { cmdArray = new String[] { "env" }; } Process process = Runtime.getRuntime().exec(cmdArray); InputStream input = process.getInputStream(); String line = IOUtil.readLine(input); while (line != null) { int posEqual = line.indexOf('='); if (posEqual == -1) { Log.util.debug("Wrong format from env : %s", line); } else { String key = line.substring(0, posEqual); if (isWin32()) { key = key.toUpperCase(); } String value = line.substring(posEqual + 1); _environment.put(key, value); } line = IOUtil.readLine(input); } } catch (IOException e) { Log.util.debug("Unexpected IOException : %s", e); } } return _environment; } /** For 
internal use only @param envVar the name of the requested environment variable. Must not be null. @return the value of the requested environment variable. Null if the environment variable does not exist @aribaapi ariba */ public static synchronized String getenv (String envVar) { if (_environment == null) { getenv(); } if (isWin32()) { envVar = envVar.toUpperCase(); } return (String)_environment.get(envVar); } // Control flag to run server under J2EEServer process private static boolean J2EEServerInUse = true; public static void setJ2EEServerInUse () { J2EEServerInUse = true; } public static void setJ2EEServerInUse (boolean flag) { J2EEServerInUse = flag; } public static boolean usingJ2EEServer () { return J2EEServerInUse; } /* Support for non standard names for ariba and config directories */ /** Defines the location of the system directory. @param systemDir path of the system directory. Cannot be null. @aribaapi ariba @see #getSystemDirectory */ public static final void setSystemDirectory (String systemDir) { Assert.that(systemDir != null, "systemDir is null !"); SystemDirectoryString = systemDir; SystemDirectory = new File(FileUtil.fixFileSeparators(systemDir)); } /** Defines the location of the config directory. @param configDir path of the config directory. Cannot be null. @aribaapi ariba @see #getConfigDirectory */ public static final void setConfigDirectory (String configDir) { Assert.that(configDir != null, "configDir is null !"); ConfigDirectoryString = configDir; ConfigDirectory = new File(FileUtil.fixFileSeparators(configDir)); ConfigURL = URLUtil.urlAbsolute(ConfigDirectory); } /** Defines the location of the local temporary directory. @param tempDir path of the temp directory. Cannot be null. 
@aribaapi ariba @see #getLocalTempDirectory */ public static final void setLocalTempDirectory (String tempDir) { Assert.that(tempDir != null, "tempDir is null !"); LocalTempDirectory = new File(FileUtil.fixFileSeparators(tempDir)); try { FileUtil.directory(LocalTempDirectory); } catch (IOException e) { Log.util.warning(8905, SystemUtil.stackTrace(e)); } } /** Defines the location of the shared temporary directory. @param tempDir path of the temp directory. Cannot be null. @aribaapi ariba @see #getSharedTempDirectory */ public static final void setSharedTempDirectory (String tempDir) { Assert.that(tempDir != null, "tempDir is null !"); SharedTempDirectory = new File(FileUtil.fixFileSeparators(tempDir)); try { FileUtil.directory(SharedTempDirectory); } catch (IOException e) { Log.util.warning(8905, SystemUtil.stackTrace(e)); } } /** Provide default setup for system directory if needed. This provides lazy initialization since we want to avoid calls at static initialization that could result in logging. There have been bugs due to logging static initialization recursion. @aribaapi private */ private static final void assureSystemDirectory () { if (SystemDirectoryString == null) { // no need to synchronize here--the last one wins setSystemDirectory(Constants.getDefaultSystemDir()); } } /** Provide default setup for configuration directory if needed. This provides lazy initialization since we want to avoid calls at static initialization that could result in logging. There have been bugs due to logging static initialization recursion. @aribaapi private */ private static final void assureConfigDirectory () { if (ConfigDirectoryString == null) { // no need to synchronize here--the last one wins setConfigDirectory(Constants.getDefaultConfigDir()); } } /** Provide default setup for configuration directory if needed. This provides lazy initialization since we want to avoid calls at static initialization that could result in logging. 
There have been bugs due to logging static initialization recursion. @aribaapi private */ private static final void assureTempDirectory () { if (LocalTempDirectory == null) { setLocalTempDirectory(Constants.getDefaultTempDirectory()); } if (SharedTempDirectory == null) { setSharedTempDirectory(Constants.getDefaultTempDirectory()); } } /** Returns the sytem directory. @return the sytem directory. @aribaapi documented */ public static File getSystemDirectory () { assureSystemDirectory(); return SystemDirectory; } /** Returns the config directory. @return the config directory. @aribaapi documented */ public static File getConfigDirectory () { assureConfigDirectory(); return ConfigDirectory; } /** @return the config directory name as a string @aribaapi documented */ public static String getConfigDirectoryString () { assureConfigDirectory(); return ConfigDirectoryString; } /** Returns the config URL. @return the config URL. @aribaapi documented */ public static URL getConfigURL () { assureConfigDirectory(); return ConfigURL; } /** Returns the temporary directory for the local server This directory can be used to store temporary files which do not need to be shared accross multiple processes @return the local temporary directory @aribaapi documented */ public static File getLocalTempDirectory () { assureTempDirectory(); return LocalTempDirectory; } /** Returns the temporary directory which can be shared accross multiple processes This directory can be used to store temporary files which require to be accessible by different nodes @return the shared temporary directory @aribaapi documented */ public static File getSharedTempDirectory () { assureTempDirectory(); return SharedTempDirectory; } /** Modify a relative file path to use custom config and directory locations @aribaapi ariba */ public static String fixRelativePath (String path) { if (path != null) { //Make sure path is relative if (!path.startsWith("/")) { if (path.startsWith("ariba/")) { path = StringUtil.strcat( 
SystemDirectoryString, path.substring(path.indexOf("/"))); } else if (path.startsWith("config/")) { path = StringUtil.strcat( ConfigDirectoryString, path.substring(path.indexOf("/"))); } } return path; } return null; } /** Returns the installation directory. Note: this might return am empty abstract path File, so be prepared to deal with that. On Mac, unix, and windows it should be correct, though. @return the installation directory or an empty abstract path @aribaapi ariba */ public static File getInstallDirectory () { return InstallDirectory; } /** Are we in development, not production mode? This is a low level check needed for internal testing. Where possible clients should use the higher level isProduction method defined in ServerInterface. @return true if development, false if production @aribaapi ariba @see ariba.util.core.ServerInterface#isProduction() */ public static boolean isDevelopment () { return IsDevelopment; } /** Does the install directory have an internal subdirectory? @ return true if <install>/internal exists, false otherwise @aribaapi private */ private static boolean hasInternalDirectory () { return InternalDirectory.isDirectory(); } /** Returns the internal directory. Can be null. @return the internal directory, or <code>null</code> if the internal directory does not exist. @aribaapi ariba */ public static File getInternalDirectory () { return InternalDirectory.isDirectory() ? InternalDirectory : null; } /** * Validates that a method is allowed to call another method. * @param validCallers an array of valid callers. See the ValidCaller * constructor for details on what an entry looks like. * To improve this method's performance, valid callers that require * a search through the full stack should be at the end of the array. * @param fatalAssert if true, an invalid caller causes a fatal assert, otherwise * a non-fatal assert. 
* @aribaapi private */ public static void validateCaller (ValidCaller[] validCallers, boolean fatalAssert) { /** * The index in the stack array to start looking for a valid caller. Indexes are: * 0 - Thread.getStackTrace() * 1 - This method * 2 - This method's caller which is the method requesting validation of its caller. * 3 - Caller of the requesting method. This is the caller we want to validate. */ final int startIndex = 3; boolean found = false; StackTraceElement[] stes = Thread.currentThread().getStackTrace(); Assert.that(stes.length > startIndex, "No caller on stack to validate"); for (ValidCaller validCaller : validCallers) { // If not a full stack search, we only look at one method up the stack. int endIndex = validCaller._checkFullStack ? stes.length : startIndex + 1; for (int i = startIndex; i < endIndex; i++) { StackTraceElement ste = stes[i]; String className = ste.getClassName(); String methodName = ste.getMethodName(); if (validCaller._classNameIsPrefix) { // Just need to check if the class name starts with validCaller // class name (which is probably just a package name). 
if (className.startsWith(validCaller._className) && (validCaller._methodName == null || validCaller._methodName.equals(methodName))) { found = true; break; } } else if (validCaller._className.equals(className) && (validCaller._methodName == null || validCaller._methodName.equals(methodName))) { found = true; break; } } } if (!found) { String invalidClassName = stes[startIndex].getClassName(); String invalidMethodName = stes[startIndex].getMethodName(); String requestClassName = stes[startIndex-1].getClassName(); String requestMethodName = stes[startIndex-1].getMethodName(); String msg = "validateCaller method %s.%s is not allowed to call %s.%s"; if (fatalAssert) { Assert.that(false, msg, invalidClassName, invalidMethodName, requestClassName, requestMethodName); } else { Assert.assertNonFatal(false, msg, invalidClassName, invalidMethodName, requestClassName, requestMethodName); } } } /** * A simple bean to hold the attributes of a valid caller for use by validateCaller * method. See constructor for details. * @aribaapi private */ public static class ValidCaller { public final String _className; public final String _methodName; public final boolean _classNameIsPrefix; public final boolean _checkFullStack; /** * Definition of a valid caller. * @param className the full class name (package name plus class name). * If classNameIsPrefix is true this is a prefix for the class * name, typically a package name. * @param methodName the method name. If null, any method in class is valid. * Note that method signature is not checked. * @param classNameIsPrefix when true the className is a prefix. Any full class * name that starts with the className prefix is valid. * Typically this is used to validate an entire package. * @param checkFullStack if true, the full stack is searched for a match. * This is useful when there is unknown methods (such as * reflection methods) on the stack before the valid method. 
* If false, only the immediately caller of the requesting * method is checked. * @aribaapi private */ public ValidCaller (String className, String methodName, boolean classNameIsPrefix, boolean checkFullStack) { _className = className; _methodName = methodName; _classNameIsPrefix = classNameIsPrefix; _checkFullStack = checkFullStack; } } }
apache-2.0
quarkusio/quarkus
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/cors/CORSHandlerTestCase.java
2235
package io.quarkus.vertx.http.cors; import static io.restassured.RestAssured.given; import static org.hamcrest.core.Is.is; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.test.QuarkusUnitTest; public class CORSHandlerTestCase { @RegisterExtension static QuarkusUnitTest runner = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(BeanRegisteringRoute.class) .addAsResource("conf/cors-config.properties", "application.properties")); @Test @DisplayName("Handles a preflight CORS request correctly") public void corsPreflightTestServlet() { String origin = "http://custom.origin.quarkus"; String methods = "GET,POST"; String headers = "X-Custom"; given().header("Origin", origin) .header("Access-Control-Request-Method", methods) .header("Access-Control-Request-Headers", headers) .when() .options("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", methods) .header("Access-Control-Allow-Credentials", "true") .header("Access-Control-Allow-Headers", headers); } @Test @DisplayName("Handles a direct CORS request correctly") public void corsNoPreflightTestServlet() { String origin = "http://custom.origin.quarkus"; String methods = "GET,POST"; String headers = "X-Custom"; given().header("Origin", origin) .header("Access-Control-Request-Method", methods) .header("Access-Control-Request-Headers", headers) .when() .get("/test").then() .statusCode(200) .header("Access-Control-Allow-Origin", origin) .header("Access-Control-Allow-Methods", methods) .header("Access-Control-Allow-Headers", headers) .header("Access-Control-Allow-Credentials", "true") .body(is("test route")); } }
apache-2.0
tensorflow/datasets
tensorflow_datasets/core/deprecated/text/text_encoder.py
16222
# coding=utf-8 # Copyright 2022 The TensorFlow Datasets Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # coding=utf-8 """TextEncoders convert between text and integers.""" from __future__ import unicode_literals import abc import hashlib import json import re import six import tensorflow as tf def _re_compile(pattern): return re.compile(pattern, flags=re.UNICODE) # pytype: disable=wrong-keyword-args NUM_BYTES = 2**8 ALPHANUM_REGEX = _re_compile(r"\W+") ALL_REGEX = _re_compile(r"(\W+)") class TextEncoderConfig(object): """Configuration for `tfds.features.Text`.""" def __init__(self, encoder=None, encoder_cls=None, vocab_size=None, name=None): if encoder: if (encoder_cls or vocab_size): raise ValueError("If encoder is provided, encoder_cls and " "vocab_size must be None") encoder_cls = type(encoder) vocab_size = encoder.vocab_size else: if encoder_cls is ByteTextEncoder: encoder = encoder_cls() self.encoder = encoder self.encoder_cls = encoder_cls self.vocab_size = vocab_size self.name = name @six.add_metaclass(abc.ABCMeta) class TextEncoder(object): """Abstract base class for converting between text and integers. **A note on padding**: Because text data is typically variable length and nearly always requires padding during training, ID 0 is always reserved for padding. 
To accommodate this, all `TextEncoder`s behave in certain ways: * `encode`: never returns id 0 (all ids are 1+) * `decode`: drops 0 in the input ids * `vocab_size`: includes ID 0 New subclasses should be careful to match this behavior. """ @abc.abstractmethod def encode(self, s): """Encodes text into a list of integers.""" raise NotImplementedError @abc.abstractmethod def decode(self, ids): """Decodes a list of integers into text.""" raise NotImplementedError @abc.abstractproperty def vocab_size(self): """Size of the vocabulary. Decode produces ints [1, vocab_size).""" raise NotImplementedError @abc.abstractmethod def save_to_file(self, filename_prefix): """Store to file. Inverse of load_from_file.""" raise NotImplementedError @classmethod @abc.abstractmethod def load_from_file(cls, filename_prefix): # pylint: disable=no-self-argument """Load from file. Inverse of save_to_file.""" raise NotImplementedError @classmethod def _write_lines_to_file(cls, filename, lines, metadata_dict=None): """Writes lines to file prepended by header and metadata.""" write_lines_to_file(cls.__name__, filename, lines, metadata_dict) @classmethod def _read_lines_from_file(cls, filename): return read_lines_from_file(cls.__name__, filename) def __repr__(self): return "<%s vocab_size=%d>" % (type(self).__name__, self.vocab_size) class ByteTextEncoder(TextEncoder): """Byte-encodes text.""" def __init__(self, additional_tokens=None): """Constructs ByteTextEncoder. Args: additional_tokens: `list<str>`, list of additional tokens. These will be assigned vocab ids `[1, 1+len(additional_tokens)]`. Useful for things like "end-of-string" tokens (e.g. "<EOS>"). """ self._additional_tokens, self._additional_tokens_re = ( _prepare_reserved_tokens(additional_tokens)) # Note that internally everything is 0-indexed. Padding is dealt with at the # end of encode and the beginning of decode. 
self._additional_token_to_id = dict( zip(self._additional_tokens, range(len(self._additional_tokens)))) def encode(self, s): if not self.additional_tokens: return pad_incr(list(bytearray(tf.compat.as_bytes(s)))) # Handle additional tokens s = tf.compat.as_text(s) ids = [] for substr in self._additional_tokens_re.split(s): if not substr: continue tok_id = self._additional_token_to_id.get(substr) if tok_id is None: offset = len(self.additional_tokens) tok_ids = [ i + offset for i in list(bytearray(tf.compat.as_bytes(substr))) ] else: tok_ids = [tok_id] ids.extend(tok_ids) return pad_incr(ids) def decode(self, ids): ids = pad_decr(ids) if not self.additional_tokens: return tf.compat.as_text(bytes(bytearray(ids))) # Handle additional tokens # First pass picks out the additional tokens tmp_decoded = [] for byte_id in ids: is_additional_token = byte_id < len(self.additional_tokens) if is_additional_token: tmp_decoded.append(self.additional_tokens[byte_id]) else: # Leave these as ints so that we can contiguously decode bytes # afterwards tmp_decoded.append(byte_id - len(self.additional_tokens)) # Second pass to decode contiguous bytes strs = [] i = 0 while i < len(tmp_decoded): el = tmp_decoded[i] if isinstance(el, six.string_types): strs.append(el) i += 1 else: # Decode contiguous bytes byte_ids = [] while i < len(tmp_decoded): b = tmp_decoded[i] if isinstance(b, int): byte_ids.append(b) i += 1 else: break strs.append(bytes(bytearray(byte_ids)).decode("utf-8", "replace")) return "".join(strs) @property def vocab_size(self): # Plus 1 for pad return len(self.additional_tokens) + NUM_BYTES + 1 @property def additional_tokens(self): return self._additional_tokens @classmethod def _filename(cls, filename_prefix): return filename_prefix + ".bytes" def save_to_file(self, filename_prefix): self._write_lines_to_file( self._filename(filename_prefix), self.additional_tokens) @classmethod def load_from_file(cls, filename_prefix): lines, _ = 
cls._read_lines_from_file(cls._filename(filename_prefix)) return cls(additional_tokens=lines) class TokenTextEncoder(TextEncoder): r"""TextEncoder backed by a list of tokens. Tokenization splits on (and drops) non-alphanumeric characters with regex "\W+". """ def __init__(self, vocab_list, oov_buckets=1, oov_token="UNK", lowercase=False, tokenizer=None, strip_vocab=True, decode_token_separator=" "): """Constructs a TokenTextEncoder. To load from a file saved with `TokenTextEncoder.save_to_file`, use `TokenTextEncoder.load_from_file`. Args: vocab_list: `list<str>`, list of tokens. oov_buckets: `int`, the number of `int`s to reserve for OOV hash buckets. Tokens that are OOV will be hash-modded into a OOV bucket in `encode`. oov_token: `str`, the string to use for OOV ids in `decode`. lowercase: `bool`, whether to make all text and tokens lowercase. tokenizer: `Tokenizer`, responsible for converting incoming text into a list of tokens. strip_vocab: `bool`, whether to strip whitespace from the beginning and end of elements of `vocab_list`. decode_token_separator: `str`, the string used to separate tokens when decoding. """ self._vocab_list = [tf.compat.as_text(el) for el in vocab_list] if strip_vocab: self._vocab_list = [el.strip() for el in self._vocab_list] self._lowercase = lowercase if self._lowercase: self._vocab_list = [t.lower() for t in self._vocab_list] # Note that internally everything is 0-indexed. Padding is dealt with at the # end of encode and the beginning of decode. self._token_to_id = dict( zip(self._vocab_list, range(len(self._vocab_list)))) self._oov_buckets = oov_buckets self._oov_token = tf.compat.as_text(oov_token) # Reserved tokens are all tokens that are mixed alphanum and non-alphanum. 
reserved_tokens = [t for t in self._vocab_list if is_mixed_alphanum(t)] self._tokenizer = (tokenizer or Tokenizer(reserved_tokens=reserved_tokens)) self._user_defined_tokenizer = tokenizer self._decode_token_separator = decode_token_separator def encode(self, s): s = tf.compat.as_text(s) if self.lowercase: s = s.lower() ids = [] for token in self._tokenizer.tokenize(s): int_id = self._token_to_id.get(token, -1) if int_id < 0: int_id = self._oov_bucket(token) if int_id is None: raise ValueError("Out of vocabulary token %s" % token) ids.append(int_id) # Increment for pad id 0 return pad_incr(ids) def decode(self, ids): ids = pad_decr(ids) tokens = [] for int_id in ids: if int_id < len(self._vocab_list): tokens.append(self._vocab_list[int_id]) else: tokens.append(self._oov_token) return self._decode_token_separator.join(tokens) @property def vocab_size(self): # Plus 1 for pad return len(self._vocab_list) + self._oov_buckets + 1 @property def tokens(self): return list(self._vocab_list) @property def oov_token(self): return self._oov_token @property def lowercase(self): return self._lowercase @property def tokenizer(self): return self._tokenizer def _oov_bucket(self, token): if self._oov_buckets <= 0: return None if self._oov_buckets == 1: return len(self._vocab_list) hash_val = int(hashlib.md5(tf.compat.as_bytes(token)).hexdigest(), 16) return len(self._vocab_list) + hash_val % self._oov_buckets @classmethod def _filename(cls, filename_prefix): return filename_prefix + ".tokens" def save_to_file(self, filename_prefix): filename = self._filename(filename_prefix) kwargs = { "oov_buckets": self._oov_buckets, "lowercase": self._lowercase, "oov_token": self._oov_token, } if self._user_defined_tokenizer is not None: self._tokenizer.save_to_file(filename) kwargs["has_tokenizer"] = True self._write_lines_to_file(filename, self._vocab_list, kwargs) @classmethod def load_from_file(cls, filename_prefix): filename = cls._filename(filename_prefix) vocab_lines, kwargs = 
cls._read_lines_from_file(filename) has_tokenizer = kwargs.pop("has_tokenizer", False) if has_tokenizer: kwargs["tokenizer"] = Tokenizer.load_from_file(filename) return cls(vocab_list=vocab_lines, **kwargs) class Tokenizer(object): """Splits a string into tokens, and joins them back.""" def __init__(self, alphanum_only=True, reserved_tokens=None): """Constructs a Tokenizer. Note that the Tokenizer is invertible if `alphanum_only=False`. i.e. `s == t.join(t.tokenize(s))`. Args: alphanum_only: `bool`, if `True`, only parse out alphanumeric tokens (non-alphanumeric characters are dropped); otherwise, keep all characters (individual tokens will still be either all alphanumeric or all non-alphanumeric). reserved_tokens: `list<str>`, a list of strings that, if any are in `s`, will be preserved as whole tokens, even if they contain mixed alphanumeric/non-alphanumeric characters. """ self._alphanum_only = alphanum_only reserved_tokens, self._reserved_tokens_re = _prepare_reserved_tokens( reserved_tokens) self._reserved_tokens = set(reserved_tokens) @property def alphanum_only(self): return self._alphanum_only @property def reserved_tokens(self): return self._reserved_tokens def tokenize(self, s): """Splits a string into tokens.""" s = tf.compat.as_text(s) if self.reserved_tokens: # First split out the reserved tokens substrs = self._reserved_tokens_re.split(s) else: substrs = [s] toks = [] for substr in substrs: if substr in self.reserved_tokens: toks.append(substr) elif self._alphanum_only: toks.extend(ALPHANUM_REGEX.split(substr)) else: toks.extend(ALL_REGEX.split(substr)) # Filter out empty strings toks = [t for t in toks if t] return toks def join(self, tokens): """Joins tokens into a string.""" if self._alphanum_only: return " ".join(tokens) else: # Fully invertible return "".join(tokens) @classmethod def _filename(cls, filename_prefix): return filename_prefix + ".tokenizer" def save_to_file(self, filename_prefix): filename = self._filename(filename_prefix) kwargs = { 
"reserved_tokens": list(self._reserved_tokens), "alphanum_only": self._alphanum_only } write_lines_to_file(type(self).__name__, filename, [], kwargs) @classmethod def load_from_file(cls, filename_prefix): filename = cls._filename(filename_prefix) _, kwargs = read_lines_from_file(cls.__name__, filename) return cls(**kwargs) def pad_decr(ids): """Strip ID 0 and decrement ids by 1.""" if len(ids) < 1: return list(ids) if not any(ids): return [] # all padding. idx = -1 while not ids[idx]: idx -= 1 if idx == -1: ids = ids # pylint: disable=self-assigning-variable else: ids = ids[:idx + 1] return [i - 1 for i in ids] def pad_incr(ids): """Add 1 to ids to account for pad.""" return [i + 1 for i in ids] def _prepare_reserved_tokens(reserved_tokens): """Prepare reserved tokens and a regex for splitting them out of strings.""" reserved_tokens = [tf.compat.as_text(tok) for tok in reserved_tokens or []] dups = _find_duplicates(reserved_tokens) if dups: raise ValueError("Duplicates found in tokens: %s" % dups) reserved_tokens_re = _make_reserved_tokens_re(reserved_tokens) return reserved_tokens, reserved_tokens_re def _re_escape(s): """Escape regex control characters.""" escaped = re.sub(r"[(){}\[\].*?|^$\\+-]", r"\\\g<0>", s) return escaped def _make_reserved_tokens_re(reserved_tokens): """Constructs compiled regex to parse out reserved tokens.""" if not reserved_tokens: return None escaped_tokens = [_re_escape(rt) for rt in reserved_tokens] pattern = "(%s)" % "|".join(escaped_tokens) reserved_tokens_re = _re_compile(pattern) return reserved_tokens_re def _find_duplicates(els): seen = set() dups = [] for x in els: if x in seen: dups.append(x) else: seen.add(x) return dups def is_mixed_alphanum(token): return len([s for s in ALL_REGEX.split(token) if s]) > 1 _HEADER_PREFIX = "### " _METADATA_PREFIX = "### Metadata: " def write_lines_to_file(cls_name, filename, lines, metadata_dict): """Writes lines to file prepended by header and metadata.""" metadata_dict = metadata_dict or {} 
header_line = "%s%s" % (_HEADER_PREFIX, cls_name) metadata_line = "%s%s" % (_METADATA_PREFIX, json.dumps(metadata_dict, sort_keys=True)) with tf.io.gfile.GFile(filename, "wb") as f: for line in [header_line, metadata_line]: f.write(tf.compat.as_bytes(line)) f.write(tf.compat.as_bytes("\n")) if lines: f.write(tf.compat.as_bytes("\n".join(lines))) f.write(tf.compat.as_bytes("\n")) def read_lines_from_file(cls_name, filename): """Read lines from file, parsing out header and metadata.""" with tf.io.gfile.GFile(filename, "rb") as f: lines = [tf.compat.as_text(line)[:-1] for line in f] header_line = "%s%s" % (_HEADER_PREFIX, cls_name) if lines[0] != header_line: raise ValueError("File {fname} does not seem to have been created from " "{name}.save_to_file.".format( fname=filename, name=cls_name)) metadata_dict = json.loads(lines[1][len(_METADATA_PREFIX):]) return lines[2:], metadata_dict
apache-2.0
egordon/CoinJoin
src/org/coinjoin/util/RSABlindSignUtil.java
2959
package org.coinjoin.util; import java.math.BigInteger; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.PublicKey; import java.security.SecureRandom; import java.security.interfaces.RSAPrivateKey; import java.security.interfaces.RSAPublicKey; public class RSABlindSignUtil { private static SecureRandom random; private static KeyPairGenerator kpg; static { // Initialize RSA KeyPair Generator and RNG random = new SecureRandom(); try { kpg = KeyPairGenerator.getInstance("RSA"); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); System.exit(1); } kpg.initialize(2048, new SecureRandom()); } public static KeyPair freshRSAKeyPair() { return kpg.genKeyPair(); } /** * Given m, calculates (m^d)modN * @param p: RSA Private Key * @param blinded: Raw Byte Array representing "m" * @return */ public static byte[] signData(PrivateKey p, byte[] blinded) { BigInteger data = new BigInteger(1, blinded); RSAPrivateKey privKey = (RSAPrivateKey)p; BigInteger sig = data.modPow(privKey.getPrivateExponent(), privKey.getModulus()); return sig.toByteArray(); } public static boolean verifyData(PublicKey p, byte[] data, byte[] signature) { RSAPublicKey pub = (RSAPublicKey)p; BigInteger s = new BigInteger(1, signature); BigInteger e = pub.getPublicExponent(); BigInteger n = pub.getModulus(); BigInteger m = new BigInteger(1, data); return s.modPow(e, n).equals(m); } public static RSABlindedData blindData(PublicKey p, byte[] data) { RSAPublicKey pub = (RSAPublicKey)p; byte[] rand = new byte[pub.getModulus().bitLength() / 8]; BigInteger r = BigInteger.ONE; while(!r.gcd(pub.getModulus()).equals(BigInteger.ONE) || r.equals(BigInteger.ONE) || r.equals(pub.getModulus())) { random.nextBytes(rand); r = new BigInteger(1, rand); } BigInteger bData = ((r.modPow(pub.getPublicExponent(),pub.getModulus())) .multiply(new BigInteger(1, data))).mod(pub.getModulus()); return new 
RSABlindedData(r, bData.toByteArray()); } public static byte[] unblindSignature(PublicKey p, RSABlindedData bData, byte[] bSig) { RSAPublicKey pub = (RSAPublicKey)p; BigInteger sig = bData.GetMultiplier().modInverse(pub.getModulus()) .multiply(new BigInteger(1, bSig)).mod(pub.getModulus()); return sig.toByteArray(); } public static void main(String[] args) { KeyPair keys1 = RSABlindSignUtil.freshRSAKeyPair(); KeyPair keys2 = RSABlindSignUtil.freshRSAKeyPair(); byte[] data = new byte[256]; for(int i = 0; i < data.length; i++) { data[i] = (byte)i; } byte[] signature = RSABlindSignUtil.signData(keys2.getPrivate(), data); boolean good = RSABlindSignUtil.verifyData(keys2.getPublic(), data, signature); if(good) System.out.println("Yay! It worked!"); else System.out.println("Boo! It did not work!"); } }
apache-2.0
AndroidBoy/BAMService
manager/com/kl/categorypro/CategoryProDAO.java
4386
package com.kl.categorypro; import java.util.Date; import java.util.List; import org.hibernate.LockMode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.ApplicationContext; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; /** * A data access object (DAO) providing persistence and search support for * CategoryPro entities. Transaction control of the save(), update() and * delete() operations can directly support Spring container-managed * transactions or they can be augmented to handle user-managed Spring * transactions. Each of these methods provides additional information for how * to configure it for the desired type of transaction control. * * @see com.kl.categorypro.CategoryPro * @author MyEclipse Persistence Tools */ public class CategoryProDAO extends HibernateDaoSupport { private static final Logger log = LoggerFactory .getLogger(CategoryProDAO.class); // property constants public static final String CATEGORY_PNAME = "categoryPName"; public static final String USER_ID = "userId"; protected void initDao() { // do nothing } public void save(CategoryPro transientInstance) { log.debug("saving CategoryPro instance"); try { getHibernateTemplate().save(transientInstance); log.debug("save successful"); } catch (RuntimeException re) { log.error("save failed", re); throw re; } } public void delete(CategoryPro persistentInstance) { log.debug("deleting CategoryPro instance"); try { getHibernateTemplate().delete(persistentInstance); log.debug("delete successful"); } catch (RuntimeException re) { log.error("delete failed", re); throw re; } } public CategoryPro findById(Short id) { log.debug("getting CategoryPro instance with id: " + id); try { CategoryPro instance = (CategoryPro) getHibernateTemplate().get( "com.kl.categorypro.CategoryPro", id); return instance; } catch (RuntimeException re) { log.error("get failed", re); throw re; } } public List findByExample(CategoryPro instance) { log.debug("finding 
CategoryPro instance by example"); try { List results = getHibernateTemplate().findByExample(instance); log.debug("find by example successful, result size: " + results.size()); return results; } catch (RuntimeException re) { log.error("find by example failed", re); throw re; } } public List findByProperty(String propertyName, Object value) { log.debug("finding CategoryPro instance with property: " + propertyName + ", value: " + value); try { String queryString = "from CategoryPro as model where model." + propertyName + "= ?"; return getHibernateTemplate().find(queryString, value); } catch (RuntimeException re) { log.error("find by property name failed", re); throw re; } } public List findByCategoryPName(Object categoryPName) { return findByProperty(CATEGORY_PNAME, categoryPName); } public List findByUserId(Object userId) { return findByProperty(USER_ID, userId); } public List findAll() { log.debug("finding all CategoryPro instances"); try { String queryString = "from CategoryPro"; return getHibernateTemplate().find(queryString); } catch (RuntimeException re) { log.error("find all failed", re); throw re; } } public CategoryPro merge(CategoryPro detachedInstance) { log.debug("merging CategoryPro instance"); try { CategoryPro result = (CategoryPro) getHibernateTemplate().merge( detachedInstance); log.debug("merge successful"); return result; } catch (RuntimeException re) { log.error("merge failed", re); throw re; } } public void attachDirty(CategoryPro instance) { log.debug("attaching dirty CategoryPro instance"); try { getHibernateTemplate().saveOrUpdate(instance); log.debug("attach successful"); } catch (RuntimeException re) { log.error("attach failed", re); throw re; } } public void attachClean(CategoryPro instance) { log.debug("attaching clean CategoryPro instance"); try { getHibernateTemplate().lock(instance, LockMode.NONE); log.debug("attach successful"); } catch (RuntimeException re) { log.error("attach failed", re); throw re; } } public static CategoryProDAO 
getFromApplicationContext( ApplicationContext ctx) { return (CategoryProDAO) ctx.getBean("CategoryProDAO"); } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-eventbridge/src/main/java/com/amazonaws/services/eventbridge/model/transform/RunCommandParametersJsonUnmarshaller.java
2893
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.eventbridge.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.eventbridge.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import com.fasterxml.jackson.core.JsonToken; import static com.fasterxml.jackson.core.JsonToken.*; /** * RunCommandParameters JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class RunCommandParametersJsonUnmarshaller implements Unmarshaller<RunCommandParameters, JsonUnmarshallerContext> { public RunCommandParameters unmarshall(JsonUnmarshallerContext context) throws Exception { RunCommandParameters runCommandParameters = new RunCommandParameters(); int originalDepth = context.getCurrentDepth(); String currentParentElement = context.getCurrentParentElement(); int targetDepth = originalDepth + 1; JsonToken token = context.getCurrentToken(); if (token == null) token = context.nextToken(); if (token == VALUE_NULL) { return null; } while (true) { if (token == null) break; if (token == FIELD_NAME || token == START_OBJECT) { if (context.testExpression("RunCommandTargets", targetDepth)) { context.nextToken(); runCommandParameters.setRunCommandTargets(new ListUnmarshaller<RunCommandTarget>(RunCommandTargetJsonUnmarshaller.getInstance()) .unmarshall(context)); } } else if (token == END_ARRAY || token == END_OBJECT) { if 
(context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) { if (context.getCurrentDepth() <= originalDepth) break; } } token = context.nextToken(); } return runCommandParameters; } private static RunCommandParametersJsonUnmarshaller instance; public static RunCommandParametersJsonUnmarshaller getInstance() { if (instance == null) instance = new RunCommandParametersJsonUnmarshaller(); return instance; } }
apache-2.0
First-Peoples-Cultural-Council/fv-web-ui
frontend/app_v2/src/i18n.js
507
import i18n from 'i18next' import { initReactI18next } from 'react-i18next' // FPCC import en from 'assets/locale/en' i18n // pass the i18n instance to react-i18next. .use(initReactI18next) // init i18next - for all options read: https://www.i18next.com/overview/configuration-options .init({ resources: { en, }, fallbackLng: 'en', debug: false, interpolation: { escapeValue: false, // not needed for react as it escapes by default }, }) export default i18n
apache-2.0
sudheeshkatkam/drill
exec/java-exec/src/test/java/org/apache/drill/test/rowSet/test/RowSetTest.java
14022
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.test.rowSet.test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.exec.record.BatchSchema; import org.apache.drill.exec.vector.accessor.ArrayReader; import org.apache.drill.exec.vector.accessor.ArrayWriter; import org.apache.drill.exec.vector.accessor.TupleAccessor.TupleSchema; import org.apache.drill.test.OperatorFixture; import org.apache.drill.test.rowSet.RowSet.ExtendableRowSet; import org.apache.drill.test.rowSet.RowSet.RowSetReader; import org.apache.drill.test.rowSet.RowSet.RowSetWriter; import org.apache.drill.test.rowSet.RowSet.SingleRowSet; import org.apache.drill.test.rowSet.RowSetComparison; import org.apache.drill.test.rowSet.RowSetSchema; import org.apache.drill.test.rowSet.RowSetSchema.FlattenedSchema; import org.apache.drill.test.rowSet.RowSetSchema.PhysicalSchema; import 
org.apache.drill.test.rowSet.SchemaBuilder; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import com.google.common.base.Splitter; public class RowSetTest { private static OperatorFixture fixture; @BeforeClass public static void setUpBeforeClass() throws Exception { fixture = OperatorFixture.standardFixture(); } @AfterClass public static void tearDownAfterClass() throws Exception { fixture.close(); } /** * Test a simple physical schema with no maps. */ @Test public void testSchema() { BatchSchema batchSchema = new SchemaBuilder() .add("c", MinorType.INT) .add("a", MinorType.INT, DataMode.REPEATED) .addNullable("b", MinorType.VARCHAR) .build(); assertEquals("c", batchSchema.getColumn(0).getName()); assertEquals("a", batchSchema.getColumn(1).getName()); assertEquals("b", batchSchema.getColumn(2).getName()); RowSetSchema schema = new RowSetSchema(batchSchema); TupleSchema access = schema.hierarchicalAccess(); assertEquals(3, access.count()); crossCheck(access, 0, "c", MinorType.INT); assertEquals(DataMode.REQUIRED, access.column(0).getDataMode()); assertEquals(DataMode.REQUIRED, access.column(0).getType().getMode()); assertTrue(! access.column(0).isNullable()); crossCheck(access, 1, "a", MinorType.INT); assertEquals(DataMode.REPEATED, access.column(1).getDataMode()); assertEquals(DataMode.REPEATED, access.column(1).getType().getMode()); assertTrue(! access.column(1).isNullable()); crossCheck(access, 2, "b", MinorType.VARCHAR); assertEquals(MinorType.VARCHAR, access.column(2).getType().getMinorType()); assertEquals(DataMode.OPTIONAL, access.column(2).getDataMode()); assertEquals(DataMode.OPTIONAL, access.column(2).getType().getMode()); assertTrue(access.column(2).isNullable()); // No maps: physical schema is the same as access schema. 
PhysicalSchema physical = schema.physical(); assertEquals(3, physical.count()); assertEquals("c", physical.column(0).field().getName()); assertEquals("a", physical.column(1).field().getName()); assertEquals("b", physical.column(2).field().getName()); } public void crossCheck(TupleSchema schema, int index, String fullName, MinorType type) { String name = null; for (String part : Splitter.on(".").split(fullName)) { name = part; } assertEquals(name, schema.column(index).getName()); assertEquals(index, schema.columnIndex(fullName)); assertSame(schema.column(index), schema.column(fullName)); assertEquals(type, schema.column(index).getType().getMinorType()); } @Test public void testMapSchema() { BatchSchema batchSchema = new SchemaBuilder() .add("c", MinorType.INT) .addMap("a") .addNullable("b", MinorType.VARCHAR) .add("d", MinorType.INT) .addMap("e") .add("f", MinorType.VARCHAR) .buildMap() .add("g", MinorType.INT) .buildMap() .add("h", MinorType.BIGINT) .build(); RowSetSchema schema = new RowSetSchema(batchSchema); // Access schema: flattened with maps removed FlattenedSchema access = schema.flatAccess(); assertEquals(6, access.count()); crossCheck(access, 0, "c", MinorType.INT); crossCheck(access, 1, "a.b", MinorType.VARCHAR); crossCheck(access, 2, "a.d", MinorType.INT); crossCheck(access, 3, "a.e.f", MinorType.VARCHAR); crossCheck(access, 4, "a.g", MinorType.INT); crossCheck(access, 5, "h", MinorType.BIGINT); // Should have two maps. assertEquals(2, access.mapCount()); assertEquals("a", access.map(0).getName()); assertEquals("e", access.map(1).getName()); assertEquals(0, access.mapIndex("a")); assertEquals(1, access.mapIndex("a.e")); // Verify physical schema: should mirror the schema created above. 
PhysicalSchema physical = schema.physical(); assertEquals(3, physical.count()); assertEquals("c", physical.column(0).field().getName()); assertEquals("c", physical.column(0).fullName()); assertFalse(physical.column(0).isMap()); assertNull(physical.column(0).mapSchema()); assertEquals("a", physical.column(1).field().getName()); assertEquals("a", physical.column(1).fullName()); assertTrue(physical.column(1).isMap()); assertNotNull(physical.column(1).mapSchema()); assertEquals("h", physical.column(2).field().getName()); assertEquals("h", physical.column(2).fullName()); assertFalse(physical.column(2).isMap()); assertNull(physical.column(2).mapSchema()); PhysicalSchema aSchema = physical.column(1).mapSchema(); assertEquals(4, aSchema.count()); assertEquals("b", aSchema.column(0).field().getName()); assertEquals("a.b", aSchema.column(0).fullName()); assertEquals("d", aSchema.column(1).field().getName()); assertEquals("e", aSchema.column(2).field().getName()); assertEquals("g", aSchema.column(3).field().getName()); PhysicalSchema eSchema = aSchema.column(2).mapSchema(); assertEquals(1, eSchema.count()); assertEquals("f", eSchema.column(0).field().getName()); assertEquals("a.e.f", eSchema.column(0).fullName()); } @Test public void testScalarReaderWriter() { testTinyIntRW(); testSmallIntRW(); testIntRW(); testLongRW(); testFloatRW(); testDoubleRW(); } private void testTinyIntRW() { BatchSchema batchSchema = new SchemaBuilder() .add("col", MinorType.TINYINT) .build(); SingleRowSet rs = fixture.rowSetBuilder(batchSchema) .add(0) .add(Byte.MAX_VALUE) .add(Byte.MIN_VALUE) .build(); RowSetReader reader = rs.reader(); assertTrue(reader.next()); assertEquals(0, reader.column(0).getInt()); assertTrue(reader.next()); assertEquals(Byte.MAX_VALUE, reader.column(0).getInt()); assertTrue(reader.next()); assertEquals(Byte.MIN_VALUE, reader.column(0).getInt()); assertFalse(reader.next()); rs.clear(); } private void testSmallIntRW() { BatchSchema batchSchema = new SchemaBuilder() 
.add("col", MinorType.SMALLINT) .build(); SingleRowSet rs = fixture.rowSetBuilder(batchSchema) .add(0) .add(Short.MAX_VALUE) .add(Short.MIN_VALUE) .build(); RowSetReader reader = rs.reader(); assertTrue(reader.next()); assertEquals(0, reader.column(0).getInt()); assertTrue(reader.next()); assertEquals(Short.MAX_VALUE, reader.column(0).getInt()); assertTrue(reader.next()); assertEquals(Short.MIN_VALUE, reader.column(0).getInt()); assertFalse(reader.next()); rs.clear(); } private void testIntRW() { BatchSchema batchSchema = new SchemaBuilder() .add("col", MinorType.INT) .build(); SingleRowSet rs = fixture.rowSetBuilder(batchSchema) .add(0) .add(Integer.MAX_VALUE) .add(Integer.MIN_VALUE) .build(); RowSetReader reader = rs.reader(); assertTrue(reader.next()); assertEquals(0, reader.column(0).getInt()); assertTrue(reader.next()); assertEquals(Integer.MAX_VALUE, reader.column(0).getInt()); assertTrue(reader.next()); assertEquals(Integer.MIN_VALUE, reader.column(0).getInt()); assertFalse(reader.next()); rs.clear(); } private void testLongRW() { BatchSchema batchSchema = new SchemaBuilder() .add("col", MinorType.BIGINT) .build(); SingleRowSet rs = fixture.rowSetBuilder(batchSchema) .add(0L) .add(Long.MAX_VALUE) .add(Long.MIN_VALUE) .build(); RowSetReader reader = rs.reader(); assertTrue(reader.next()); assertEquals(0, reader.column(0).getLong()); assertTrue(reader.next()); assertEquals(Long.MAX_VALUE, reader.column(0).getLong()); assertTrue(reader.next()); assertEquals(Long.MIN_VALUE, reader.column(0).getLong()); assertFalse(reader.next()); rs.clear(); } private void testFloatRW() { BatchSchema batchSchema = new SchemaBuilder() .add("col", MinorType.FLOAT4) .build(); SingleRowSet rs = fixture.rowSetBuilder(batchSchema) .add(0F) .add(Float.MAX_VALUE) .add(Float.MIN_VALUE) .build(); RowSetReader reader = rs.reader(); assertTrue(reader.next()); assertEquals(0, reader.column(0).getDouble(), 0.000001); assertTrue(reader.next()); assertEquals(Float.MAX_VALUE, 
reader.column(0).getDouble(), 0.000001); assertTrue(reader.next()); assertEquals(Float.MIN_VALUE, reader.column(0).getDouble(), 0.000001); assertFalse(reader.next()); rs.clear(); } private void testDoubleRW() { BatchSchema batchSchema = new SchemaBuilder() .add("col", MinorType.FLOAT8) .build(); SingleRowSet rs = fixture.rowSetBuilder(batchSchema) .add(0D) .add(Double.MAX_VALUE) .add(Double.MIN_VALUE) .build(); RowSetReader reader = rs.reader(); assertTrue(reader.next()); assertEquals(0, reader.column(0).getDouble(), 0.000001); assertTrue(reader.next()); assertEquals(Double.MAX_VALUE, reader.column(0).getDouble(), 0.000001); assertTrue(reader.next()); assertEquals(Double.MIN_VALUE, reader.column(0).getDouble(), 0.000001); assertFalse(reader.next()); rs.clear(); } @Test public void testMap() { BatchSchema batchSchema = new SchemaBuilder() .add("a", MinorType.INT) .addMap("b") .add("c", MinorType.INT) .add("d", MinorType.INT) .buildMap() .build(); SingleRowSet rs = fixture.rowSetBuilder(batchSchema) .add(10, 20, 30) .add(40, 50, 60) .build(); RowSetReader reader = rs.reader(); assertTrue(reader.next()); assertEquals(10, reader.column(0).getInt()); assertEquals(20, reader.column(1).getInt()); assertEquals(30, reader.column(2).getInt()); assertEquals(10, reader.column("a").getInt()); assertEquals(30, reader.column("b.d").getInt()); assertTrue(reader.next()); assertEquals(40, reader.column(0).getInt()); assertEquals(50, reader.column(1).getInt()); assertEquals(60, reader.column(2).getInt()); assertFalse(reader.next()); rs.clear(); } @Test public void TestTopScalarArray() { BatchSchema batchSchema = new SchemaBuilder() .add("c", MinorType.INT) .addArray("a", MinorType.INT) .build(); ExtendableRowSet rs1 = fixture.rowSet(batchSchema); RowSetWriter writer = rs1.writer(); writer.column(0).setInt(10); ArrayWriter array = writer.column(1).array(); array.setInt(100); array.setInt(110); writer.save(); writer.column(0).setInt(20); array = writer.column(1).array(); 
array.setInt(200); array.setInt(120); array.setInt(220); writer.save(); writer.column(0).setInt(30); writer.save(); writer.done(); RowSetReader reader = rs1.reader(); assertTrue(reader.next()); assertEquals(10, reader.column(0).getInt()); ArrayReader arrayReader = reader.column(1).array(); assertEquals(2, arrayReader.size()); assertEquals(100, arrayReader.getInt(0)); assertEquals(110, arrayReader.getInt(1)); assertTrue(reader.next()); assertEquals(20, reader.column(0).getInt()); arrayReader = reader.column(1).array(); assertEquals(3, arrayReader.size()); assertEquals(200, arrayReader.getInt(0)); assertEquals(120, arrayReader.getInt(1)); assertEquals(220, arrayReader.getInt(2)); assertTrue(reader.next()); assertEquals(30, reader.column(0).getInt()); arrayReader = reader.column(1).array(); assertEquals(0, arrayReader.size()); assertFalse(reader.next()); SingleRowSet rs2 = fixture.rowSetBuilder(batchSchema) .add(10, new int[] {100, 110}) .add(20, new int[] {200, 120, 220}) .add(30, null) .build(); new RowSetComparison(rs1) .verifyAndClear(rs2); } }
apache-2.0
googlestadia/performance-layers
layer/frame_time_layer.cc
12814
// Copyright 2020-2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include <inttypes.h> #include <cstdint> #include <ctime> #include <iomanip> #include <sstream> #include <string> #include <type_traits> #include "layer_data.h" #include "layer_utils.h" #include "log_scanner.h" #include "logging.h" namespace { // ---------------------------------------------------------------------------- // Layer book-keeping information // ---------------------------------------------------------------------------- constexpr uint32_t kFrameTimeLayerVersion = 1; constexpr char kLayerName[] = "VK_LAYER_STADIA_frame_time"; constexpr char kLayerDescription[] = "Stadia Frame Time Measuring Layer"; constexpr char kLogFilenameEnvVar[] = "VK_FRAME_TIME_LOG"; constexpr char kExitAfterFrameEnvVar[] = "VK_FRAME_TIME_EXIT_AFTER_FRAME"; constexpr char kFinishFileEnvVar[] = "VK_FRAME_TIME_FINISH_FILE"; constexpr char kBenchmarkWatchFileEnvVar[] = "VK_FRAME_TIME_BENCHMARK_WATCH_FILE"; constexpr char kBenchmarkStartStringEnvVar[] = "VK_FRAME_TIME_BENCHMARK_START_STRING"; const char* StrOrEmpty(const char* str_or_null) { return str_or_null ? 
str_or_null : ""; } class FrameTimeLayerData : public performancelayers::LayerData { public: FrameTimeLayerData(char* log_filename, uint64_t exit_frame_num_or_invalid, const char* benchmark_watch_filename, const char* benchmark_start_string) : LayerData(log_filename, "Frame Time (ns),Benchmark State"), exit_frame_num_or_invalid_(exit_frame_num_or_invalid), benchmark_start_pattern_(StrOrEmpty(benchmark_start_string)) { LogEventOnly("frame_time_layer_init"); if (!benchmark_watch_filename || strlen(benchmark_watch_filename) == 0) return; benchmark_log_scanner_ = performancelayers::LogScanner::FromFilename(benchmark_watch_filename); if (benchmark_log_scanner_) benchmark_log_scanner_->RegisterWatchedPattern(benchmark_start_pattern_); } ~FrameTimeLayerData() override; static constexpr uint64_t kInvalidFrameNum = ~uint64_t(0); // Returns the next frame number. uint64_t IncrementFrameNum() { return ++current_frame_num_; } uint64_t GetExitFrameNum() const { return exit_frame_num_or_invalid_; } // Returns true if the benchmark gameplay start has been detected. // If benchmark start detection is not configured (through env vars), // assumes that the benchmarks begins with the first frame. bool HasBenchmarkStarted(); private: const uint64_t exit_frame_num_or_invalid_; uint64_t current_frame_num_ = 0; uint32_t benchmark_state_idx_ = 0; std::string benchmark_start_pattern_; std::optional<performancelayers::LogScanner> benchmark_log_scanner_; }; FrameTimeLayerData* GetLayerData() { auto GetExitAfterFrameVal = [] { if (const char* exit_after_frame_val_str = getenv(kExitAfterFrameEnvVar)) { std::stringstream ss; ss << exit_after_frame_val_str; uint64_t exit_frame_val = FrameTimeLayerData::kInvalidFrameNum; ss >> exit_frame_val; return exit_frame_val; } return FrameTimeLayerData::kInvalidFrameNum; }; // Don't use new -- make the destructor run when the layer gets unloaded. 
static FrameTimeLayerData layer_data( getenv(kLogFilenameEnvVar), GetExitAfterFrameVal(), getenv(kBenchmarkWatchFileEnvVar), getenv(kBenchmarkStartStringEnvVar)); return &layer_data; } // If |kFinishFileEnvVar| is set, this function will create a finish file with // under |finishCause| and time written under that location. void CreateFinishIndicatorFile(const char* finishCause) { assert(finishCause); const char* finish_indicator_file = getenv(kFinishFileEnvVar); if (!finish_indicator_file) return; FILE* finish_file = fopen(finish_indicator_file, "w"); if (!finish_file) return; // Create the application finish indicator file and write the current time // there. This is to aid debugging when the modification time can be lost // when sending the file over a wire. std::time_t curr_time = std::time(nullptr); std::tm tm = *std::localtime(&curr_time); std::ostringstream oss; oss << std::put_time(&tm, "%c %Z"); fprintf(finish_file, "Stadia Frame Time Layer\n%s %s\n", finishCause, oss.str().c_str()); fclose(finish_file); } bool FrameTimeLayerData::HasBenchmarkStarted() { if (benchmark_state_idx_ > 0 || benchmark_start_pattern_.empty()) return true; if (!benchmark_log_scanner_) return true; if (benchmark_log_scanner_->ConsumeNewLines()) { benchmark_state_idx_ = 1; benchmark_log_scanner_.reset(); return true; } return false; } FrameTimeLayerData::~FrameTimeLayerData() { CreateFinishIndicatorFile("APPLICATION_EXIT"); LogEventOnly("frame_time_layer_exit", "application_exit"); } // Use this macro to define all vulkan functions intercepted by the layer. #define SPL_FRAME_TIME_LAYER_FUNC(RETURN_TYPE_, FUNC_NAME_, FUNC_ARGS_) \ SPL_INTERCEPTED_VULKAN_FUNC(RETURN_TYPE_, FrameTimeLayer_, FUNC_NAME_, \ FUNC_ARGS_) ////////////////////////////////////////////////////////////////////////////// // Implementation of the instance functions we want to override. 
//////////////////////////////////////////////////////////////////////////////

// Override for vkQueuePresentKHR. Logs the time delta since the previous
// present (tagged with the benchmark state), advances the frame counter and,
// when the configured exit frame is reached, terminates the whole application.
SPL_FRAME_TIME_LAYER_FUNC(VkResult, QueuePresentKHR,
                          (VkQueue queue, const VkPresentInfoKHR* present_info)) {
  auto* layer_data = GetLayerData();
  // "1"/"0" records whether the benchmark gameplay section has started yet.
  layer_data->LogTimeDelta("frame_present",
                           layer_data->HasBenchmarkStarted() ? "1" : "0");
  uint64_t frames_elapsed = layer_data->IncrementFrameNum();
  uint64_t exit_frame_num = layer_data->GetExitFrameNum();
  // If the layer should make Vulkan application exit after this frame.
  if (frames_elapsed == exit_frame_num) {
    SPL_LOG(INFO)
        << "Stadia Frame Time Layer: Terminating application after frame "
        << frames_elapsed;
    // _Exit will bring down the parent Vulkan application without running any
    // cleanup. Resources will be reclaimed by the operating system.
    CreateFinishIndicatorFile("FRAME_TIME_LAYER_TERMINATED");
    layer_data->LogEventOnly("frame_time_layer_exit",
                             absl::StrCat("terminated,frame:", frames_elapsed));
    std::_Exit(99);
  }
  // Forward the call to the next layer in the chain.
  auto next_proc = layer_data->GetNextDeviceProcAddr(
      queue, &VkLayerDispatchTable::QueuePresentKHR);
  return next_proc(queue, present_info);
}

// Override for vkDestroyInstance. Deletes the entry for |instance| from the
// layer data.
SPL_FRAME_TIME_LAYER_FUNC(void, DestroyInstance,
                          (VkInstance instance,
                           const VkAllocationCallbacks* allocator)) {
  auto* layer_data = GetLayerData();
  auto next_proc = layer_data->GetNextInstanceProcAddr(
      instance, &VkLayerInstanceDispatchTable::DestroyInstance);
  next_proc(instance, allocator);
  // Remove our bookkeeping only after the downstream destroy has run.
  layer_data->RemoveInstance(instance);
}

// Override for vkCreateInstance. Creates the dispatch table for this instance
// and add it to the layer data.
SPL_FRAME_TIME_LAYER_FUNC(VkResult, CreateInstance,
                          (const VkInstanceCreateInfo* create_info,
                           const VkAllocationCallbacks* allocator,
                           VkInstance* instance)) {
  auto build_dispatch_table =
      [instance](PFN_vkGetInstanceProcAddr get_proc_addr) {
        // Build dispatch table for the instance functions we need to call.
        VkLayerInstanceDispatchTable dispatch_table{};
        // Get the next layer's instance of the instance functions we will
        // override.
        SPL_DISPATCH_INSTANCE_FUNC(DestroyInstance);
        SPL_DISPATCH_INSTANCE_FUNC(GetInstanceProcAddr);
        return dispatch_table;
      };

  return GetLayerData()->CreateInstance(create_info, allocator, instance,
                                        build_dispatch_table);
}

//////////////////////////////////////////////////////////////////////////////
// Implementation of the device function we want to override.
//////////////////////////////////////////////////////////////////////////////

// Override for vkDestroyDevice. Removes the dispatch table for the device from
// the layer data.
SPL_FRAME_TIME_LAYER_FUNC(void, DestroyDevice,
                          (VkDevice device,
                           const VkAllocationCallbacks* allocator)) {
  auto* layer_data = GetLayerData();
  auto next_proc = layer_data->GetNextDeviceProcAddr(
      device, &VkLayerDispatchTable::DestroyDevice);
  next_proc(device, allocator);
  layer_data->RemoveDevice(device);
}

// Override for vkCreateDevice. Builds the dispatch table for the new device
// and add it to the layer data.
SPL_FRAME_TIME_LAYER_FUNC(VkResult, CreateDevice,
                          (VkPhysicalDevice physical_device,
                           const VkDeviceCreateInfo* create_info,
                           const VkAllocationCallbacks* allocator,
                           VkDevice* device)) {
  auto build_dispatch_table = [device](PFN_vkGetDeviceProcAddr gdpa) {
    VkLayerDispatchTable dispatch_table{};
    // Get the next layer's instance of the device functions we will override.
    SPL_DISPATCH_DEVICE_FUNC(DestroyDevice);
    SPL_DISPATCH_DEVICE_FUNC(GetDeviceProcAddr);
    SPL_DISPATCH_DEVICE_FUNC(QueuePresentKHR);
    return dispatch_table;
  };

  return GetLayerData()->CreateDevice(physical_device, create_info, allocator,
                                      device, build_dispatch_table);
}

// Reports this layer's own properties (name, description, versions) back to
// the Vulkan loader.
SPL_FRAME_TIME_LAYER_FUNC(VkResult, EnumerateInstanceLayerProperties,
                          (uint32_t * property_count,
                           VkLayerProperties* properties)) {
  if (property_count) *property_count = 1;

  if (properties) {
    // NOTE(review): strncpy does not guarantee NUL termination when the
    // source fills the buffer — this assumes kLayerName/kLayerDescription are
    // strictly shorter than the fixed-size destination fields; confirm.
    strncpy(properties->layerName, kLayerName, sizeof(properties->layerName));
    strncpy(properties->description, kLayerDescription,
            sizeof(properties->description));
    properties->implementationVersion = kFrameTimeLayerVersion;
    properties->specVersion = VK_API_VERSION_1_0;
  }

  return VK_SUCCESS;
}

// Device-layer enumeration is identical to instance-layer enumeration;
// delegate to it.
SPL_FRAME_TIME_LAYER_FUNC(VkResult, EnumerateDeviceLayerProperties,
                          (VkPhysicalDevice /* physical_device */,
                           uint32_t* property_count,
                           VkLayerProperties* properties)) {
  return FrameTimeLayer_EnumerateInstanceLayerProperties(property_count,
                                                         properties);
}

}  // namespace

// The *GetProcAddr functions are the entry points to the layers.
// They return a function pointer for the instance requested by |name|. We
// return the functions defined in this layer for those we want to override.
// Otherwise we call the *GetProcAddr function for the next layer to get the
// function to be called.
// Exported device-level entry point. Returns this layer's override for
// intercepted functions, otherwise forwards the lookup down the chain.
SPL_LAYER_ENTRY_POINT SPL_FRAME_TIME_LAYER_FUNC(PFN_vkVoidFunction,
                                                GetDeviceProcAddr,
                                                (VkDevice device,
                                                 const char* name)) {
  // Our override takes precedence when this layer intercepts |name|.
  if (auto func =
          performancelayers::FunctionInterceptor::GetInterceptedOrNull(name)) {
    return func;
  }

  auto* layer_data = GetLayerData();
  PFN_vkGetDeviceProcAddr next_get_proc_addr =
      layer_data->GetNextDeviceProcAddr(
          device, &VkLayerDispatchTable::GetDeviceProcAddr);
  // A missing downstream GetDeviceProcAddr would be a broken layer chain.
  assert(next_get_proc_addr && next_get_proc_addr != VK_NULL_HANDLE);
  return next_get_proc_addr(device, name);
}

// Exported instance-level entry point. Same strategy as GetDeviceProcAddr:
// intercepted functions are served by this layer, everything else is
// delegated to the next layer's GetInstanceProcAddr.
SPL_LAYER_ENTRY_POINT SPL_FRAME_TIME_LAYER_FUNC(PFN_vkVoidFunction,
                                                GetInstanceProcAddr,
                                                (VkInstance instance,
                                                 const char* name)) {
  if (auto func =
          performancelayers::FunctionInterceptor::GetInterceptedOrNull(name)) {
    return func;
  }

  auto* layer_data = GetLayerData();
  PFN_vkGetInstanceProcAddr next_get_proc_addr =
      layer_data->GetNextInstanceProcAddr(
          instance, &VkLayerInstanceDispatchTable::GetInstanceProcAddr);
  assert(next_get_proc_addr && next_get_proc_addr != VK_NULL_HANDLE);
  return next_get_proc_addr(instance, name);
}
apache-2.0
freignat91/mlearning
mlserver/server/restapi.go
3933
package mlserver

import (
	"encoding/json"
	"fmt"
	"net/http"
	"os"
	"strconv"

	"github.com/gorilla/mux"
)

// RetBool is a generic boolean JSON payload.
type RetBool struct {
	Ret bool `json:"ret"`
}

// RetInt is a generic integer JSON payload.
type RetInt struct {
	Ret int `json:"ret"`
}

// FoodCoord holds the coordinates of a food group.
type FoodCoord struct {
	X float64 `json:"x"`
	Y float64 `json:"y"`
}

// AntSelected identifies a single ant inside a nest.
type AntSelected struct {
	Nest int `json:"nest"`
	Ant  int `json:"ant"`
}

// getData returns the current graphic data of the nests as JSON.
func (s *Server) getData(w http.ResponseWriter, r *http.Request) {
	data := s.nests.GetGraphicData()
	json.NewEncoder(w).Encode(data)
}

// nestsStart starts the simulation.
func (s *Server) nestsStart(w http.ResponseWriter, r *http.Request) {
	s.nests.Start()
	saveInfo(r, "nestsStart")
	json.NewEncoder(w).Encode("{}")
}

// nestsStop stops the simulation.
func (s *Server) nestsStop(w http.ResponseWriter, r *http.Request) {
	s.nests.Stop()
	saveInfo(r, "nestsStop")
	json.NewEncoder(w).Encode("{}")
}

// isStarted reports whether the simulation is currently running.
func (s *Server) isStarted(w http.ResponseWriter, r *http.Request) {
	ret := RetBool{Ret: s.nests.IsStarted()}
	json.NewEncoder(w).Encode(&ret)
}

// nextTime advances the simulation by one time step.
func (s *Server) nextTime(w http.ResponseWriter, r *http.Request) {
	s.nests.NextTime()
	saveInfo(r, "nextTime")
	json.NewEncoder(w).Encode("{}")
}

// exportAntSample exports the currently selected ant sample and returns the
// exported count. On failure it answers 400 and nothing else (the original
// fell through and encoded an undefined count after the error status).
func (s *Server) exportAntSample(w http.ResponseWriter, r *http.Request) {
	nn, err := s.nests.ExportSelectedAntSample()
	if err != nil {
		fmt.Printf("Error exporting ant sample: %v\n", err)
		w.WriteHeader(http.StatusBadRequest)
		return
	}
	ret := RetInt{Ret: nn}
	saveInfo(r, "exportAntSample")
	json.NewEncoder(w).Encode(&ret)
}

// setSleep sets the per-step sleep delay from the URL path parameter.
func (s *Server) setSleep(w http.ResponseWriter, r *http.Request) {
	// Reject non-numeric values instead of silently treating them as 0.
	val, err := strconv.Atoi(mux.Vars(r)["value"])
	if err != nil {
		http.Error(w, "invalid sleep value", http.StatusBadRequest)
		return
	}
	s.nests.SetSleep(val)
	saveInfo(r, fmt.Sprintf("setSleep: %d", val))
	json.NewEncoder(w).Encode("{}")
}

// setSelected marks one ant in one nest as the current selection.
func (s *Server) setSelected(w http.ResponseWriter, r *http.Request) {
	var a AntSelected
	if err := json.NewDecoder(r.Body).Decode(&a); err != nil {
		http.Error(w, "invalid JSON body", http.StatusBadRequest)
		return
	}
	s.nests.SetSelected(a.Nest, a.Ant, "")
	saveInfo(r, "setSelected")
	json.NewEncoder(w).Encode("{}")
}

// getGlobalInfo returns aggregated information about all nests.
func (s *Server) getGlobalInfo(w http.ResponseWriter, r *http.Request) {
	saveInfo(r, "getGlobalInfo")
	info := s.nests.GetGlobalInfo()
	json.NewEncoder(w).Encode(info)
}

// getInfo returns information about the current selection.
func (s *Server) getInfo(w http.ResponseWriter, r *http.Request) {
	info := s.nests.GetInfo()
	json.NewEncoder(w).Encode(info)
}

// restart re-initializes the nests with the nest count from the body.
func (s *Server) restart(w http.ResponseWriter, r *http.Request) {
	var a RetInt
	if err := json.NewDecoder(r.Body).Decode(&a); err != nil {
		http.Error(w, "invalid JSON body", http.StatusBadRequest)
		return
	}
	s.initNests(a.Ret)
	saveInfo(r, fmt.Sprintf("restart: %d", a.Ret))
	json.NewEncoder(w).Encode("{}")
}

// addFoods adds a food group at the coordinates given in the body.
func (s *Server) addFoods(w http.ResponseWriter, r *http.Request) {
	var t FoodCoord
	if err := json.NewDecoder(r.Body).Decode(&t); err != nil {
		http.Error(w, "invalid JSON body", http.StatusBadRequest)
		return
	}
	s.nests.AddFoodGroup(t.X, t.Y)
	saveInfo(r, "addFood")
	json.NewEncoder(w).Encode("{}")
}

// foodRenew toggles automatic food renewal.
func (s *Server) foodRenew(w http.ResponseWriter, r *http.Request) {
	var t RetBool
	if err := json.NewDecoder(r.Body).Decode(&t); err != nil {
		http.Error(w, "invalid JSON body", http.StatusBadRequest)
		return
	}
	s.nests.FoodRenew(t.Ret)
	saveInfo(r, "foodRenew")
	json.NewEncoder(w).Encode("{}")
}

// setPanicMode toggles panic mode.
func (s *Server) setPanicMode(w http.ResponseWriter, r *http.Request) {
	var t RetBool
	if err := json.NewDecoder(r.Body).Decode(&t); err != nil {
		http.Error(w, "invalid JSON body", http.StatusBadRequest)
		return
	}
	s.nests.SetPanicMode(t.Ret)
	saveInfo(r, "panicMode")
	json.NewEncoder(w).Encode("{}")
}

// clearFoodGroup removes all food groups.
func (s *Server) clearFoodGroup(w http.ResponseWriter, r *http.Request) {
	s.nests.ClearFoodGroup()
	saveInfo(r, "clearFoodGroup")
	json.NewEncoder(w).Encode("{}")
}

// saveInfo appends the caller's remote address and the action name to the
// audit file. Failures are logged and otherwise ignored (best effort).
func saveInfo(r *http.Request, line string) {
	filename := "../addr.txt"
	addr := fmt.Sprintf("remote addr: %v\n", r.RemoteAddr)
	f, err := os.OpenFile(filename, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600)
	if err != nil {
		fmt.Printf("error opening in addr file: %v\n", err)
		// Return early: the original kept going and wrote through a nil file.
		return
	}
	defer f.Close()
	if _, err = f.WriteString(addr + ": " + line); err != nil {
		fmt.Printf("error writing in addr file: %v\n", err)
	}
}

// verifJSON is a debugging helper that marshals v and prints the result.
func verifJSON(v interface{}) {
	res, err := json.Marshal(v)
	if err != nil {
		fmt.Printf("Error: %v\n", err)
	}
	fmt.Printf("%s\n", res)
}
apache-2.0
MatthewWilkes/mw4068-packaging
src/melange/src/soc/modules/gsoc/views/models/org_app_survey.py
3736
#!/usr/bin/env python2.5
#
# Copyright 2010 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Views for GSoCOrgAppSurveys.
"""

__authors__ = [
  '"Lennard de Rijk" <ljvderijk@gmail.com>',
  ]


from soc.logic import dicts
from soc.views.helper import decorators
from soc.views.models import org_app_survey

from soc.modules.gsoc.logic.models.org_app_survey import logic as org_app_logic
from soc.modules.gsoc.logic.models.program import logic as program_logic
from soc.modules.gsoc.logic.models.student import logic as student_logic
from soc.modules.gsoc.tasks import org_app_survey_tasks
from soc.modules.gsoc.views.helper import access
from soc.modules.gsoc.views.models.program import view as program_view


class View(org_app_survey.View):
  """View methods for the GSoCOrgAppSurveys model.
  """

  def __init__(self, params=None):
    """Defines the fields and methods required for the base View class
    to provide the user with list, public, create, edit and delete views.

    Params:
      params: a dict with params for this View
    """
    # Per-action access rules; each key is a view action and each value the
    # list of checks the GSoCChecker runs before the request is served.
    rights = access.GSoCChecker(params)
    rights['any_access'] = ['allow']
    rights['show'] = ['checkIsDeveloper']
    rights['create'] = [('checkIsHostForProgramInScope', program_logic)]
    rights['edit'] = [('checkIsHostForProgramInScope', program_logic)]
    rights['delete'] = ['checkIsDeveloper']
    rights['list'] = ['checkIsDeveloper']
    rights['list_self'] = ['checkIsUser']
    # A record is visible to someone who can view it or read the survey.
    rights['record'] = [('checkHasAny', [
        [('checkCanViewOrgAppRecord', [org_app_logic]),
         ('checkIsSurveyReadable', [org_app_logic])]
        ])]
    rights['results'] = [('checkIsHostForProgramInScope', program_logic)]
    rights['review'] = [('checkIsHostForProgramInScope', program_logic),
                        ('checkCanReviewOrgAppRecord', [org_app_logic])]
    rights['review_overview'] = [('checkIsHostForProgramInScope',
                                  program_logic)]
    # Taking the survey requires an open org signup period and excludes
    # students of the program.
    rights['take'] = [
        ('checkOrgAppRecordIfPresent', org_app_logic),
        ('checkIsActivePeriod', ['org_signup', 'scope_path', program_logic]),
        ('checkIsSurveyTakeable', org_app_logic),
        ('checkIsNotStudentForProgramInScope', [program_logic,
                                                student_logic])]

    # Parameters merged into (and overriding) the base view's params.
    new_params = {}
    new_params['logic'] = org_app_logic
    new_params['rights'] = rights

    new_params['scope_view'] = program_view

    new_params['name'] = "GSoC Org Application Survey"
    new_params['url_name'] = 'gsoc/org_app'

    new_params['module_package'] = 'soc.modules.gsoc.views.models'
    new_params['module_name'] = 'org_app_survey'

    new_params['bulk_process_task'] = org_app_survey_tasks.bulk_process

    params = dicts.merge(params, new_params, sub_merge=True)

    super(View, self).__init__(params=params)


view = View()

# Module-level URL handlers; note `list` deliberately shadows the builtin
# (Melange's module-level view naming convention).
create = decorators.view(view.create)
edit = decorators.view(view.edit)
delete = decorators.view(view.delete)
list = decorators.view(view.list)
list_self = decorators.view(view.listSelf)
public = decorators.view(view.public)
record = decorators.view(view.viewRecord)
results = decorators.view(view.viewResults)
review = decorators.view(view.review)
review_overview = decorators.view(view.reviewOverview)
take = decorators.view(view.take)
apache-2.0
b002368/chef-repo
lib/chef/data_collector/messages/helpers.rb
5641
# # Author:: Adam Leff (<adamleff@chef.io) # Author:: Ryan Cragun (<ryan@chef.io>) # # Copyright:: Copyright 2012-2016, Chef Software Inc. # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # class Chef class DataCollector module Messages module Helpers # # Fully-qualified domain name of the Chef Server configured in Chef::Config # If the chef_server_url cannot be parsed as a URI, the node["fqdn"] attribute # will be returned, or "localhost" if the run_status is unavailable to us. # # @param run_status [Chef::RunStatus] The RunStatus object for this Chef Run. # # @return [String] FQDN of the configured Chef Server, or node/localhost if not found. # def chef_server_fqdn(run_status) if !Chef::Config[:chef_server_url].nil? URI(Chef::Config[:chef_server_url]).host elsif !Chef::Config[:node_name].nil? Chef::Config[:node_name] else "localhost" end end # # The organization name the node is associated with. For Chef Solo runs, a # user-configured organization string is returned, or the string "chef_solo" # if such a string is not configured. # # @return [String] Organization to which the node is associated # def organization solo_run? ? data_collector_organization : chef_server_organization end # # Returns the user-configured organization, or "chef_solo" if none is configured. # # This is only used when Chef is run in Solo mode. 
# # @return [String] Data-collector-specific organization used when running in Chef Solo # def data_collector_organization Chef::Config[:data_collector][:organization] || "chef_solo" end # # Return the organization assumed by the configured chef_server_url. # # We must parse this from the Chef::Config[:chef_server_url] because a node # has no knowledge of an organization or to which organization is belongs. # # If we cannot determine the organization, we return "unknown_organization" # # @return [String] shortname of the Chef Server organization # def chef_server_organization return "unknown_organization" unless Chef::Config[:chef_server_url] Chef::Config[:chef_server_url].match(%r{/+organizations/+(\w+)}).nil? ? "unknown_organization" : $1 end # # The source of the data collecting during this run, used by the # DataCollector endpoint to determine if Chef was in Solo mode or not. # # @return [String] "chef_solo" if in Solo mode, "chef_client" if in Client mode # def collector_source solo_run? ? "chef_solo" : "chef_client" end # # If we're running in Solo (legacy) mode, or in Solo (formerly # "Chef Client Local Mode"), we're considered to be in a "solo run". # # @return [Boolean] Whether we're in a solo run or not # def solo_run? Chef::Config[:solo] || Chef::Config[:local_mode] end # # Returns a UUID that uniquely identifies this node for reporting reasons. # # The node is read in from disk if it exists, or it's generated if it does # does not exist. # # @return [String] UUID for the node # def node_uuid read_node_uuid || generate_node_uuid end # # Generates a UUID for the node via SecureRandom.uuid and writes out # metadata file so the UUID persists between runs. 
# # @return [String] UUID for the node # def generate_node_uuid uuid = SecureRandom.uuid update_metadata("node_uuid", uuid) uuid end # # Reads in the node UUID from the node metadata file # # @return [String] UUID for the node # def read_node_uuid metadata["node_uuid"] end # # Returns the DataCollector metadata for this node # # If the metadata file does not exist in the file cache path, # an empty hash will be returned. # # @return [Hash] DataCollector metadata for this node # def metadata Chef::JSONCompat.parse(Chef::FileCache.load(metadata_filename)) rescue Chef::Exceptions::FileNotFound {} end def update_metadata(key, value) updated_metadata = metadata.tap { |x| x[key] = value } Chef::FileCache.store(metadata_filename, Chef::JSONCompat.to_json(updated_metadata), 0644) end def metadata_filename "data_collector_metadata.json" end end end end end
apache-2.0
RasaHQ/rasa_core
rasa/core/run.py
7791
import asyncio
from functools import partial

import argparse
import logging

from sanic import Sanic
from sanic_cors import CORS
from typing import List, Optional, Text

import rasa.core.cli.arguments
import rasa.utils
import rasa.core
from rasa.core import constants, utils, cli
from rasa.core.channels import (BUILTIN_CHANNELS, InputChannel, console)
from rasa.core.interpreter import NaturalLanguageInterpreter
from rasa.core.tracker_store import TrackerStore
from rasa.core.utils import AvailableEndpoints, read_yaml_file

logger = logging.getLogger()  # get the root logger


def create_argument_parser():
    """Parse all the command line arguments for the run script."""

    parser = argparse.ArgumentParser(
        description='starts the bot')

    parser.add_argument(
        '-d', '--core',
        required=True,
        type=str,
        help="core model to run")
    parser.add_argument(
        '-u', '--nlu',
        type=str,
        help="nlu model to run")

    cli.arguments.add_logging_option_arguments(parser)
    cli.run.add_run_arguments(parser)
    return parser


def create_http_input_channels(
    channel: Optional[Text],
    credentials_file: Optional[Text]
) -> List['InputChannel']:
    """Instantiate the chosen input channel.

    When `channel` is given only that channel is created (configured from the
    matching section of the credentials file, if any); otherwise one channel
    per entry in the credentials file is created.
    """

    if credentials_file:
        all_credentials = read_yaml_file(credentials_file)
    else:
        all_credentials = {}

    if channel:
        return [_create_single_channel(channel, all_credentials.get(channel))]
    else:
        return [_create_single_channel(c, k)
                for c, k in all_credentials.items()]


def _create_single_channel(channel, credentials):
    """Create one input channel, either a builtin or a custom class path."""
    from rasa.core.channels import BUILTIN_CHANNELS

    if channel in BUILTIN_CHANNELS:
        return BUILTIN_CHANNELS[channel].from_credentials(credentials)
    else:
        # try to load channel based on class name
        try:
            input_channel_class = utils.class_from_module_path(channel)
            return input_channel_class.from_credentials(credentials)
        except (AttributeError, ImportError):
            raise Exception(
                "Failed to find input channel class for '{}'. Unknown "
                "input channel. Check your credentials configuration to "
                "make sure the mentioned channel is not misspelled. "
                "If you are creating your own channel, make sure it "
                "is a proper name of a class in a module.".format(channel))


def configure_app(input_channels=None,
                  cors=None,
                  auth_token=None,
                  enable_api=True,
                  jwt_secret=None,
                  jwt_method=None,
                  route="/webhooks/",
                  port=None):
    """Run the agent."""
    from rasa.core import server

    # Either serve the full REST API or a bare Sanic app that only hosts the
    # input-channel webhooks.
    if enable_api:
        app = server.create_app(cors_origins=cors,
                                auth_token=auth_token,
                                jwt_secret=jwt_secret,
                                jwt_method=jwt_method)
    else:
        app = Sanic(__name__)
        CORS(app,
             resources={r"/*": {"origins": cors or ""}},
             automatic_options=True)

    if input_channels:
        rasa.core.channels.channel.register(input_channels,
                                            app,
                                            route=route)
    else:
        input_channels = []

    if logger.isEnabledFor(logging.DEBUG):
        utils.list_routes(app)

    # configure async loop logging
    async def configure_logging():
        if logger.isEnabledFor(logging.DEBUG):
            utils.enable_async_loop_debugging(asyncio.get_event_loop())

    app.add_task(configure_logging)

    if "cmdline" in {c.name() for c in input_channels}:

        async def run_cmdline_io(running_app: Sanic):
            """Small wrapper to shut down the server once cmd io is done."""
            await asyncio.sleep(1)  # allow server to start
            await console.record_messages(
                server_url=constants.DEFAULT_SERVER_FORMAT.format(port))

            logger.info("Killing Sanic server now.")
            running_app.stop()  # kill the sanic server

        app.add_task(run_cmdline_io)

    return app


def serve_application(core_model=None,
                      nlu_model=None,
                      channel=None,
                      port=constants.DEFAULT_SERVER_PORT,
                      credentials_file=None,
                      cors=None,
                      auth_token=None,
                      enable_api=True,
                      jwt_secret=None,
                      jwt_method=None,
                      endpoints=None
                      ):
    # Default to console input when no channel configuration was given.
    if not channel and not credentials_file:
        channel = "cmdline"

    input_channels = create_http_input_channels(channel, credentials_file)

    app = configure_app(input_channels, cors, auth_token, enable_api,
                        jwt_secret, jwt_method, port=port)

    logger.info("Starting Rasa Core server on "
                "{}".format(constants.DEFAULT_SERVER_FORMAT.format(port)))

    app.register_listener(
        partial(load_agent_on_start, core_model, endpoints, nlu_model),
        'before_server_start')
    app.run(host='0.0.0.0', port=port,
            access_log=logger.isEnabledFor(logging.DEBUG))


# noinspection PyUnusedLocal
async def load_agent_on_start(core_model, endpoints, nlu_model, app, loop):
    """Load an agent.

    Used to be scheduled on server start
    (hence the `app` and `loop` arguments)."""
    from rasa.core import broker
    from rasa.core.agent import Agent

    # NOTE(review): `endpoints` is dereferenced unconditionally here although
    # the `if endpoints and endpoints.model` check below implies it may be
    # None (serve_application's default). Confirm that every caller passes a
    # valid AvailableEndpoints, or guard these accesses.
    _interpreter = NaturalLanguageInterpreter.create(nlu_model,
                                                     endpoints.nlu)
    _broker = broker.from_endpoint_config(endpoints.event_broker)

    _tracker_store = TrackerStore.find_tracker_store(
        None, endpoints.tracker_store, _broker)

    if endpoints and endpoints.model:
        from rasa.core import agent

        app.agent = Agent(interpreter=_interpreter,
                          generator=endpoints.nlg,
                          tracker_store=_tracker_store,
                          action_endpoint=endpoints.action)

        await agent.load_from_server(app.agent,
                                     model_server=endpoints.model)
    else:
        app.agent = Agent.load(core_model,
                               interpreter=_interpreter,
                               generator=endpoints.nlg,
                               tracker_store=_tracker_store,
                               action_endpoint=endpoints.action)

    return app.agent


if __name__ == '__main__':
    # Running as standalone python application
    arg_parser = create_argument_parser()
    cmdline_args = arg_parser.parse_args()

    # Quiet down noisy third-party loggers.
    logging.getLogger('werkzeug').setLevel(logging.WARN)
    logging.getLogger('engineio').setLevel(logging.WARN)
    logging.getLogger('matplotlib').setLevel(logging.WARN)
    logging.getLogger('socketio').setLevel(logging.ERROR)

    rasa.utils.configure_colored_logging(cmdline_args.loglevel)
    utils.configure_file_logging(cmdline_args.loglevel,
                                 cmdline_args.log_file)

    _endpoints = AvailableEndpoints.read_endpoints(cmdline_args.endpoints)

    serve_application(cmdline_args.core,
                      cmdline_args.nlu,
                      cmdline_args.connector,
                      cmdline_args.port,
                      cmdline_args.credentials,
                      cmdline_args.cors,
                      cmdline_args.auth_token,
                      cmdline_args.enable_api,
                      cmdline_args.jwt_secret,
                      cmdline_args.jwt_method,
                      _endpoints)
apache-2.0
michaelkleinhenz/themis
schema/linktype_blocks.go
2044
package schema import ( "themis/models" ) func createLinkTypeBlocks() []models.LinkType { linkTypes := []models.LinkType { createLinkTypeBlocksBugToStory(), createLinkTypeBlocksBugToTask(), createLinkTypeBlocksTaskToStory(), createLinkTypeBlocksStoryToStory(), } return linkTypes } func createLinkTypeBlocksBugToStory() models.LinkType { linkType := models.NewLinkType() linkType.Name = "Blocker Link" linkType.Description = "The blocker relationship." linkType.Version = 0 linkType.ForwardName = "blocks" linkType.ReverseName = "is blocked by" linkType.Topology = "graph" linkType.CategoryRef = "default" linkType.SourceWorkItemTypeRef = "bug" linkType.TargetWorkItemTypeRef = "story" return *linkType } func createLinkTypeBlocksBugToTask() models.LinkType { linkType := models.NewLinkType() linkType.Name = "Blocker Link" linkType.Description = "The blocker relationship." linkType.Version = 0 linkType.ForwardName = "blocks" linkType.ReverseName = "is blocked by" linkType.Topology = "graph" linkType.CategoryRef = "default" linkType.SourceWorkItemTypeRef = "bug" linkType.TargetWorkItemTypeRef = "task" return *linkType } func createLinkTypeBlocksTaskToStory() models.LinkType { linkType := models.NewLinkType() linkType.Name = "Blocker Link" linkType.Description = "The blocker relationship." linkType.Version = 0 linkType.ForwardName = "blocks" linkType.ReverseName = "is blocked by" linkType.Topology = "graph" linkType.CategoryRef = "default" linkType.SourceWorkItemTypeRef = "task" linkType.TargetWorkItemTypeRef = "story" return *linkType } func createLinkTypeBlocksStoryToStory() models.LinkType { linkType := models.NewLinkType() linkType.Name = "Blocker Link" linkType.Description = "The blocker relationship." linkType.Version = 0 linkType.ForwardName = "blocks" linkType.ReverseName = "is blocked by" linkType.Topology = "graph" linkType.CategoryRef = "default" linkType.SourceWorkItemTypeRef = "story" linkType.TargetWorkItemTypeRef = "story" return *linkType }
apache-2.0
skonves/Konves.KScript
src/Konves.KScript/ExpressionFactory.cs
3636
using Antlr4.Runtime;
using Antlr4.Runtime.Tree;
using Konves.KScript.Expressions;
using Konves.KScript.Parsing;
using System;
using System.Collections.Generic;
using System.Linq;

using static Konves.KScript.Parsing.KScriptParser;

namespace Konves.KScript
{
	internal static class ExpressionFactory
	{
		/// <summary>
		/// Recursively converts a parsed <see cref="ExpressionContext"/> into the
		/// corresponding <see cref="IExpression"/> tree.
		/// </summary>
		/// <exception cref="InvalidOperationException">
		/// Thrown when the parse tree has an unexpected shape.
		/// </exception>
		internal static IExpression Create(ExpressionContext context)
		{
			if (context.children.Count == 1)
			{
				// Single child: a literal keyword or a wrapped sub-expression.
				ITerminalNode first = context.children[0] as ITerminalNode;

				if (first != null && first.Symbol.Type == KScriptLexer.TRUE)
				{
					return new BooleanLiteralExpression(true);
				}
				else if (first != null && first.Symbol.Type == KScriptLexer.FALSE)
				{
					return new BooleanLiteralExpression(false);
				}
				else if (context.children[0] is BetweenExpressionContext)
				{
					BetweenExpressionContext btContext = context.children[0] as BetweenExpressionContext;

					Literal a = LiteralFactory.Create(btContext.literal(0));
					Literal b = LiteralFactory.Create(btContext.literal(1));
					Literal c = LiteralFactory.Create(btContext.literal(2));

					// NOTE(review): BETWEEN is lowered to [b, c) — inclusive lower
					// bound, exclusive upper bound. Confirm this asymmetry is the
					// intended language semantics.
					return new AndExpression(
						new RelationalExpression(a, b, KScriptLexer.GT_ET),
						new RelationalExpression(a, c, KScriptLexer.LT));
				}
				else if (context.children[0] is InExpressionContext)
				{
					InExpressionContext inContext = context.children[0] as InExpressionContext;

					Literal literal = LiteralFactory.Create(inContext.literal());
					IReadOnlyCollection<Literal> list = inContext.list().literal().Select(LiteralFactory.Create).ToList();

					return new InExpression(literal, list);
				}
				else if (context.children[0] is RelationalExpressionContext)
				{
					RelationalExpressionContext relContext = context.children[0] as RelationalExpressionContext;

					Literal a = LiteralFactory.Create(relContext.literal(0));
					IToken operatorToken = (relContext.children[1] as ITerminalNode).Symbol;
					Literal b = LiteralFactory.Create(relContext.literal(1));

					return new RelationalExpression(a, b, operatorToken.Type);
				}
				else
				{
					throw new InvalidOperationException();
				}
			}

			if (context.children.Count == 2)
			{
				// Unary form: NOT <expression>. Guard the cast like the other
				// branches do (the original dereferenced without a null check).
				ITerminalNode first = context.children[0] as ITerminalNode;

				if (first != null && first.Symbol.Type == KScriptLexer.NOT)
				{
					return new NotExpression(Create(context.expression()[0]));
				}

				throw new InvalidOperationException();
			}
			else if (context.children.Count == 3)
			{
				// Binary form (a AND b, a OR b) or a parenthesized expression.
				ITerminalNode first = context.children[0] as ITerminalNode;
				ITerminalNode middle = context.children[1] as ITerminalNode;
				ITerminalNode last = context.children[2] as ITerminalNode;

				if (middle != null && middle.Symbol.Type == KScriptLexer.AND)
				{
					IExpression a = Create(context.children[0] as ExpressionContext);
					IExpression b = Create(context.children[2] as ExpressionContext);

					return new AndExpression(a, b);
				}
				else if (middle != null && middle.Symbol.Type == KScriptLexer.OR)
				{
					IExpression a = Create(context.children[0] as ExpressionContext);
					IExpression b = Create(context.children[2] as ExpressionContext);

					return new OrExpression(a, b);
				}
				else if (first != null && last != null && first.Symbol.Type == KScriptLexer.OPEN_PAREN && last.Symbol.Type == KScriptLexer.CLOSE_PAREN)
				{
					return Create(context.children[1] as ExpressionContext);
				}
				else
				{
					throw new InvalidOperationException();
				}
			}
			else
			{
				// Any other child count is a malformed parse tree. (The original
				// also had an unreachable NotImplementedException after this
				// chain; removed since every branch above returns or throws.)
				throw new InvalidOperationException();
			}
		}
	}
}
apache-2.0
rjf1979/GNF
GNF.Domain/Entities/Entity.cs
2880
using System;
using System.Collections.Generic;
using System.Reflection;

namespace GNF.Domain.Entities
{
    /// <summary>
    /// Abstract entity base class with an <see cref="int"/> primary key.
    /// </summary>
    public abstract class Entity : Entity<int>, IEntity
    {

    }

    /// <summary>
    /// Abstract entity base class. Provides identity-based equality semantics.
    /// </summary>
    /// <typeparam name="TPrimaryKey">Type of the primary key of the entity</typeparam>
    public abstract class Entity<TPrimaryKey> : IEntity<TPrimaryKey>
    {
        /// <summary>
        /// Unique identifier for this entity.
        /// </summary>
        public virtual TPrimaryKey Id { get; set; }

        /// <summary>
        /// Whether this entity is transient, i.e. not yet persisted
        /// (its Id is still the default / non-positive value).
        /// </summary>
        /// <returns>True, if this entity is transient</returns>
        public virtual bool IsTransient()
        {
            // Check whether the Id equals the default value of its type.
            if (EqualityComparer<TPrimaryKey>.Default.Equals(Id, default(TPrimaryKey)))
            {
                return true;
            }
            // For numeric keys, non-positive values also count as transient.
            if (typeof(TPrimaryKey) == typeof(int))
            {
                return Convert.ToInt32(Id) <= 0;
            }
            if (typeof(TPrimaryKey) == typeof(long))
            {
                return Convert.ToInt64(Id) <= 0;
            }
            return false;
        }

        // Two entities are equal when they are the same instance, or are
        // assignment-compatible types that are both persisted with the same Id.
        public override bool Equals(object obj)
        {
            if (!(obj is Entity<TPrimaryKey>))
            {
                return false;
            }

            //Same instances must be considered as equal
            if (ReferenceEquals(this, obj))
            {
                return true;
            }

            //Transient objects are not considered as equal
            var other = (Entity<TPrimaryKey>)obj;
            if (IsTransient() && other.IsTransient())
            {
                return false;
            }

            var typeOfThis = GetType();
            var typeOfOther = other.GetType();
            if (!typeOfThis.GetTypeInfo().IsAssignableFrom(typeOfOther) && !typeOfOther.GetTypeInfo().IsAssignableFrom(typeOfThis))
            {
                return false;
            }

            return Id.Equals(other.Id);
        }

        // Hash code is derived from the Id so it is consistent with Equals.
        public override int GetHashCode()
        {
            return Id.GetHashCode();
        }

        // Null-safe equality operator consistent with Equals.
        public static bool operator ==(Entity<TPrimaryKey> left, Entity<TPrimaryKey> right)
        {
            if (Equals(left, null))
            {
                return Equals(right, null);
            }

            return left.Equals(right);
        }

        public static bool operator !=(Entity<TPrimaryKey> left, Entity<TPrimaryKey> right)
        {
            return !(left == right);
        }

        public override string ToString()
        {
            return $"[{GetType().Name} {Id}]";
        }
    }
}
apache-2.0
yuhaya/LocalServer
models/grades.go
1150
package models import "github.com/astaxie/beego/orm" type Grades struct { Id uint64 `orm:"fk;auto"` Guid string `orm:"unique;size(50)"` Name string `orm:"size(50)"` Rating uint64 `orm:"default(0)"` SchoolGuid string `orm:"size(50)"` } func (m *Grades) Insert() error { if _, err := orm.NewOrm().Insert(m); err != nil { return err } return nil } func (m *Grades) Delete(guid string) error { if _, err := orm.NewOrm().QueryTable(m).Filter("guid", guid).Delete(); err != nil { return err } return nil } func (m *Grades) Update(fields ...string) error { if _, err := orm.NewOrm().Update(m, fields...); err != nil { return err } return nil } func (m *Grades) Query() orm.QuerySeter { return orm.NewOrm().QueryTable(m) } func (m *Grades) MaxRating() uint64 { var list []*Grades if count, _ := m.Query().Count(); count > 0 { m.Query().OrderBy("-rating").All(&list) } else { return 0 } return list[0].Rating } /** * 获取所有的年纪信息 */ func GetAll() []*Grades { o := orm.NewOrm() var list []*Grades qs := o.QueryTable(new(Grades)) qs.OrderBy("-rating").All(&list) return list }
apache-2.0
YoungPeanut/YoungSamples
app/src/main/java/info/ipeanut/youngsamples/third/viewgroup/MaterialRippleLayout.java
29156
/* * Copyright (C) 2014 Balys Valentukevicius * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package info.ipeanut.youngsamples.third.viewgroup; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.AnimatorSet; import android.animation.ObjectAnimator; import android.content.Context; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Path; import android.graphics.Point; import android.graphics.Rect; import android.graphics.RectF; import android.os.Build; import android.util.AttributeSet; import android.util.Property; import android.util.TypedValue; import android.view.GestureDetector; import android.view.MotionEvent; import android.view.View; import android.view.ViewConfiguration; import android.view.ViewGroup; import android.view.ViewParent; import android.view.animation.AccelerateInterpolator; import android.view.animation.DecelerateInterpolator; import android.view.animation.LinearInterpolator; import android.widget.AdapterView; import android.widget.FrameLayout; import info.ipeanut.youngsamples.R; import static android.view.GestureDetector.SimpleOnGestureListener; import static android.view.ViewGroup.LayoutParams.MATCH_PARENT; /** * * <com.balysv.materialripple.MaterialRippleLayout xmlns:android="http://schemas.android.com/apk/res/android" 
xmlns:app="http://schemas.android.com/apk/res-auto" android:layout_width="match_parent" app:mrl_rippleInAdapter="true" android:layout_height="match_parent"> <TextView android:id="@android:id/text1" android:paddingTop="12dp" android:paddingLeft="30dp" android:paddingRight="30dp" android:paddingBottom="12dp" android:textSize="18sp" android:layout_width="match_parent" android:layout_height="match_parent"> </TextView> </com.balysv.materialripple.MaterialRippleLayout> * */ public class MaterialRippleLayout extends FrameLayout { private static final int DEFAULT_DURATION = 300; private static final int DEFAULT_FADE_DURATION = 75; private static final float DEFAULT_DIAMETER_DP = 35; private static final float DEFAULT_ALPHA = 0.1f; private static final int DEFAULT_COLOR = Color.BLACK; private static final boolean DEFAULT_HOVER = true; private static final boolean DEFAULT_DELAY_CLICK = true; private static final boolean DEFAULT_PERSISTENT = false; private static final boolean DEFAULT_SEARCH_ADAPTER = false; private static final boolean DEFAULT_RIPPLE_OVERLAY = false; private static final int DEFAULT_ROUNDED_CORNERS = 0; private static final int FADE_EXTRA_DELAY = 50; private static final long HOVER_DURATION = 2500; private static boolean enableRipple = true; //用于偏好设置 private final Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG); private final Rect bounds = new Rect(); private int rippleColor; private boolean rippleOverlay; private boolean rippleHover; private int rippleDiameter; private int rippleDuration; private int rippleAlpha; private boolean rippleDelayClick; private int rippleFadeDuration; private boolean ripplePersistent; private boolean rippleInAdapter; // 对触摸点的坐标有影响 private float rippleRoundedCorners; private float radius; private AdapterView parentAdapter; private View childView; private AnimatorSet rippleAnimator; private ObjectAnimator hoverAnimator; private Point currentCoords = new Point(); private Point previousCoords = new Point(); private int layerType; 
private boolean eventCancelled; private boolean prepressed; private int positionInAdapter; private GestureDetector gestureDetector; private PerformClickEvent pendingClickEvent; private PressedEvent pendingPressEvent; public static RippleBuilder on(View view) { return new RippleBuilder(view); } public MaterialRippleLayout(Context context) { this(context, null, 0); } public MaterialRippleLayout(Context context, AttributeSet attrs) { this(context, attrs, 0); } public MaterialRippleLayout(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); setWillNotDraw(false); gestureDetector = new GestureDetector(context, longClickListener); TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.MaterialRippleLayout); rippleColor = a.getColor(R.styleable.MaterialRippleLayout_mrl_rippleColor, DEFAULT_COLOR); rippleDiameter = a.getDimensionPixelSize( R.styleable.MaterialRippleLayout_mrl_rippleDimension, (int) dpToPx(getResources(), DEFAULT_DIAMETER_DP) ); rippleOverlay = a.getBoolean(R.styleable.MaterialRippleLayout_mrl_rippleOverlay, DEFAULT_RIPPLE_OVERLAY); rippleHover = a.getBoolean(R.styleable.MaterialRippleLayout_mrl_rippleHover, DEFAULT_HOVER); rippleDuration = a.getInt(R.styleable.MaterialRippleLayout_mrl_rippleDuration, DEFAULT_DURATION); rippleAlpha = (int) (255 * a.getFloat(R.styleable.MaterialRippleLayout_mrl_rippleAlpha, DEFAULT_ALPHA)); rippleDelayClick = a.getBoolean(R.styleable.MaterialRippleLayout_mrl_rippleDelayClick, DEFAULT_DELAY_CLICK); rippleFadeDuration = a.getInteger(R.styleable.MaterialRippleLayout_mrl_rippleFadeDuration, DEFAULT_FADE_DURATION); ripplePersistent = a.getBoolean(R.styleable.MaterialRippleLayout_mrl_ripplePersistent, DEFAULT_PERSISTENT); rippleInAdapter = a.getBoolean(R.styleable.MaterialRippleLayout_mrl_rippleInAdapter, DEFAULT_SEARCH_ADAPTER); rippleRoundedCorners = a.getDimensionPixelSize(R.styleable.MaterialRippleLayout_mrl_rippleRoundedCorners, DEFAULT_ROUNDED_CORNERS); a.recycle(); 
paint.setColor(rippleColor); paint.setAlpha(rippleAlpha); enableClipPathSupportIfNecessary(); } @SuppressWarnings("unchecked") public <T extends View> T getChildView() { return (T) childView; } @Override public final void addView(View child, int index, ViewGroup.LayoutParams params) { if (getChildCount() > 0) { throw new IllegalStateException("MaterialRippleLayout can host only one child"); } //noinspection unchecked childView = child; super.addView(child, index, params); } @Override public void setOnClickListener(OnClickListener onClickListener) { if (childView == null) { throw new IllegalStateException("MaterialRippleLayout must have a child view to handle clicks"); } childView.setOnClickListener(onClickListener); } @Override public boolean onInterceptTouchEvent(MotionEvent event) { if(!enableRipple) { return super.onInterceptTouchEvent(event); } return !findClickableViewInChild(childView, (int) event.getX(), (int) event.getY()); } @Override public boolean onTouchEvent(MotionEvent event) { if(!enableRipple) { return super.onTouchEvent(event); } boolean superOnTouchEvent = super.onTouchEvent(event); if (!isEnabled() || !childView.isEnabled()) return superOnTouchEvent; boolean isEventInBounds = bounds.contains((int) event.getX(), (int) event.getY()); if (isEventInBounds) { previousCoords.set(currentCoords.x, currentCoords.y); currentCoords.set((int) event.getX(), (int) event.getY()); } boolean gestureResult = gestureDetector.onTouchEvent(event); if (gestureResult || mHasPerformedLongPress) { return true; } else { int action = event.getActionMasked(); switch (action) { case MotionEvent.ACTION_UP: pendingClickEvent = new PerformClickEvent(); if (prepressed) { childView.setPressed(true); postDelayed( new Runnable() { @Override public void run() { childView.setPressed(false); } }, ViewConfiguration.getPressedStateDuration()); } if (isEventInBounds) { startRipple(pendingClickEvent); } else if (!rippleHover) { setRadius(0); } if (!rippleDelayClick && isEventInBounds) { 
pendingClickEvent.run(); } cancelPressedEvent(); break; case MotionEvent.ACTION_DOWN: setPositionInAdapter(); eventCancelled = false; pendingPressEvent = new PressedEvent(event); if (isInScrollingContainer()) { cancelPressedEvent(); prepressed = true; postDelayed(pendingPressEvent, ViewConfiguration.getTapTimeout()); } else { pendingPressEvent.run(); } break; case MotionEvent.ACTION_CANCEL: if (rippleInAdapter) { // dont use current coords in adapter since they tend to jump drastically on scroll currentCoords.set(previousCoords.x, previousCoords.y); previousCoords = new Point(); } childView.onTouchEvent(event); if (rippleHover) { if (!prepressed) { startRipple(null); } } else { childView.setPressed(false); } cancelPressedEvent(); break; case MotionEvent.ACTION_MOVE: if (rippleHover) { if (isEventInBounds && !eventCancelled) { invalidate(); } else if (!isEventInBounds) { startRipple(null); } } if (!isEventInBounds) { cancelPressedEvent(); if (hoverAnimator != null) { hoverAnimator.cancel(); } childView.onTouchEvent(event); eventCancelled = true; } break; } return true; } } private void cancelPressedEvent() { if (pendingPressEvent != null) { removeCallbacks(pendingPressEvent); prepressed = false; } } private boolean mHasPerformedLongPress; private SimpleOnGestureListener longClickListener = new SimpleOnGestureListener() { public void onLongPress(MotionEvent e) { mHasPerformedLongPress = childView.performLongClick(); if (mHasPerformedLongPress) { if (rippleHover) { startRipple(null); } cancelPressedEvent(); } } @Override public boolean onDown(MotionEvent e) { mHasPerformedLongPress = false; return super.onDown(e); } }; private void startHover() { if (eventCancelled) return; if (hoverAnimator != null) { hoverAnimator.cancel(); } final float radius = (float) (Math.sqrt(Math.pow(getWidth(), 2) + Math.pow(getHeight(), 2)) * 1.2f); hoverAnimator = ObjectAnimator.ofFloat(this, radiusProperty, rippleDiameter, radius) .setDuration(HOVER_DURATION); 
hoverAnimator.setInterpolator(new LinearInterpolator()); hoverAnimator.start(); } private void startRipple(final Runnable animationEndRunnable) { if (eventCancelled) return; float endRadius = getEndRadius(); cancelAnimations(); rippleAnimator = new AnimatorSet(); rippleAnimator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { if (!ripplePersistent) { setRadius(0); setRippleAlpha(rippleAlpha); } if (animationEndRunnable != null && rippleDelayClick) { animationEndRunnable.run(); } childView.setPressed(false); } }); ObjectAnimator ripple = ObjectAnimator.ofFloat(this, radiusProperty, radius, endRadius); ripple.setDuration(rippleDuration); ripple.setInterpolator(new DecelerateInterpolator()); ObjectAnimator fade = ObjectAnimator.ofInt(this, circleAlphaProperty, rippleAlpha, 0); fade.setDuration(rippleFadeDuration); fade.setInterpolator(new AccelerateInterpolator()); fade.setStartDelay(rippleDuration - rippleFadeDuration - FADE_EXTRA_DELAY); if (ripplePersistent) { rippleAnimator.play(ripple); } else if (getRadius() > endRadius) { fade.setStartDelay(0); rippleAnimator.play(fade); } else { rippleAnimator.playTogether(ripple, fade); } rippleAnimator.start(); } private void cancelAnimations() { if (rippleAnimator != null) { rippleAnimator.cancel(); rippleAnimator.removeAllListeners(); } if (hoverAnimator != null) { hoverAnimator.cancel(); } } private float getEndRadius() { final int width = getWidth(); final int height = getHeight(); final int halfWidth = width / 2; final int halfHeight = height / 2; final float radiusX = halfWidth > currentCoords.x ? width - currentCoords.x : currentCoords.x; final float radiusY = halfHeight > currentCoords.y ? 
height - currentCoords.y : currentCoords.y; return (float) Math.sqrt(Math.pow(radiusX, 2) + Math.pow(radiusY, 2)) * 1.2f; } private boolean isInScrollingContainer() { ViewParent p = getParent(); while (p != null && p instanceof ViewGroup) { if (((ViewGroup) p).shouldDelayChildPressedState()) { return true; } p = p.getParent(); } return false; } private AdapterView findParentAdapterView() { if (parentAdapter != null) { return parentAdapter; } ViewParent current = getParent(); while (true) { if (current instanceof AdapterView) { parentAdapter = (AdapterView) current; return parentAdapter; } else { try { current = current.getParent(); } catch (NullPointerException npe) { throw new RuntimeException("Could not find a parent AdapterView"); } } } } private void setPositionInAdapter() { if (rippleInAdapter) { positionInAdapter = findParentAdapterView().getPositionForView(MaterialRippleLayout.this); } } private boolean adapterPositionChanged() { if (rippleInAdapter) { int newPosition = findParentAdapterView().getPositionForView(MaterialRippleLayout.this); final boolean changed = newPosition != positionInAdapter; positionInAdapter = newPosition; if (changed) { cancelPressedEvent(); cancelAnimations(); childView.setPressed(false); setRadius(0); } return changed; } return false; } private boolean findClickableViewInChild(View view, int x, int y) { if (view instanceof ViewGroup) { ViewGroup viewGroup = (ViewGroup) view; for (int i = 0; i < viewGroup.getChildCount(); i++) { View child = viewGroup.getChildAt(i); final Rect rect = new Rect(); child.getHitRect(rect); final boolean contains = rect.contains(x, y); if (contains) { return findClickableViewInChild(child, x - rect.left, y - rect.top); } } } else if (view != childView) { return (view.isEnabled() && (view.isClickable() || view.isLongClickable() || view.isFocusableInTouchMode())); } return view.isFocusableInTouchMode(); } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, 
oldw, oldh); bounds.set(0, 0, w, h); } @Override public boolean isInEditMode() { return true; } /* * Drawing */ @Override public void draw(Canvas canvas) { if(!enableRipple) { super.draw(canvas); return; } final boolean positionChanged = adapterPositionChanged(); Path paddingPath = new Path(); Rect padding = new Rect(); if(getBackground()!=null) { getBackground().getPadding(padding); } paddingPath.addRect(padding.left, padding.top, canvas.getWidth() - padding.right , canvas.getHeight() - padding.bottom, Path.Direction.CW); if (rippleOverlay) { super.draw(canvas); if (!positionChanged) { canvas.save(); canvas.clipPath(paddingPath); if (rippleRoundedCorners != 0) { Path clipPath = new Path(); RectF rect = new RectF(padding.left, padding.top, canvas.getWidth() - padding.right, canvas.getHeight() - padding.bottom); clipPath.addRoundRect(rect, rippleRoundedCorners, rippleRoundedCorners, Path.Direction.CW); canvas.clipPath(clipPath); } canvas.drawCircle(currentCoords.x, currentCoords.y, radius, paint); canvas.restore(); } } else { if (!positionChanged) { canvas.save(); canvas.clipPath(paddingPath); canvas.drawCircle(currentCoords.x, currentCoords.y, radius, paint); canvas.restore(); } super.draw(canvas); } } /* * Animations */ private Property<MaterialRippleLayout, Float> radiusProperty = new Property<MaterialRippleLayout, Float>(Float.class, "radius") { @Override public Float get(MaterialRippleLayout object) { return object.getRadius(); } @Override public void set(MaterialRippleLayout object, Float value) { object.setRadius(value); } }; private float getRadius() { return radius; } public void setRadius(float radius) { this.radius = radius; invalidate(); } private Property<MaterialRippleLayout, Integer> circleAlphaProperty = new Property<MaterialRippleLayout, Integer>(Integer.class, "rippleAlpha") { @Override public Integer get(MaterialRippleLayout object) { return object.getRippleAlpha(); } @Override public void set(MaterialRippleLayout object, Integer value) { 
object.setRippleAlpha(value); } }; public int getRippleAlpha() { return paint.getAlpha(); } public void setRippleAlpha(Integer rippleAlpha) { paint.setAlpha(rippleAlpha); invalidate(); } /* * Accessor */ public void setRippleColor(int rippleColor) { this.rippleColor = rippleColor; paint.setColor(rippleColor); paint.setAlpha(rippleAlpha); invalidate(); } public void setRippleOverlay(boolean rippleOverlay) { this.rippleOverlay = rippleOverlay; } public void setRippleDiameter(int rippleDiameter) { this.rippleDiameter = rippleDiameter; } public void setRippleDuration(int rippleDuration) { this.rippleDuration = rippleDuration; } public void setRippleHover(boolean rippleHover) { this.rippleHover = rippleHover; } public void setRippleDelayClick(boolean rippleDelayClick) { this.rippleDelayClick = rippleDelayClick; } public void setRippleFadeDuration(int rippleFadeDuration) { this.rippleFadeDuration = rippleFadeDuration; } public void setRipplePersistent(boolean ripplePersistent) { this.ripplePersistent = ripplePersistent; } public void setRippleInAdapter(boolean rippleInAdapter) { this.rippleInAdapter = rippleInAdapter; } public void setRippleRoundedCorners(int rippleRoundedCorner) { this.rippleRoundedCorners = rippleRoundedCorner; enableClipPathSupportIfNecessary(); } public void setDefaultRippleAlpha(int alpha) { this.rippleAlpha = alpha; paint.setAlpha(alpha); invalidate(); } public void performRipple() { currentCoords = new Point(getWidth() / 2, getHeight() / 2); startRipple(null); } public void performRipple(Point anchor) { currentCoords = new Point(anchor.x, anchor.y); startRipple(null); } /** * {@link Canvas#clipPath(Path)} is not supported in hardware accelerated layers * before API 18. 
Use software layer instead * <p/> * https://developer.android.com/guide/topics/graphics/hardware-accel.html#unsupported */ private void enableClipPathSupportIfNecessary() { if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.JELLY_BEAN_MR1) { // if (rippleRoundedCorners != 0||getBackground()!=null) { // layerType = getLayerType(); setLayerType(LAYER_TYPE_SOFTWARE, null); // } else { // setLayerType(layerType, null); // } } } /* * Helper */ private class PerformClickEvent implements Runnable { @Override public void run() { if (mHasPerformedLongPress) return; // if parent is an AdapterView, try to call its ItemClickListener if (getParent() instanceof AdapterView) { clickAdapterView((AdapterView) getParent()); } else if (rippleInAdapter) { // find adapter view clickAdapterView(findParentAdapterView()); } else { // otherwise, just perform click on child childView.performClick(); } } private void clickAdapterView(AdapterView parent) { final int position = parent.getPositionForView(MaterialRippleLayout.this); final long itemId = parent.getAdapter() != null ? 
parent.getAdapter().getItemId(position) : 0; if (position != AdapterView.INVALID_POSITION) { parent.performItemClick(MaterialRippleLayout.this, position, itemId); } } } private final class PressedEvent implements Runnable { private final MotionEvent event; public PressedEvent(MotionEvent event) { this.event = event; } @Override public void run() { prepressed = false; childView.setLongClickable(false);//prevent the child's long click,let's the ripple layout call it's performLongClick childView.onTouchEvent(event); childView.setPressed(true); if (rippleHover) { startHover(); } } } static float dpToPx(Resources resources, float dp) { return TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dp, resources.getDisplayMetrics()); } /* * Builder */ public static class RippleBuilder { private final Context context; private final View child; private int rippleColor = DEFAULT_COLOR; private boolean rippleOverlay = DEFAULT_RIPPLE_OVERLAY; private boolean rippleHover = DEFAULT_HOVER; private float rippleDiameter = DEFAULT_DIAMETER_DP; private int rippleDuration = DEFAULT_DURATION; private float rippleAlpha = DEFAULT_ALPHA; private boolean rippleDelayClick = DEFAULT_DELAY_CLICK; private int rippleFadeDuration = DEFAULT_FADE_DURATION; private boolean ripplePersistent = DEFAULT_PERSISTENT; private boolean rippleSearchAdapter = DEFAULT_SEARCH_ADAPTER; private float rippleRoundedCorner = DEFAULT_ROUNDED_CORNERS; public RippleBuilder(View child) { this.child = child; this.context = child.getContext(); } public RippleBuilder rippleColor(int color) { this.rippleColor = color; return this; } public RippleBuilder rippleOverlay(boolean overlay) { this.rippleOverlay = overlay; return this; } public RippleBuilder rippleHover(boolean hover) { this.rippleHover = hover; return this; } public RippleBuilder rippleDiameterDp(int diameterDp) { this.rippleDiameter = diameterDp; return this; } public RippleBuilder rippleDuration(int duration) { this.rippleDuration = duration; return this; } 
public RippleBuilder rippleAlpha(float alpha) { this.rippleAlpha = 255 * alpha; return this; } public RippleBuilder rippleDelayClick(boolean delayClick) { this.rippleDelayClick = delayClick; return this; } public RippleBuilder rippleFadeDuration(int fadeDuration) { this.rippleFadeDuration = fadeDuration; return this; } public RippleBuilder ripplePersistent(boolean persistent) { this.ripplePersistent = persistent; return this; } public RippleBuilder rippleInAdapter(boolean inAdapter) { this.rippleSearchAdapter = inAdapter; return this; } public RippleBuilder rippleRoundedCorners(int radiusDp) { this.rippleRoundedCorner = radiusDp; return this; } public MaterialRippleLayout create() { MaterialRippleLayout layout = new MaterialRippleLayout(context); layout.setRippleColor(rippleColor); layout.setDefaultRippleAlpha((int) rippleAlpha); layout.setRippleDelayClick(rippleDelayClick); layout.setRippleDiameter((int) dpToPx(context.getResources(), rippleDiameter)); layout.setRippleDuration(rippleDuration); layout.setRippleFadeDuration(rippleFadeDuration); layout.setRippleHover(rippleHover); layout.setRipplePersistent(ripplePersistent); layout.setRippleOverlay(rippleOverlay); layout.setRippleInAdapter(rippleSearchAdapter); layout.setRippleRoundedCorners((int) dpToPx(context.getResources(), rippleRoundedCorner)); ViewGroup.LayoutParams params = child.getLayoutParams(); ViewGroup parent = (ViewGroup) child.getParent(); int index = 0; if (parent != null && parent instanceof MaterialRippleLayout) { throw new IllegalStateException("MaterialRippleLayout could not be created: parent of the view already is a MaterialRippleLayout"); } if (parent != null) { index = parent.indexOfChild(child); parent.removeView(child); } layout.addView(child, new ViewGroup.LayoutParams(MATCH_PARENT, MATCH_PARENT)); if (parent != null) { parent.addView(layout, index, params); } return layout; } } public static void setEnableRipple(boolean enableRipple) { MaterialRippleLayout.enableRipple = enableRipple; } }
apache-2.0
sarl/sarl
main/coreplugins/io.sarl.lang/src-gen/io/sarl/lang/codebuilder/appenders/SarlClassSourceAppender.java
7328
/* * $Id$ * * File is automatically generated by the Xtext language generator. * Do not change it. * * SARL is an general-purpose agent programming language. * More details on http://www.sarl.io * * Copyright (C) 2014-2021 the original authors or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sarl.lang.codebuilder.appenders; import io.sarl.lang.codebuilder.builders.ISarlActionBuilder; import io.sarl.lang.codebuilder.builders.ISarlAnnotationTypeBuilder; import io.sarl.lang.codebuilder.builders.ISarlClassBuilder; import io.sarl.lang.codebuilder.builders.ISarlConstructorBuilder; import io.sarl.lang.codebuilder.builders.ISarlEnumerationBuilder; import io.sarl.lang.codebuilder.builders.ISarlFieldBuilder; import io.sarl.lang.codebuilder.builders.ISarlInterfaceBuilder; import io.sarl.lang.codebuilder.builders.ITypeParameterBuilder; import io.sarl.lang.sarl.SarlClass; import io.sarl.lang.sarl.SarlScript; import java.io.IOException; import org.eclipse.emf.common.notify.Notifier; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.xtend.core.xtend.XtendTypeDeclaration; import org.eclipse.xtext.common.types.JvmParameterizedTypeReference; import org.eclipse.xtext.common.types.access.IJvmTypeProvider; import org.eclipse.xtext.xbase.compiler.ISourceAppender; import org.eclipse.xtext.xbase.lib.Pure; /** Source adapter of a Sarl SarlClass. 
*/ @SuppressWarnings("all") public class SarlClassSourceAppender extends AbstractSourceAppender implements ISarlClassBuilder { private final ISarlClassBuilder builder; public SarlClassSourceAppender(ISarlClassBuilder builder) { this.builder = builder; } public void build(ISourceAppender appender) throws IOException { build(this.builder.getSarlClass(), appender); } /** Find the reference to the type with the given name. * @param typeName the fully qualified name of the type * @return the type reference. */ public JvmParameterizedTypeReference newTypeRef(String typeName) { return this.builder.newTypeRef(typeName); } /** Find the reference to the type with the given name. * @param context the context for the type reference use * @param typeName the fully qualified name of the type * @return the type reference. */ public JvmParameterizedTypeReference newTypeRef(Notifier context, String typeName) { return this.builder.newTypeRef(context, typeName); } public IJvmTypeProvider getTypeResolutionContext() { return this.builder.getTypeResolutionContext(); } /** Dispose the resource. */ public void dispose() { this.builder.dispose(); } @Override @Pure public String toString() { return this.builder.toString(); } /** Initialize the Ecore element when inside a script. */ public void eInit(SarlScript script, String name, IJvmTypeProvider context) { this.builder.eInit(script, name, context); } /** Initialize the Ecore element when inner type declaration. */ public void eInit(XtendTypeDeclaration container, String name, IJvmTypeProvider context) { this.builder.eInit(container, name, context); } /** Replies the generated SarlClass. */ @Pure public SarlClass getSarlClass() { return this.builder.getSarlClass(); } /** Replies the resource to which the SarlClass is attached. */ @Pure public Resource eResource() { return getSarlClass().eResource(); } /** Change the documentation of the element. * * <p>The documentation will be displayed just before the element. 
* * @param doc the documentation. */ public void setDocumentation(String doc) { this.builder.setDocumentation(doc); } /** Change the super type. * @param superType the qualified name of the super type, * or {@code null} if the default type. */ public void setExtends(String superType) { this.builder.setExtends(superType); } /** Add an implemented type. * @param type the qualified name of the implemented type. */ public void addImplements(String type) { this.builder.addImplements(type); } /** Add a modifier. * @param modifier the modifier to add. */ public void addModifier(String modifier) { this.builder.addModifier(modifier); } /** Add a type parameter. * @param name the simple name of the type parameter. * @return the builder of type parameter. */ public ITypeParameterBuilder addTypeParameter(String name) { return this.builder.addTypeParameter(name); } /** Create a SarlConstructor. * @return the builder. */ public ISarlConstructorBuilder addSarlConstructor() { return this.builder.addSarlConstructor(); } /** Create a SarlField. * @param name the name of the SarlField. * @return the builder. */ public ISarlFieldBuilder addVarSarlField(String name) { return this.builder.addVarSarlField(name); } /** Create a SarlField. * @param name the name of the SarlField. * @return the builder. */ public ISarlFieldBuilder addValSarlField(String name) { return this.builder.addValSarlField(name); } /** Create a SarlField. * * <p>This function is equivalent to {@link #addVarSarlField}. * @param name the name of the SarlField. * @return the builder. */ public ISarlFieldBuilder addSarlField(String name) { return this.builder.addSarlField(name); } /** Create a SarlAction. * @param name the name of the SarlAction. * @return the builder. */ public ISarlActionBuilder addDefSarlAction(String name) { return this.builder.addDefSarlAction(name); } /** Create a SarlAction. * @param name the name of the SarlAction. * @return the builder. 
*/ public ISarlActionBuilder addOverrideSarlAction(String name) { return this.builder.addOverrideSarlAction(name); } /** Create a SarlAction. * * <p>This function is equivalent to {@link #addDefSarlAction}. * @param name the name of the SarlAction. * @return the builder. */ public ISarlActionBuilder addSarlAction(String name) { return this.builder.addSarlAction(name); } /** Create a SarlClass. * @param name the name of the SarlClass. * @return the builder. */ public ISarlClassBuilder addSarlClass(String name) { return this.builder.addSarlClass(name); } /** Create a SarlInterface. * @param name the name of the SarlInterface. * @return the builder. */ public ISarlInterfaceBuilder addSarlInterface(String name) { return this.builder.addSarlInterface(name); } /** Create a SarlEnumeration. * @param name the name of the SarlEnumeration. * @return the builder. */ public ISarlEnumerationBuilder addSarlEnumeration(String name) { return this.builder.addSarlEnumeration(name); } /** Create a SarlAnnotationType. * @param name the name of the SarlAnnotationType. * @return the builder. */ public ISarlAnnotationTypeBuilder addSarlAnnotationType(String name) { return this.builder.addSarlAnnotationType(name); } }
apache-2.0
ebyhr/presto
core/trino-main/src/main/java/io/trino/execution/SqlQueryManager.java
11329
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.execution; import com.google.common.collect.Ordering; import com.google.common.util.concurrent.ListenableFuture; import io.airlift.concurrent.ThreadPoolExecutorMBean; import io.airlift.log.Logger; import io.airlift.units.DataSize; import io.airlift.units.Duration; import io.trino.ExceededCpuLimitException; import io.trino.ExceededScanLimitException; import io.trino.Session; import io.trino.execution.QueryExecution.QueryOutputInfo; import io.trino.execution.StateMachine.StateChangeListener; import io.trino.memory.ClusterMemoryManager; import io.trino.server.BasicQueryInfo; import io.trino.server.protocol.Slug; import io.trino.spi.QueryId; import io.trino.spi.TrinoException; import io.trino.sql.planner.Plan; import org.weakref.jmx.Managed; import org.weakref.jmx.Nested; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.annotation.concurrent.ThreadSafe; import javax.inject.Inject; import java.util.List; import java.util.NoSuchElementException; import java.util.Objects; import java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.util.concurrent.Futures.immediateFailedFuture; import static 
io.airlift.concurrent.Threads.threadsNamed; import static io.trino.SystemSessionProperties.getQueryMaxCpuTime; import static io.trino.SystemSessionProperties.getQueryMaxScanPhysicalBytes; import static io.trino.execution.QueryState.RUNNING; import static io.trino.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.Executors.newCachedThreadPool; import static java.util.concurrent.Executors.newScheduledThreadPool; @ThreadSafe public class SqlQueryManager implements QueryManager { private static final Logger log = Logger.get(SqlQueryManager.class); private final ClusterMemoryManager memoryManager; private final QueryTracker<QueryExecution> queryTracker; private final Duration maxQueryCpuTime; private final Optional<DataSize> maxQueryScanPhysicalBytes; private final ExecutorService queryExecutor; private final ThreadPoolExecutorMBean queryExecutorMBean; private final ScheduledExecutorService queryManagementExecutor; private final ThreadPoolExecutorMBean queryManagementExecutorMBean; @Inject public SqlQueryManager(ClusterMemoryManager memoryManager, QueryManagerConfig queryManagerConfig) { this.memoryManager = requireNonNull(memoryManager, "memoryManager is null"); this.maxQueryCpuTime = queryManagerConfig.getQueryMaxCpuTime(); this.maxQueryScanPhysicalBytes = queryManagerConfig.getQueryMaxScanPhysicalBytes(); this.queryExecutor = newCachedThreadPool(threadsNamed("query-scheduler-%s")); this.queryExecutorMBean = new ThreadPoolExecutorMBean((ThreadPoolExecutor) queryExecutor); this.queryManagementExecutor = newScheduledThreadPool(queryManagerConfig.getQueryManagerExecutorPoolSize(), threadsNamed("query-management-%s")); this.queryManagementExecutorMBean = new ThreadPoolExecutorMBean((ThreadPoolExecutor) queryManagementExecutor); this.queryTracker = new QueryTracker<>(queryManagerConfig, queryManagementExecutor); } @PostConstruct public void start() { 
queryTracker.start(); queryManagementExecutor.scheduleWithFixedDelay(() -> { try { enforceMemoryLimits(); } catch (Throwable e) { log.error(e, "Error enforcing memory limits"); } try { enforceCpuLimits(); } catch (Throwable e) { log.error(e, "Error enforcing query CPU time limits"); } try { enforceScanLimits(); } catch (Throwable e) { log.error(e, "Error enforcing query scan bytes limits"); } }, 1, 1, TimeUnit.SECONDS); } @PreDestroy public void stop() { queryTracker.stop(); queryManagementExecutor.shutdownNow(); queryExecutor.shutdownNow(); } @Override public List<BasicQueryInfo> getQueries() { return queryTracker.getAllQueries().stream() .map(queryExecution -> { try { return queryExecution.getBasicQueryInfo(); } catch (RuntimeException ignored) { return null; } }) .filter(Objects::nonNull) .collect(toImmutableList()); } @Override public void addOutputInfoListener(QueryId queryId, Consumer<QueryOutputInfo> listener) { requireNonNull(listener, "listener is null"); queryTracker.getQuery(queryId).addOutputInfoListener(listener); } @Override public void outputTaskFailed(TaskId taskId, Throwable failure) { queryTracker.getQuery(taskId.getQueryId()).outputTaskFailed(taskId, failure); } @Override public void addStateChangeListener(QueryId queryId, StateChangeListener<QueryState> listener) { requireNonNull(listener, "listener is null"); queryTracker.getQuery(queryId).addStateChangeListener(listener); } @Override public ListenableFuture<QueryState> getStateChange(QueryId queryId, QueryState currentState) { return queryTracker.tryGetQuery(queryId) .map(query -> query.getStateChange(currentState)) .orElseGet(() -> immediateFailedFuture(new NoSuchElementException())); } @Override public BasicQueryInfo getQueryInfo(QueryId queryId) { return queryTracker.getQuery(queryId).getBasicQueryInfo(); } @Override public QueryInfo getFullQueryInfo(QueryId queryId) throws NoSuchElementException { return queryTracker.getQuery(queryId).getQueryInfo(); } @Override public Session 
getQuerySession(QueryId queryId) throws NoSuchElementException { return queryTracker.getQuery(queryId).getSession(); } @Override public Slug getQuerySlug(QueryId queryId) { return queryTracker.getQuery(queryId).getSlug(); } public Plan getQueryPlan(QueryId queryId) { return queryTracker.getQuery(queryId).getQueryPlan(); } public void addFinalQueryInfoListener(QueryId queryId, StateChangeListener<QueryInfo> stateChangeListener) { queryTracker.getQuery(queryId).addFinalQueryInfoListener(stateChangeListener); } @Override public QueryState getQueryState(QueryId queryId) { return queryTracker.getQuery(queryId).getState(); } @Override public void recordHeartbeat(QueryId queryId) { queryTracker.tryGetQuery(queryId) .ifPresent(QueryExecution::recordHeartbeat); } @Override public void createQuery(QueryExecution queryExecution) { requireNonNull(queryExecution, "queryExecution is null"); if (!queryTracker.addQuery(queryExecution)) { throw new TrinoException(GENERIC_INTERNAL_ERROR, format("Query %s already registered", queryExecution.getQueryId())); } queryExecution.addFinalQueryInfoListener(finalQueryInfo -> { // execution MUST be added to the expiration queue or there will be a leak queryTracker.expireQuery(queryExecution.getQueryId()); }); queryExecution.start(); } @Override public void failQuery(QueryId queryId, Throwable cause) { requireNonNull(cause, "cause is null"); queryTracker.tryGetQuery(queryId) .ifPresent(query -> query.fail(cause)); } @Override public void cancelQuery(QueryId queryId) { log.debug("Cancel query %s", queryId); queryTracker.tryGetQuery(queryId) .ifPresent(QueryExecution::cancelQuery); } @Override public void cancelStage(StageId stageId) { requireNonNull(stageId, "stageId is null"); log.debug("Cancel stage %s", stageId); queryTracker.tryGetQuery(stageId.getQueryId()) .ifPresent(query -> query.cancelStage(stageId)); } @Managed(description = "Query scheduler executor") @Nested public ThreadPoolExecutorMBean getExecutor() { return queryExecutorMBean; } 
@Managed(description = "Query query management executor") @Nested public ThreadPoolExecutorMBean getManagementExecutor() { return queryManagementExecutorMBean; } /** * Enforce memory limits at the query level */ private void enforceMemoryLimits() { List<QueryExecution> runningQueries = queryTracker.getAllQueries().stream() .filter(query -> query.getState() == RUNNING) .collect(toImmutableList()); memoryManager.process(runningQueries, this::getQueries); } /** * Enforce query CPU time limits */ private void enforceCpuLimits() { for (QueryExecution query : queryTracker.getAllQueries()) { Duration cpuTime = query.getTotalCpuTime(); Duration sessionLimit = getQueryMaxCpuTime(query.getSession()); Duration limit = Ordering.natural().min(maxQueryCpuTime, sessionLimit); if (cpuTime.compareTo(limit) > 0) { query.fail(new ExceededCpuLimitException(limit)); } } } /** * Enforce query scan physical bytes limits */ private void enforceScanLimits() { for (QueryExecution query : queryTracker.getAllQueries()) { Optional<DataSize> limitOpt = getQueryMaxScanPhysicalBytes(query.getSession()); if (maxQueryScanPhysicalBytes.isPresent()) { limitOpt = limitOpt .flatMap(sessionLimit -> maxQueryScanPhysicalBytes.map(serverLimit -> Ordering.natural().min(serverLimit, sessionLimit))) .or(() -> maxQueryScanPhysicalBytes); } limitOpt.ifPresent(limit -> { DataSize scan = query.getBasicQueryInfo().getQueryStats().getPhysicalInputDataSize(); if (scan.compareTo(limit) > 0) { query.fail(new ExceededScanLimitException(limit)); } }); } } }
apache-2.0
junchenChow/exciting-app
app/src/main/java/me/vociegif/android/mvp/Presenter.java
290
package me.vociegif.android.mvp; import android.support.annotation.NonNull; /** * Created by Yoh Asakura. * * @version 1.0 * * Address : https://github.com/junchenChow */ public interface Presenter<V> { void bindView(@NonNull V view); void destroy(); void start(); }
apache-2.0
scalingdata/goavro
helpers_test.go
3223
// Copyright 2015 LinkedIn Corp. Licensed under the Apache License, // Version 2.0 (the "License"); you may not use this file except in // compliance with the License.
 You may obtain a copy of the License // at http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // 
distributed under the License is distributed on an "AS IS" BASIS, // 
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied.Copyright [201X] LinkedIn Corp. Licensed under the Apache // License, Version 2.0 (the "License"); you may not use this file // except in compliance with the License.
 You may obtain a copy of // the License at http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // 
distributed under the License is distributed on an "AS IS" BASIS, // 
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. package goavro import ( "bytes" "fmt" "io" "strings" "testing" ) type testBuffer interface { io.ReadWriter Bytes() []byte } // A byte buffer for testing that fulfills io.ReadWriter, but can't be // upcast to ByteWriter or StringWriter type simpleBuffer struct { buf bytes.Buffer } func (self *simpleBuffer) Write(b []byte) (n int, err error) { return self.buf.Write(b) } func (self *simpleBuffer) Bytes() []byte { return self.buf.Bytes() } func (self *simpleBuffer) Read(p []byte) (n int, err error) { return self.buf.Read(p) } func checkError(t *testing.T, actualError error, expectedError interface{}) { if expectedError == nil { if actualError != nil { t.Errorf("Actual: %#v; Expected: %#v", actualError.Error(), expectedError) } } else { if actualError == nil { t.Errorf("Actual: %#v; Expected: %#v", actualError, expectedError) } else { var expected error switch expectedError.(type) { case string: expected = fmt.Errorf(expectedError.(string)) case error: expected = expectedError.(error) } if !strings.Contains(actualError.Error(), expected.Error()) { t.Errorf("Actual: %#v; Expected to contain: %#v", actualError.Error(), expected.Error()) } } } } func checkErrorFatal(t *testing.T, actualError error, expectedError interface{}) { if expectedError == nil { if actualError != nil { t.Fatalf("Actual: %#v; Expected: %#v", actualError.Error(), expectedError) } } else { if actualError == nil { t.Fatalf("Actual: %#v; Expected: %#v", actualError, expectedError) } else { var expected error switch expectedError.(type) { case string: expected = fmt.Errorf(expectedError.(string)) case error: expected = expectedError.(error) } if !strings.Contains(actualError.Error(), expected.Error()) { t.Fatalf("Actual: %#v; Expected to contain: %#v", actualError.Error(), expected.Error()) } } } } func checkResponse(t *testing.T, bb *bytes.Buffer, n int, expectedBytes []byte) { expectedCount := len(expectedBytes) if n != 
expectedCount { t.Errorf("Actual: %#v; Expected: %#v", n, expectedCount) } if bytes.Compare(bb.Bytes(), expectedBytes) != 0 { t.Errorf("Actual: %#v; Expected: %#v", bb.Bytes(), expectedBytes) } }
apache-2.0
boalang/compiler
src/compiled-proto/boa/types/Diff.java
99398
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: diff.proto package boa.types; public final class Diff { private Diff() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface ChangedFileOrBuilder extends com.google.protobuf.MessageOrBuilder { // required .boa.types.ChangeKind change = 1; /** * <code>required .boa.types.ChangeKind change = 1;</code> * * <pre> ** The kind of change for this file * </pre> */ boolean hasChange(); /** * <code>required .boa.types.ChangeKind change = 1;</code> * * <pre> ** The kind of change for this file * </pre> */ boa.types.Shared.ChangeKind getChange(); // required .boa.types.ChangedFile.FileKind kind = 2; /** * <code>required .boa.types.ChangedFile.FileKind kind = 2;</code> * * <pre> ** The kind of file * </pre> */ boolean hasKind(); /** * <code>required .boa.types.ChangedFile.FileKind kind = 2;</code> * * <pre> ** The kind of file * </pre> */ boa.types.Diff.ChangedFile.FileKind getKind(); // required string name = 3; /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ boolean hasName(); /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ java.lang.String getName(); /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ com.google.protobuf.ByteString getNameBytes(); // required uint64 key = 4; /** * <code>required uint64 key = 4;</code> * * <pre> ** @exclude * </pre> */ boolean hasKey(); /** * <code>required uint64 key = 4;</code> * * <pre> ** @exclude * </pre> */ long getKey(); // required bool ast = 5; /** * <code>required bool ast = 5;</code> * * <pre> ** @exclude Indicates if this file has a corresponding parsed AST or not * </pre> */ boolean hasAst(); /** * <code>required bool ast = 5;</code> * * <pre> ** @exclude Indicates if this file has a corresponding parsed AST or not * </pre> */ boolean 
getAst(); // optional .boa.types.CommentsRoot comments = 6; /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ boolean hasComments(); /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ boa.types.Ast.CommentsRoot getComments(); /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ boa.types.Ast.CommentsRootOrBuilder getCommentsOrBuilder(); // repeated .boa.types.ChangeKind changes = 7; /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ java.util.List<boa.types.Shared.ChangeKind> getChangesList(); /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ int getChangesCount(); /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ boa.types.Shared.ChangeKind getChanges(int index); // repeated string previous_names = 8; /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ java.util.List<java.lang.String> getPreviousNamesList(); /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ int getPreviousNamesCount(); /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ java.lang.String getPreviousNames(int index); /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ 
com.google.protobuf.ByteString getPreviousNamesBytes(int index); // repeated int32 previous_versions = 9; /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ java.util.List<java.lang.Integer> getPreviousVersionsList(); /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ int getPreviousVersionsCount(); /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ int getPreviousVersions(int index); // repeated int32 previous_indices = 10; /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ java.util.List<java.lang.Integer> getPreviousIndicesList(); /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ int getPreviousIndicesCount(); /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ int getPreviousIndices(int index); } /** * Protobuf type {@code boa.types.ChangedFile} * * <pre> ** A file committed in a Revision * </pre> */ public static final class ChangedFile extends com.google.protobuf.GeneratedMessage implements ChangedFileOrBuilder { // Use ChangedFile.newBuilder() to construct. 
private ChangedFile(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } private ChangedFile(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private static final ChangedFile defaultInstance; public static ChangedFile getDefaultInstance() { return defaultInstance; } public ChangedFile getDefaultInstanceForType() { return defaultInstance; } private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ChangedFile( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { initFields(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 8: { int rawValue = input.readEnum(); boa.types.Shared.ChangeKind value = boa.types.Shared.ChangeKind.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(1, rawValue); } else { bitField0_ |= 0x00000001; change_ = value; } break; } case 16: { int rawValue = input.readEnum(); boa.types.Diff.ChangedFile.FileKind value = boa.types.Diff.ChangedFile.FileKind.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; kind_ = value; } break; } case 26: { bitField0_ |= 0x00000004; name_ = input.readBytes(); break; } case 32: { bitField0_ |= 0x00000008; key_ = input.readUInt64(); break; } case 40: { bitField0_ |= 0x00000010; ast_ = input.readBool(); break; } case 50: { boa.types.Ast.CommentsRoot.Builder 
subBuilder = null; if (((bitField0_ & 0x00000020) == 0x00000020)) { subBuilder = comments_.toBuilder(); } comments_ = input.readMessage(boa.types.Ast.CommentsRoot.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(comments_); comments_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000020; break; } case 56: { int rawValue = input.readEnum(); boa.types.Shared.ChangeKind value = boa.types.Shared.ChangeKind.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(7, rawValue); } else { if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { changes_ = new java.util.ArrayList<boa.types.Shared.ChangeKind>(); mutable_bitField0_ |= 0x00000040; } changes_.add(value); } break; } case 58: { int length = input.readRawVarint32(); int oldLimit = input.pushLimit(length); while(input.getBytesUntilLimit() > 0) { int rawValue = input.readEnum(); boa.types.Shared.ChangeKind value = boa.types.Shared.ChangeKind.valueOf(rawValue); if (value == null) { unknownFields.mergeVarintField(7, rawValue); } else { if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { changes_ = new java.util.ArrayList<boa.types.Shared.ChangeKind>(); mutable_bitField0_ |= 0x00000040; } changes_.add(value); } } input.popLimit(oldLimit); break; } case 66: { if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) { previousNames_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000080; } previousNames_.add(input.readBytes()); break; } case 72: { if (!((mutable_bitField0_ & 0x00000100) == 0x00000100)) { previousVersions_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000100; } previousVersions_.add(input.readInt32()); break; } case 74: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000100) == 0x00000100) && input.getBytesUntilLimit() > 0) { previousVersions_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000100; } while 
(input.getBytesUntilLimit() > 0) { previousVersions_.add(input.readInt32()); } input.popLimit(limit); break; } case 80: { if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) { previousIndices_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000200; } previousIndices_.add(input.readInt32()); break; } case 82: { int length = input.readRawVarint32(); int limit = input.pushLimit(length); if (!((mutable_bitField0_ & 0x00000200) == 0x00000200) && input.getBytesUntilLimit() > 0) { previousIndices_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000200; } while (input.getBytesUntilLimit() > 0) { previousIndices_.add(input.readInt32()); } input.popLimit(limit); break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { changes_ = java.util.Collections.unmodifiableList(changes_); } if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) { previousNames_ = new com.google.protobuf.UnmodifiableLazyStringList(previousNames_); } if (((mutable_bitField0_ & 0x00000100) == 0x00000100)) { previousVersions_ = java.util.Collections.unmodifiableList(previousVersions_); } if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) { previousIndices_ = java.util.Collections.unmodifiableList(previousIndices_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return boa.types.Diff.internal_static_boa_types_ChangedFile_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return boa.types.Diff.internal_static_boa_types_ChangedFile_fieldAccessorTable .ensureFieldAccessorsInitialized( boa.types.Diff.ChangedFile.class, 
boa.types.Diff.ChangedFile.Builder.class); } public static com.google.protobuf.Parser<ChangedFile> PARSER = new com.google.protobuf.AbstractParser<ChangedFile>() { public ChangedFile parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ChangedFile(input, extensionRegistry); } }; @java.lang.Override public com.google.protobuf.Parser<ChangedFile> getParserForType() { return PARSER; } /** * Protobuf enum {@code boa.types.ChangedFile.FileKind} * * <pre> ** Describes the kind of the file * </pre> */ public enum FileKind implements com.google.protobuf.ProtocolMessageEnum { /** * <code>OTHER = 0;</code> * * <pre> ** The file's type was unknown * </pre> */ OTHER(0, 0), /** * <code>BINARY = 1;</code> * * <pre> ** The file represents a binary file * </pre> */ BINARY(1, 1), /** * <code>TEXT = 2;</code> * * <pre> ** The file represents a text file * </pre> */ TEXT(2, 2), /** * <code>XML = 3;</code> * * <pre> ** The file represents an XML file * </pre> */ XML(3, 3), /** * <code>SOURCE_JAVA_ERROR = 100;</code> * * <pre> ** The file represents a Java source file that had a parse error * </pre> */ SOURCE_JAVA_ERROR(4, 100), /** * <code>SOURCE_JAVA_JLS2 = 102;</code> * * <pre> ** The file represents a Java source file that parsed without error as JLS2 * </pre> */ SOURCE_JAVA_JLS2(5, 102), /** * <code>SOURCE_JAVA_JLS3 = 103;</code> * * <pre> ** The file represents a Java source file that parsed without error as JLS3 * </pre> */ SOURCE_JAVA_JLS3(6, 103), /** * <code>SOURCE_JAVA_JLS4 = 104;</code> * * <pre> ** The file represents a Java source file that parsed without error as JLS4 * </pre> */ SOURCE_JAVA_JLS4(7, 104), /** * <code>SOURCE_JAVA_JLS8 = 108;</code> * * <pre> ** The file represents a Java source file that parsed without error as JLS8 * </pre> */ SOURCE_JAVA_JLS8(8, 108), /** * <code>SOURCE_CS_ERROR = 200;</code> * * <pre> ** @exclude 
TODO * </pre> */ SOURCE_CS_ERROR(14, 200), /** * <code>SOURCE_CS_CS1 = 201;</code> * * <pre> ** @exclude TODO * </pre> */ SOURCE_CS_CS1(15, 201), /** * <code>SOURCE_CS_CS2 = 202;</code> * * <pre> ** @exclude TODO * </pre> */ SOURCE_CS_CS2(16, 202), /** * <code>SOURCE_CS_CS3 = 203;</code> * * <pre> ** @exclude TODO * </pre> */ SOURCE_CS_CS3(17, 203), /** * <code>SOURCE_CS_CS4 = 204;</code> * * <pre> ** @exclude TODO * </pre> */ SOURCE_CS_CS4(18, 204), /** * <code>SOURCE_CS_CS5 = 205;</code> * * <pre> ** @exclude TODO * </pre> */ SOURCE_CS_CS5(19, 205), /** * <code>SOURCE_JS_ERROR = 300;</code> * * <pre> ** The file represents a JavaScript source file that had a parse error * </pre> */ SOURCE_JS_ERROR(26, 300), /** * <code>SOURCE_JS_ES1 = 301;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES1 * </pre> */ SOURCE_JS_ES1(27, 301), /** * <code>SOURCE_JS_ES2 = 302;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES2 * </pre> */ SOURCE_JS_ES2(28, 302), /** * <code>SOURCE_JS_ES3 = 303;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES3 * </pre> */ SOURCE_JS_ES3(29, 303), /** * <code>SOURCE_JS_ES5 = 304;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES5 * </pre> */ SOURCE_JS_ES5(30, 304), /** * <code>SOURCE_JS_ES6 = 305;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES6 * </pre> */ SOURCE_JS_ES6(31, 305), /** * <code>SOURCE_JS_ES7 = 306;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES7 * </pre> */ SOURCE_JS_ES7(32, 306), /** * <code>SOURCE_JS_ES8 = 307;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES8 * </pre> */ SOURCE_JS_ES8(33, 307), /** * <code>SOURCE_PHP_ERROR = 400;</code> * * <pre> ** The file represents a PHP source file that had a parse 
error * </pre> */ SOURCE_PHP_ERROR(35, 400), /** * <code>SOURCE_PHP5 = 401;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES1 * </pre> */ SOURCE_PHP5(36, 401), /** * <code>SOURCE_PHP5_3 = 402;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES2 * </pre> */ SOURCE_PHP5_3(37, 402), /** * <code>SOURCE_PHP5_4 = 403;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES3 * </pre> */ SOURCE_PHP5_4(38, 403), /** * <code>SOURCE_PHP5_5 = 404;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES5 * </pre> */ SOURCE_PHP5_5(39, 404), /** * <code>SOURCE_PHP5_6 = 405;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES6 * </pre> */ SOURCE_PHP5_6(40, 405), /** * <code>SOURCE_PHP7_0 = 406;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES7 * </pre> */ SOURCE_PHP7_0(41, 406), /** * <code>SOURCE_PHP7_1 = 407;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES8 * </pre> */ SOURCE_PHP7_1(42, 407), /** * <code>SOURCE_HTML_ERROR = 500;</code> * * <pre> ** The file represents an HTML source file that had a parse error * </pre> */ SOURCE_HTML_ERROR(44, 500), /** * <code>Source_HTML = 501;</code> * * <pre> ** The file represents an HTML source file that parsed without error * </pre> */ Source_HTML(45, 501), /** * <code>SOURCE_XML_ERROR = 600;</code> * * <pre> ** The file represents an XML source file that had a parse error * </pre> */ SOURCE_XML_ERROR(47, 600), /** * <code>Source_XML = 601;</code> * * <pre> ** The file represents an HTML source file that parsed without error * </pre> */ Source_XML(48, 601), /** * <code>SOURCE_CSS_ERROR = 700;</code> * * <pre> ** The file represents an CSS source file that had a parse error * </pre> */ SOURCE_CSS_ERROR(50, 700), /** * <code>Source_CSS = 701;</code> * * <pre> ** The file 
represents an CSS source file that parsed without error * </pre> */ Source_CSS(51, 701), ; /** * <code>JAVA_ERROR = 100;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind JAVA_ERROR = SOURCE_JAVA_ERROR; /** * <code>JLS2 = 102;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind JLS2 = SOURCE_JAVA_JLS2; /** * <code>JLS3 = 103;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind JLS3 = SOURCE_JAVA_JLS3; /** * <code>JLS4 = 104;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind JLS4 = SOURCE_JAVA_JLS4; /** * <code>JLS8 = 108;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind JLS8 = SOURCE_JAVA_JLS8; /** * <code>CS_ERROR = 200;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind CS_ERROR = SOURCE_CS_ERROR; /** * <code>CS1 = 201;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind CS1 = SOURCE_CS_CS1; /** * <code>CS2 = 202;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind CS2 = SOURCE_CS_CS2; /** * <code>CS3 = 203;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind CS3 = SOURCE_CS_CS3; /** * <code>CS4 = 204;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind CS4 = SOURCE_CS_CS4; /** * <code>CS5 = 205;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind CS5 = SOURCE_CS_CS5; /** * <code>JS_ERROR = 300;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind JS_ERROR = SOURCE_JS_ERROR; /** * <code>PHP_ERROR = 400;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind PHP_ERROR = SOURCE_PHP_ERROR; /** * <code>HTML_ERROR = 500;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind HTML_ERROR = SOURCE_HTML_ERROR; /** * <code>XML_ERROR = 600;</code> * * <pre> ** @exclude * </pre> */ public static final FileKind XML_ERROR = SOURCE_XML_ERROR; /** * <code>CSS_ERROR = 700;</code> * * <pre> ** @exclude * </pre> */ public static 
final FileKind CSS_ERROR = SOURCE_CSS_ERROR; /** * <code>OTHER = 0;</code> * * <pre> ** The file's type was unknown * </pre> */ public static final int OTHER_VALUE = 0; /** * <code>BINARY = 1;</code> * * <pre> ** The file represents a binary file * </pre> */ public static final int BINARY_VALUE = 1; /** * <code>TEXT = 2;</code> * * <pre> ** The file represents a text file * </pre> */ public static final int TEXT_VALUE = 2; /** * <code>XML = 3;</code> * * <pre> ** The file represents an XML file * </pre> */ public static final int XML_VALUE = 3; /** * <code>SOURCE_JAVA_ERROR = 100;</code> * * <pre> ** The file represents a Java source file that had a parse error * </pre> */ public static final int SOURCE_JAVA_ERROR_VALUE = 100; /** * <code>SOURCE_JAVA_JLS2 = 102;</code> * * <pre> ** The file represents a Java source file that parsed without error as JLS2 * </pre> */ public static final int SOURCE_JAVA_JLS2_VALUE = 102; /** * <code>SOURCE_JAVA_JLS3 = 103;</code> * * <pre> ** The file represents a Java source file that parsed without error as JLS3 * </pre> */ public static final int SOURCE_JAVA_JLS3_VALUE = 103; /** * <code>SOURCE_JAVA_JLS4 = 104;</code> * * <pre> ** The file represents a Java source file that parsed without error as JLS4 * </pre> */ public static final int SOURCE_JAVA_JLS4_VALUE = 104; /** * <code>SOURCE_JAVA_JLS8 = 108;</code> * * <pre> ** The file represents a Java source file that parsed without error as JLS8 * </pre> */ public static final int SOURCE_JAVA_JLS8_VALUE = 108; /** * <code>JAVA_ERROR = 100;</code> * * <pre> ** @exclude * </pre> */ public static final int JAVA_ERROR_VALUE = 100; /** * <code>JLS2 = 102;</code> * * <pre> ** @exclude * </pre> */ public static final int JLS2_VALUE = 102; /** * <code>JLS3 = 103;</code> * * <pre> ** @exclude * </pre> */ public static final int JLS3_VALUE = 103; /** * <code>JLS4 = 104;</code> * * <pre> ** @exclude * </pre> */ public static final int JLS4_VALUE = 104; /** * <code>JLS8 = 108;</code> * * <pre> 
** @exclude * </pre> */ public static final int JLS8_VALUE = 108; /** * <code>SOURCE_CS_ERROR = 200;</code> * * <pre> ** @exclude TODO * </pre> */ public static final int SOURCE_CS_ERROR_VALUE = 200; /** * <code>SOURCE_CS_CS1 = 201;</code> * * <pre> ** @exclude TODO * </pre> */ public static final int SOURCE_CS_CS1_VALUE = 201; /** * <code>SOURCE_CS_CS2 = 202;</code> * * <pre> ** @exclude TODO * </pre> */ public static final int SOURCE_CS_CS2_VALUE = 202; /** * <code>SOURCE_CS_CS3 = 203;</code> * * <pre> ** @exclude TODO * </pre> */ public static final int SOURCE_CS_CS3_VALUE = 203; /** * <code>SOURCE_CS_CS4 = 204;</code> * * <pre> ** @exclude TODO * </pre> */ public static final int SOURCE_CS_CS4_VALUE = 204; /** * <code>SOURCE_CS_CS5 = 205;</code> * * <pre> ** @exclude TODO * </pre> */ public static final int SOURCE_CS_CS5_VALUE = 205; /** * <code>CS_ERROR = 200;</code> * * <pre> ** @exclude * </pre> */ public static final int CS_ERROR_VALUE = 200; /** * <code>CS1 = 201;</code> * * <pre> ** @exclude * </pre> */ public static final int CS1_VALUE = 201; /** * <code>CS2 = 202;</code> * * <pre> ** @exclude * </pre> */ public static final int CS2_VALUE = 202; /** * <code>CS3 = 203;</code> * * <pre> ** @exclude * </pre> */ public static final int CS3_VALUE = 203; /** * <code>CS4 = 204;</code> * * <pre> ** @exclude * </pre> */ public static final int CS4_VALUE = 204; /** * <code>CS5 = 205;</code> * * <pre> ** @exclude * </pre> */ public static final int CS5_VALUE = 205; /** * <code>SOURCE_JS_ERROR = 300;</code> * * <pre> ** The file represents a JavaScript source file that had a parse error * </pre> */ public static final int SOURCE_JS_ERROR_VALUE = 300; /** * <code>SOURCE_JS_ES1 = 301;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES1 * </pre> */ public static final int SOURCE_JS_ES1_VALUE = 301; /** * <code>SOURCE_JS_ES2 = 302;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as 
ES2 * </pre> */ public static final int SOURCE_JS_ES2_VALUE = 302; /** * <code>SOURCE_JS_ES3 = 303;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES3 * </pre> */ public static final int SOURCE_JS_ES3_VALUE = 303; /** * <code>SOURCE_JS_ES5 = 304;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES5 * </pre> */ public static final int SOURCE_JS_ES5_VALUE = 304; /** * <code>SOURCE_JS_ES6 = 305;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES6 * </pre> */ public static final int SOURCE_JS_ES6_VALUE = 305; /** * <code>SOURCE_JS_ES7 = 306;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES7 * </pre> */ public static final int SOURCE_JS_ES7_VALUE = 306; /** * <code>SOURCE_JS_ES8 = 307;</code> * * <pre> ** The file represents a JavaScript source file that parsed without error as ES8 * </pre> */ public static final int SOURCE_JS_ES8_VALUE = 307; /** * <code>JS_ERROR = 300;</code> * * <pre> ** @exclude * </pre> */ public static final int JS_ERROR_VALUE = 300; /** * <code>SOURCE_PHP_ERROR = 400;</code> * * <pre> ** The file represents a PHP source file that had a parse error * </pre> */ public static final int SOURCE_PHP_ERROR_VALUE = 400; /** * <code>SOURCE_PHP5 = 401;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES1 * </pre> */ public static final int SOURCE_PHP5_VALUE = 401; /** * <code>SOURCE_PHP5_3 = 402;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES2 * </pre> */ public static final int SOURCE_PHP5_3_VALUE = 402; /** * <code>SOURCE_PHP5_4 = 403;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES3 * </pre> */ public static final int SOURCE_PHP5_4_VALUE = 403; /** * <code>SOURCE_PHP5_5 = 404;</code> * * <pre> ** The file represents a PHP source file that parsed 
without error as ES5 * </pre> */ public static final int SOURCE_PHP5_5_VALUE = 404; /** * <code>SOURCE_PHP5_6 = 405;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES6 * </pre> */ public static final int SOURCE_PHP5_6_VALUE = 405; /** * <code>SOURCE_PHP7_0 = 406;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES7 * </pre> */ public static final int SOURCE_PHP7_0_VALUE = 406; /** * <code>SOURCE_PHP7_1 = 407;</code> * * <pre> ** The file represents a PHP source file that parsed without error as ES8 * </pre> */ public static final int SOURCE_PHP7_1_VALUE = 407; /** * <code>PHP_ERROR = 400;</code> * * <pre> ** @exclude * </pre> */ public static final int PHP_ERROR_VALUE = 400; /** * <code>SOURCE_HTML_ERROR = 500;</code> * * <pre> ** The file represents an HTML source file that had a parse error * </pre> */ public static final int SOURCE_HTML_ERROR_VALUE = 500; /** * <code>Source_HTML = 501;</code> * * <pre> ** The file represents an HTML source file that parsed without error * </pre> */ public static final int Source_HTML_VALUE = 501; /** * <code>HTML_ERROR = 500;</code> * * <pre> ** @exclude * </pre> */ public static final int HTML_ERROR_VALUE = 500; /** * <code>SOURCE_XML_ERROR = 600;</code> * * <pre> ** The file represents an XML source file that had a parse error * </pre> */ public static final int SOURCE_XML_ERROR_VALUE = 600; /** * <code>Source_XML = 601;</code> * * <pre> ** The file represents an HTML source file that parsed without error * </pre> */ public static final int Source_XML_VALUE = 601; /** * <code>XML_ERROR = 600;</code> * * <pre> ** @exclude * </pre> */ public static final int XML_ERROR_VALUE = 600; /** * <code>SOURCE_CSS_ERROR = 700;</code> * * <pre> ** The file represents an CSS source file that had a parse error * </pre> */ public static final int SOURCE_CSS_ERROR_VALUE = 700; /** * <code>Source_CSS = 701;</code> * * <pre> ** The file represents an CSS source file 
that parsed without error * </pre> */ public static final int Source_CSS_VALUE = 701; /** * <code>CSS_ERROR = 700;</code> * * <pre> ** @exclude * </pre> */ public static final int CSS_ERROR_VALUE = 700; public final int getNumber() { return value; } public static FileKind valueOf(int value) { switch (value) { case 0: return OTHER; case 1: return BINARY; case 2: return TEXT; case 3: return XML; case 100: return SOURCE_JAVA_ERROR; case 102: return SOURCE_JAVA_JLS2; case 103: return SOURCE_JAVA_JLS3; case 104: return SOURCE_JAVA_JLS4; case 108: return SOURCE_JAVA_JLS8; case 200: return SOURCE_CS_ERROR; case 201: return SOURCE_CS_CS1; case 202: return SOURCE_CS_CS2; case 203: return SOURCE_CS_CS3; case 204: return SOURCE_CS_CS4; case 205: return SOURCE_CS_CS5; case 300: return SOURCE_JS_ERROR; case 301: return SOURCE_JS_ES1; case 302: return SOURCE_JS_ES2; case 303: return SOURCE_JS_ES3; case 304: return SOURCE_JS_ES5; case 305: return SOURCE_JS_ES6; case 306: return SOURCE_JS_ES7; case 307: return SOURCE_JS_ES8; case 400: return SOURCE_PHP_ERROR; case 401: return SOURCE_PHP5; case 402: return SOURCE_PHP5_3; case 403: return SOURCE_PHP5_4; case 404: return SOURCE_PHP5_5; case 405: return SOURCE_PHP5_6; case 406: return SOURCE_PHP7_0; case 407: return SOURCE_PHP7_1; case 500: return SOURCE_HTML_ERROR; case 501: return Source_HTML; case 600: return SOURCE_XML_ERROR; case 601: return Source_XML; case 700: return SOURCE_CSS_ERROR; case 701: return Source_CSS; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<FileKind> internalGetValueMap() { return internalValueMap; } private static com.google.protobuf.Internal.EnumLiteMap<FileKind> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<FileKind>() { public FileKind findValueByNumber(int number) { return FileKind.valueOf(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { return getDescriptor().getValues().get(index); } public final 
com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return boa.types.Diff.ChangedFile.getDescriptor().getEnumTypes().get(0); } private static final FileKind[] VALUES = { OTHER, BINARY, TEXT, XML, SOURCE_JAVA_ERROR, SOURCE_JAVA_JLS2, SOURCE_JAVA_JLS3, SOURCE_JAVA_JLS4, SOURCE_JAVA_JLS8, JAVA_ERROR, JLS2, JLS3, JLS4, JLS8, SOURCE_CS_ERROR, SOURCE_CS_CS1, SOURCE_CS_CS2, SOURCE_CS_CS3, SOURCE_CS_CS4, SOURCE_CS_CS5, CS_ERROR, CS1, CS2, CS3, CS4, CS5, SOURCE_JS_ERROR, SOURCE_JS_ES1, SOURCE_JS_ES2, SOURCE_JS_ES3, SOURCE_JS_ES5, SOURCE_JS_ES6, SOURCE_JS_ES7, SOURCE_JS_ES8, JS_ERROR, SOURCE_PHP_ERROR, SOURCE_PHP5, SOURCE_PHP5_3, SOURCE_PHP5_4, SOURCE_PHP5_5, SOURCE_PHP5_6, SOURCE_PHP7_0, SOURCE_PHP7_1, PHP_ERROR, SOURCE_HTML_ERROR, Source_HTML, HTML_ERROR, SOURCE_XML_ERROR, Source_XML, XML_ERROR, SOURCE_CSS_ERROR, Source_CSS, CSS_ERROR, }; public static FileKind valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } return VALUES[desc.getIndex()]; } private final int index; private final int value; private FileKind(int index, int value) { this.index = index; this.value = value; } // @@protoc_insertion_point(enum_scope:boa.types.ChangedFile.FileKind) } private int bitField0_; // required .boa.types.ChangeKind change = 1; public static final int CHANGE_FIELD_NUMBER = 1; private boa.types.Shared.ChangeKind change_; /** * <code>required .boa.types.ChangeKind change = 1;</code> * * <pre> ** The kind of change for this file * </pre> */ public boolean hasChange() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .boa.types.ChangeKind change = 1;</code> * * <pre> ** The kind of change for this file * </pre> */ public boa.types.Shared.ChangeKind getChange() { return change_; } // 
required .boa.types.ChangedFile.FileKind kind = 2; public static final int KIND_FIELD_NUMBER = 2; private boa.types.Diff.ChangedFile.FileKind kind_; /** * <code>required .boa.types.ChangedFile.FileKind kind = 2;</code> * * <pre> ** The kind of file * </pre> */ public boolean hasKind() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .boa.types.ChangedFile.FileKind kind = 2;</code> * * <pre> ** The kind of file * </pre> */ public boa.types.Diff.ChangedFile.FileKind getKind() { return kind_; } // required string name = 3; public static final int NAME_FIELD_NUMBER = 3; private java.lang.Object name_; /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public boolean hasName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { name_ = s; } return s; } } /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // required uint64 key = 4; public static final int KEY_FIELD_NUMBER = 4; private long key_; /** * <code>required uint64 key = 4;</code> * * <pre> ** @exclude * </pre> */ public boolean hasKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required uint64 key = 4;</code> * * <pre> ** @exclude * </pre> */ public long getKey() { return 
key_; } // required bool ast = 5; public static final int AST_FIELD_NUMBER = 5; private boolean ast_; /** * <code>required bool ast = 5;</code> * * <pre> ** @exclude Indicates if this file has a corresponding parsed AST or not * </pre> */ public boolean hasAst() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>required bool ast = 5;</code> * * <pre> ** @exclude Indicates if this file has a corresponding parsed AST or not * </pre> */ public boolean getAst() { return ast_; } // optional .boa.types.CommentsRoot comments = 6; public static final int COMMENTS_FIELD_NUMBER = 6; private boa.types.Ast.CommentsRoot comments_; /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public boolean hasComments() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public boa.types.Ast.CommentsRoot getComments() { return comments_; } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public boa.types.Ast.CommentsRootOrBuilder getCommentsOrBuilder() { return comments_; } // repeated .boa.types.ChangeKind changes = 7; public static final int CHANGES_FIELD_NUMBER = 7; private java.util.List<boa.types.Shared.ChangeKind> changes_; /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ public java.util.List<boa.types.Shared.ChangeKind> getChangesList() { return changes_; } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ public int getChangesCount() { return changes_.size(); } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this this compared to the corresponding parent commits * </pre> */ public 
// NOTE(review): protoc-generated Java (protobuf 2.5-era GeneratedMessage API) for message boa.types.ChangedFile. Do not hand-edit — regenerate from the .proto. The previous_names javadoc below looks copy-pasted from the 'changes' field comment in the source .proto; confirm and fix upstream.
boa.types.Shared.ChangeKind getChanges(int index) { return changes_.get(index); } // repeated string previous_names = 8; public static final int PREVIOUS_NAMES_FIELD_NUMBER = 8; private com.google.protobuf.LazyStringList previousNames_; /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public java.util.List<java.lang.String> getPreviousNamesList() { return previousNames_; } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public int getPreviousNamesCount() { return previousNames_.size(); } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public java.lang.String getPreviousNames(int index) { return previousNames_.get(index); } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public com.google.protobuf.ByteString getPreviousNamesBytes(int index) { return previousNames_.getByteString(index); } // repeated int32 previous_versions = 9; public static final int PREVIOUS_VERSIONS_FIELD_NUMBER = 9; private java.util.List<java.lang.Integer> previousVersions_; /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public java.util.List<java.lang.Integer> getPreviousVersionsList() { return previousVersions_; } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public int getPreviousVersionsCount() { return previousVersions_.size(); } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices
of the corresponding parent commits in the list of all commits * </pre> */ public int getPreviousVersions(int index) { return previousVersions_.get(index); } // repeated int32 previous_indices = 10; public static final int PREVIOUS_INDICES_FIELD_NUMBER = 10; private java.util.List<java.lang.Integer> previousIndices_; /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public java.util.List<java.lang.Integer> getPreviousIndicesList() { return previousIndices_; } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public int getPreviousIndicesCount() { return previousIndices_.size(); } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public int getPreviousIndices(int index) { return previousIndices_.get(index); } private void initFields() { change_ = boa.types.Shared.ChangeKind.UNKNOWN; kind_ = boa.types.Diff.ChangedFile.FileKind.OTHER; name_ = ""; key_ = 0L; ast_ = false; comments_ = boa.types.Ast.CommentsRoot.getDefaultInstance(); changes_ = java.util.Collections.emptyList(); previousNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; previousVersions_ = java.util.Collections.emptyList(); previousIndices_ = java.util.Collections.emptyList(); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasChange()) { memoizedIsInitialized = 0; return false; } if (!hasKind()) { memoizedIsInitialized = 0; return false; } if (!hasName()) { memoizedIsInitialized = 0; return false; } if (!hasKey()) { memoizedIsInitialized = 0; return
false; } if (!hasAst()) { memoizedIsInitialized = 0; return false; } if (hasComments()) { if (!getComments().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeEnum(1, change_.getNumber()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeEnum(2, kind_.getNumber()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, getNameBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { output.writeUInt64(4, key_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { output.writeBool(5, ast_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { output.writeMessage(6, comments_); } for (int i = 0; i < changes_.size(); i++) { output.writeEnum(7, changes_.get(i).getNumber()); } for (int i = 0; i < previousNames_.size(); i++) { output.writeBytes(8, previousNames_.getByteString(i)); } for (int i = 0; i < previousVersions_.size(); i++) { output.writeInt32(9, previousVersions_.get(i)); } for (int i = 0; i < previousIndices_.size(); i++) { output.writeInt32(10, previousIndices_.get(i)); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, change_.getNumber()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, kind_.getNumber()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, getNameBytes()); } if (((bitField0_ & 0x00000008) == 0x00000008)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(4, key_); } if (((bitField0_ &
0x00000010) == 0x00000010)) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, ast_); } if (((bitField0_ & 0x00000020) == 0x00000020)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(6, comments_); } { int dataSize = 0; for (int i = 0; i < changes_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeEnumSizeNoTag(changes_.get(i).getNumber()); } size += dataSize; size += 1 * changes_.size(); } { int dataSize = 0; for (int i = 0; i < previousNames_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeBytesSizeNoTag(previousNames_.getByteString(i)); } size += dataSize; size += 1 * getPreviousNamesList().size(); } { int dataSize = 0; for (int i = 0; i < previousVersions_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(previousVersions_.get(i)); } size += dataSize; size += 1 * getPreviousVersionsList().size(); } { int dataSize = 0; for (int i = 0; i < previousIndices_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeInt32SizeNoTag(previousIndices_.get(i)); } size += dataSize; size += 1 * getPreviousIndicesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } public static boa.types.Diff.ChangedFile parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static boa.types.Diff.ChangedFile parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static boa.types.Diff.ChangedFile parseFrom(byte[] data) throws
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static boa.types.Diff.ChangedFile parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static boa.types.Diff.ChangedFile parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static boa.types.Diff.ChangedFile parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static boa.types.Diff.ChangedFile parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } public static boa.types.Diff.ChangedFile parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } public static boa.types.Diff.ChangedFile parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } public static boa.types.Diff.ChangedFile parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(boa.types.Diff.ChangedFile prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code
boa.types.ChangedFile} * * <pre> ** A file committed in a Revision * </pre> */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements boa.types.Diff.ChangedFileOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return boa.types.Diff.internal_static_boa_types_ChangedFile_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return boa.types.Diff.internal_static_boa_types_ChangedFile_fieldAccessorTable .ensureFieldAccessorsInitialized( boa.types.Diff.ChangedFile.class, boa.types.Diff.ChangedFile.Builder.class); } // Construct using boa.types.Diff.ChangedFile.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getCommentsFieldBuilder(); } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); change_ = boa.types.Shared.ChangeKind.UNKNOWN; bitField0_ = (bitField0_ & ~0x00000001); kind_ = boa.types.Diff.ChangedFile.FileKind.OTHER; bitField0_ = (bitField0_ & ~0x00000002); name_ = ""; bitField0_ = (bitField0_ & ~0x00000004); key_ = 0L; bitField0_ = (bitField0_ & ~0x00000008); ast_ = false; bitField0_ = (bitField0_ & ~0x00000010); if (commentsBuilder_ == null) { comments_ = boa.types.Ast.CommentsRoot.getDefaultInstance(); } else { commentsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); changes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); previousNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000080); previousVersions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); previousIndices_ =
java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000200); return this; } public Builder clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return boa.types.Diff.internal_static_boa_types_ChangedFile_descriptor; } public boa.types.Diff.ChangedFile getDefaultInstanceForType() { return boa.types.Diff.ChangedFile.getDefaultInstance(); } public boa.types.Diff.ChangedFile build() { boa.types.Diff.ChangedFile result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public boa.types.Diff.ChangedFile buildPartial() { boa.types.Diff.ChangedFile result = new boa.types.Diff.ChangedFile(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.change_ = change_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.kind_ = kind_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.name_ = name_; if (((from_bitField0_ & 0x00000008) == 0x00000008)) { to_bitField0_ |= 0x00000008; } result.key_ = key_; if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } result.ast_ = ast_; if (((from_bitField0_ & 0x00000020) == 0x00000020)) { to_bitField0_ |= 0x00000020; } if (commentsBuilder_ == null) { result.comments_ = comments_; } else { result.comments_ = commentsBuilder_.build(); } if (((bitField0_ & 0x00000040) == 0x00000040)) { changes_ = java.util.Collections.unmodifiableList(changes_); bitField0_ = (bitField0_ & ~0x00000040); } result.changes_ = changes_; if (((bitField0_ & 0x00000080) == 0x00000080)) { previousNames_ = new com.google.protobuf.UnmodifiableLazyStringList( previousNames_); bitField0_ = (bitField0_ & ~0x00000080); } result.previousNames_ = previousNames_; if (((bitField0_ & 0x00000100) == 0x00000100)) {
previousVersions_ = java.util.Collections.unmodifiableList(previousVersions_); bitField0_ = (bitField0_ & ~0x00000100); } result.previousVersions_ = previousVersions_; if (((bitField0_ & 0x00000200) == 0x00000200)) { previousIndices_ = java.util.Collections.unmodifiableList(previousIndices_); bitField0_ = (bitField0_ & ~0x00000200); } result.previousIndices_ = previousIndices_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof boa.types.Diff.ChangedFile) { return mergeFrom((boa.types.Diff.ChangedFile)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(boa.types.Diff.ChangedFile other) { if (other == boa.types.Diff.ChangedFile.getDefaultInstance()) return this; if (other.hasChange()) { setChange(other.getChange()); } if (other.hasKind()) { setKind(other.getKind()); } if (other.hasName()) { bitField0_ |= 0x00000004; name_ = other.name_; onChanged(); } if (other.hasKey()) { setKey(other.getKey()); } if (other.hasAst()) { setAst(other.getAst()); } if (other.hasComments()) { mergeComments(other.getComments()); } if (!other.changes_.isEmpty()) { if (changes_.isEmpty()) { changes_ = other.changes_; bitField0_ = (bitField0_ & ~0x00000040); } else { ensureChangesIsMutable(); changes_.addAll(other.changes_); } onChanged(); } if (!other.previousNames_.isEmpty()) { if (previousNames_.isEmpty()) { previousNames_ = other.previousNames_; bitField0_ = (bitField0_ & ~0x00000080); } else { ensurePreviousNamesIsMutable(); previousNames_.addAll(other.previousNames_); } onChanged(); } if (!other.previousVersions_.isEmpty()) { if (previousVersions_.isEmpty()) { previousVersions_ = other.previousVersions_; bitField0_ = (bitField0_ & ~0x00000100); } else { ensurePreviousVersionsIsMutable(); previousVersions_.addAll(other.previousVersions_); } onChanged(); } if (!other.previousIndices_.isEmpty()) { if (previousIndices_.isEmpty()) { previousIndices_ =
other.previousIndices_; bitField0_ = (bitField0_ & ~0x00000200); } else { ensurePreviousIndicesIsMutable(); previousIndices_.addAll(other.previousIndices_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasChange()) { return false; } if (!hasKind()) { return false; } if (!hasName()) { return false; } if (!hasKey()) { return false; } if (!hasAst()) { return false; } if (hasComments()) { if (!getComments().isInitialized()) { return false; } } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { boa.types.Diff.ChangedFile parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (boa.types.Diff.ChangedFile) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; // required .boa.types.ChangeKind change = 1; private boa.types.Shared.ChangeKind change_ = boa.types.Shared.ChangeKind.UNKNOWN; /** * <code>required .boa.types.ChangeKind change = 1;</code> * * <pre> ** The kind of change for this file * </pre> */ public boolean hasChange() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <code>required .boa.types.ChangeKind change = 1;</code> * * <pre> ** The kind of change for this file * </pre> */ public boa.types.Shared.ChangeKind getChange() { return change_; } /** * <code>required .boa.types.ChangeKind change = 1;</code> * * <pre> ** The kind of change for this file * </pre> */ public Builder setChange(boa.types.Shared.ChangeKind value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; change_ = value; onChanged(); return this; } /** * <code>required .boa.types.ChangeKind change = 1;</code> * * <pre> ** The kind of change
for this file * </pre> */ public Builder clearChange() { bitField0_ = (bitField0_ & ~0x00000001); change_ = boa.types.Shared.ChangeKind.UNKNOWN; onChanged(); return this; } // required .boa.types.ChangedFile.FileKind kind = 2; private boa.types.Diff.ChangedFile.FileKind kind_ = boa.types.Diff.ChangedFile.FileKind.OTHER; /** * <code>required .boa.types.ChangedFile.FileKind kind = 2;</code> * * <pre> ** The kind of file * </pre> */ public boolean hasKind() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <code>required .boa.types.ChangedFile.FileKind kind = 2;</code> * * <pre> ** The kind of file * </pre> */ public boa.types.Diff.ChangedFile.FileKind getKind() { return kind_; } /** * <code>required .boa.types.ChangedFile.FileKind kind = 2;</code> * * <pre> ** The kind of file * </pre> */ public Builder setKind(boa.types.Diff.ChangedFile.FileKind value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; kind_ = value; onChanged(); return this; } /** * <code>required .boa.types.ChangedFile.FileKind kind = 2;</code> * * <pre> ** The kind of file * </pre> */ public Builder clearKind() { bitField0_ = (bitField0_ & ~0x00000002); kind_ = boa.types.Diff.ChangedFile.FileKind.OTHER; onChanged(); return this; } // required string name = 3; private java.lang.Object name_ = ""; /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public boolean hasName() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public
com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public Builder setName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; name_ = value; onChanged(); return this; } /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public Builder clearName() { bitField0_ = (bitField0_ & ~0x00000004); name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <code>required string name = 3;</code> * * <pre> ** The full name and path of the file * </pre> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; name_ = value; onChanged(); return this; } // required uint64 key = 4; private long key_ ; /** * <code>required uint64 key = 4;</code> * * <pre> ** @exclude * </pre> */ public boolean hasKey() { return ((bitField0_ & 0x00000008) == 0x00000008); } /** * <code>required uint64 key = 4;</code> * * <pre> ** @exclude * </pre> */ public long getKey() { return key_; } /** * <code>required uint64 key = 4;</code> * * <pre> ** @exclude * </pre> */ public Builder setKey(long value) { bitField0_ |= 0x00000008; key_ = value; onChanged(); return this; } /** * <code>required uint64 key = 4;</code> * * <pre> ** @exclude * </pre> */ public Builder clearKey() { bitField0_ = (bitField0_ & ~0x00000008); key_ = 0L; onChanged(); return this; } // required bool ast = 5; private boolean ast_ ; /** * <code>required bool ast = 5;</code> * * <pre> ** @exclude Indicates if this file has a corresponding parsed AST or not * </pre> */ public
boolean hasAst() { return ((bitField0_ & 0x00000010) == 0x00000010); } /** * <code>required bool ast = 5;</code> * * <pre> ** @exclude Indicates if this file has a corresponding parsed AST or not * </pre> */ public boolean getAst() { return ast_; } /** * <code>required bool ast = 5;</code> * * <pre> ** @exclude Indicates if this file has a corresponding parsed AST or not * </pre> */ public Builder setAst(boolean value) { bitField0_ |= 0x00000010; ast_ = value; onChanged(); return this; } /** * <code>required bool ast = 5;</code> * * <pre> ** @exclude Indicates if this file has a corresponding parsed AST or not * </pre> */ public Builder clearAst() { bitField0_ = (bitField0_ & ~0x00000010); ast_ = false; onChanged(); return this; } // optional .boa.types.CommentsRoot comments = 6; private boa.types.Ast.CommentsRoot comments_ = boa.types.Ast.CommentsRoot.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< boa.types.Ast.CommentsRoot, boa.types.Ast.CommentsRoot.Builder, boa.types.Ast.CommentsRootOrBuilder> commentsBuilder_; /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public boolean hasComments() { return ((bitField0_ & 0x00000020) == 0x00000020); } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public boa.types.Ast.CommentsRoot getComments() { if (commentsBuilder_ == null) { return comments_; } else { return commentsBuilder_.getMessage(); } } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public Builder setComments(boa.types.Ast.CommentsRoot value) { if (commentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } comments_ = value; onChanged(); } else { commentsBuilder_.setMessage(value); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public Builder setComments(
boa.types.Ast.CommentsRoot.Builder builderForValue) { if (commentsBuilder_ == null) { comments_ = builderForValue.build(); onChanged(); } else { commentsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public Builder mergeComments(boa.types.Ast.CommentsRoot value) { if (commentsBuilder_ == null) { if (((bitField0_ & 0x00000020) == 0x00000020) && comments_ != boa.types.Ast.CommentsRoot.getDefaultInstance()) { comments_ = boa.types.Ast.CommentsRoot.newBuilder(comments_).mergeFrom(value).buildPartial(); } else { comments_ = value; } onChanged(); } else { commentsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000020; return this; } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public Builder clearComments() { if (commentsBuilder_ == null) { comments_ = boa.types.Ast.CommentsRoot.getDefaultInstance(); onChanged(); } else { commentsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000020); return this; } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public boa.types.Ast.CommentsRoot.Builder getCommentsBuilder() { bitField0_ |= 0x00000020; onChanged(); return getCommentsFieldBuilder().getBuilder(); } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ public boa.types.Ast.CommentsRootOrBuilder getCommentsOrBuilder() { if (commentsBuilder_ != null) { return commentsBuilder_.getMessageOrBuilder(); } else { return comments_; } } /** * <code>optional .boa.types.CommentsRoot comments = 6;</code> * * <pre> ** @exclude * </pre> */ private com.google.protobuf.SingleFieldBuilder< boa.types.Ast.CommentsRoot, boa.types.Ast.CommentsRoot.Builder, boa.types.Ast.CommentsRootOrBuilder> getCommentsFieldBuilder() { if (commentsBuilder_ == null) { commentsBuilder_ = new
com.google.protobuf.SingleFieldBuilder< boa.types.Ast.CommentsRoot, boa.types.Ast.CommentsRoot.Builder, boa.types.Ast.CommentsRootOrBuilder>( comments_, getParentForChildren(), isClean()); comments_ = null; } return commentsBuilder_; } // repeated .boa.types.ChangeKind changes = 7; private java.util.List<boa.types.Shared.ChangeKind> changes_ = java.util.Collections.emptyList(); private void ensureChangesIsMutable() { if (!((bitField0_ & 0x00000040) == 0x00000040)) { changes_ = new java.util.ArrayList<boa.types.Shared.ChangeKind>(changes_); bitField0_ |= 0x00000040; } } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public java.util.List<boa.types.Shared.ChangeKind> getChangesList() { return java.util.Collections.unmodifiableList(changes_); } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public int getChangesCount() { return changes_.size(); } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public boa.types.Shared.ChangeKind getChanges(int index) { return changes_.get(index); } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder setChanges( int index, boa.types.Shared.ChangeKind value) { if (value == null) { throw new NullPointerException(); } ensureChangesIsMutable(); changes_.set(index, value); onChanged(); return this; } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder addChanges(boa.types.Shared.ChangeKind value) { if (value == null) { throw new
NullPointerException(); } ensureChangesIsMutable(); changes_.add(value); onChanged(); return this; } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder addAllChanges( java.lang.Iterable<? extends boa.types.Shared.ChangeKind> values) { ensureChangesIsMutable(); super.addAll(values, changes_); onChanged(); return this; } /** * <code>repeated .boa.types.ChangeKind changes = 7;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder clearChanges() { changes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000040); onChanged(); return this; } // repeated string previous_names = 8; private com.google.protobuf.LazyStringList previousNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensurePreviousNamesIsMutable() { if (!((bitField0_ & 0x00000080) == 0x00000080)) { previousNames_ = new com.google.protobuf.LazyStringArrayList(previousNames_); bitField0_ |= 0x00000080; } } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public java.util.List<java.lang.String> getPreviousNamesList() { return java.util.Collections.unmodifiableList(previousNames_); } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public int getPreviousNamesCount() { return previousNames_.size(); } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public java.lang.String getPreviousNames(int index) { return previousNames_.get(index); } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the
corresponding parent commits * </pre> */ public com.google.protobuf.ByteString getPreviousNamesBytes(int index) { return previousNames_.getByteString(index); } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder setPreviousNames( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensurePreviousNamesIsMutable(); previousNames_.set(index, value); onChanged(); return this; } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder addPreviousNames( java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensurePreviousNamesIsMutable(); previousNames_.add(value); onChanged(); return this; } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder addAllPreviousNames( java.lang.Iterable<java.lang.String> values) { ensurePreviousNamesIsMutable(); super.addAll(values, previousNames_); onChanged(); return this; } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder clearPreviousNames() { previousNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000080); onChanged(); return this; } /** * <code>repeated string previous_names = 8;</code> * * <pre> ** The kinds of changes of this file compared to the corresponding parent commits * </pre> */ public Builder addPreviousNamesBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } ensurePreviousNamesIsMutable(); previousNames_.add(value); onChanged(); return this; } // repeated int32 previous_versions = 9;
private java.util.List<java.lang.Integer> previousVersions_ = java.util.Collections.emptyList(); private void ensurePreviousVersionsIsMutable() { if (!((bitField0_ & 0x00000100) == 0x00000100)) { previousVersions_ = new java.util.ArrayList<java.lang.Integer>(previousVersions_); bitField0_ |= 0x00000100; } } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public java.util.List<java.lang.Integer> getPreviousVersionsList() { return java.util.Collections.unmodifiableList(previousVersions_); } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public int getPreviousVersionsCount() { return previousVersions_.size(); } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public int getPreviousVersions(int index) { return previousVersions_.get(index); } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public Builder setPreviousVersions( int index, int value) { ensurePreviousVersionsIsMutable(); previousVersions_.set(index, value); onChanged(); return this; } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public Builder addPreviousVersions(int value) { ensurePreviousVersionsIsMutable(); previousVersions_.add(value); onChanged(); return this; } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public Builder addAllPreviousVersions( java.lang.Iterable<?
extends java.lang.Integer> values) { ensurePreviousVersionsIsMutable(); super.addAll(values, previousVersions_); onChanged(); return this; } /** * <code>repeated int32 previous_versions = 9;</code> * * <pre> ** @exclude The indices of the corresponding parent commits in the list of all commits * </pre> */ public Builder clearPreviousVersions() { previousVersions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000100); onChanged(); return this; } // repeated int32 previous_indices = 10; private java.util.List<java.lang.Integer> previousIndices_ = java.util.Collections.emptyList(); private void ensurePreviousIndicesIsMutable() { if (!((bitField0_ & 0x00000200) == 0x00000200)) { previousIndices_ = new java.util.ArrayList<java.lang.Integer>(previousIndices_); bitField0_ |= 0x00000200; } } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public java.util.List<java.lang.Integer> getPreviousIndicesList() { return java.util.Collections.unmodifiableList(previousIndices_); } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public int getPreviousIndicesCount() { return previousIndices_.size(); } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public int getPreviousIndices(int index) { return previousIndices_.get(index); } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public Builder setPreviousIndices( int index, int value) { ensurePreviousIndicesIsMutable(); previousIndices_.set(index, value);
onChanged(); return this; } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public Builder addPreviousIndices(int value) { ensurePreviousIndicesIsMutable(); previousIndices_.add(value); onChanged(); return this; } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public Builder addAllPreviousIndices( java.lang.Iterable<? extends java.lang.Integer> values) { ensurePreviousIndicesIsMutable(); super.addAll(values, previousIndices_); onChanged(); return this; } /** * <code>repeated int32 previous_indices = 10;</code> * * <pre> ** @exclude The indices of the previous files in the list of changed files of the corresponding parent commits * </pre> */ public Builder clearPreviousIndices() { previousIndices_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000200); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:boa.types.ChangedFile) } static { defaultInstance = new ChangedFile(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:boa.types.ChangedFile) } private static com.google.protobuf.Descriptors.Descriptor internal_static_boa_types_ChangedFile_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_boa_types_ChangedFile_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n\ndiff.proto\022\tboa.types\032\014shared.proto\032\ta" + "st.proto\"\325\t\n\013ChangedFile\022%\n\006change\030\001 \002(\016" + "2\025.boa.types.ChangeKind\022-\n\004kind\030\002 \002(\0162\037." 
+ "boa.types.ChangedFile.FileKind\022\014\n\004name\030\003" + " \002(\t\022\013\n\003key\030\004 \002(\004\022\013\n\003ast\030\005 \002(\010\022)\n\010commen" + "ts\030\006 \001(\0132\027.boa.types.CommentsRoot\022&\n\007cha" + "nges\030\007 \003(\0162\025.boa.types.ChangeKind\022\026\n\016pre" + "vious_names\030\010 \003(\t\022\031\n\021previous_versions\030\t" + " \003(\005\022\030\n\020previous_indices\030\n \003(\005\"\247\007\n\010FileK" + "ind\022\t\n\005OTHER\020\000\022\n\n\006BINARY\020\001\022\010\n\004TEXT\020\002\022\007\n\003", "XML\020\003\022\025\n\021SOURCE_JAVA_ERROR\020d\022\024\n\020SOURCE_J" + "AVA_JLS2\020f\022\024\n\020SOURCE_JAVA_JLS3\020g\022\024\n\020SOUR" + "CE_JAVA_JLS4\020h\022\024\n\020SOURCE_JAVA_JLS8\020l\022\016\n\n" + "JAVA_ERROR\020d\022\010\n\004JLS2\020f\022\010\n\004JLS3\020g\022\010\n\004JLS4" + "\020h\022\010\n\004JLS8\020l\022\024\n\017SOURCE_CS_ERROR\020\310\001\022\022\n\rSO" + "URCE_CS_CS1\020\311\001\022\022\n\rSOURCE_CS_CS2\020\312\001\022\022\n\rSO" + "URCE_CS_CS3\020\313\001\022\022\n\rSOURCE_CS_CS4\020\314\001\022\022\n\rSO" + "URCE_CS_CS5\020\315\001\022\r\n\010CS_ERROR\020\310\001\022\010\n\003CS1\020\311\001\022" + "\010\n\003CS2\020\312\001\022\010\n\003CS3\020\313\001\022\010\n\003CS4\020\314\001\022\010\n\003CS5\020\315\001\022" + "\024\n\017SOURCE_JS_ERROR\020\254\002\022\022\n\rSOURCE_JS_ES1\020\255", "\002\022\022\n\rSOURCE_JS_ES2\020\256\002\022\022\n\rSOURCE_JS_ES3\020\257" + "\002\022\022\n\rSOURCE_JS_ES5\020\260\002\022\022\n\rSOURCE_JS_ES6\020\261" + "\002\022\022\n\rSOURCE_JS_ES7\020\262\002\022\022\n\rSOURCE_JS_ES8\020\263" + "\002\022\r\n\010JS_ERROR\020\254\002\022\025\n\020SOURCE_PHP_ERROR\020\220\003\022" + "\020\n\013SOURCE_PHP5\020\221\003\022\022\n\rSOURCE_PHP5_3\020\222\003\022\022\n" + "\rSOURCE_PHP5_4\020\223\003\022\022\n\rSOURCE_PHP5_5\020\224\003\022\022\n" + "\rSOURCE_PHP5_6\020\225\003\022\022\n\rSOURCE_PHP7_0\020\226\003\022\022\n" + 
"\rSOURCE_PHP7_1\020\227\003\022\016\n\tPHP_ERROR\020\220\003\022\026\n\021SOU" + "RCE_HTML_ERROR\020\364\003\022\020\n\013Source_HTML\020\365\003\022\017\n\nH" + "TML_ERROR\020\364\003\022\025\n\020SOURCE_XML_ERROR\020\330\004\022\017\n\nS", "ource_XML\020\331\004\022\016\n\tXML_ERROR\020\330\004\022\025\n\020SOURCE_C" + "SS_ERROR\020\274\005\022\017\n\nSource_CSS\020\275\005\022\016\n\tCSS_ERRO" + "R\020\274\005\032\002\020\001B\002H\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; internal_static_boa_types_ChangedFile_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_boa_types_ChangedFile_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_boa_types_ChangedFile_descriptor, new java.lang.String[] { "Change", "Kind", "Name", "Key", "Ast", "Comments", "Changes", "PreviousNames", "PreviousVersions", "PreviousIndices", }); return null; } }; com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { boa.types.Shared.getDescriptor(), boa.types.Ast.getDescriptor(), }, assigner); } // @@protoc_insertion_point(outer_class_scope) }
apache-2.0
kogupta/scala-playground
src/main/scala/kogu/practice/cats/monads/Main.scala
98
package kogu.practice.cats.monads

// Placeholder entry point for the cats monads practice exercises.
object Main {

  // Application entry point; currently a no-op stub awaiting examples.
  def main(args: Array[String]): Unit = {
  }
}
apache-2.0
uhonliu/swoft
src/Db/AbstractDbConnect.php
1817
<?php

namespace Swoft\Db;

use Swoft\Pool\AbstractConnect;

/**
 * Abstract database connection base class.
 *
 * Provides the shared URI-parsing logic and default no-op packet handling
 * for concrete database connection implementations.
 *
 * @uses      AbstractDbConnect
 * @version   2017-09-29
 * @author    stelin <phpcrazy@126.com>
 * @copyright Copyright 2010-2016 swoft software
 * @license   PHP Version 7.x {@link http://www.php.net/license/3_0.txt}
 */
abstract class AbstractDbConnect extends AbstractConnect implements IDbConnect
{
    /**
     * Receive a response packet.
     *
     * No-op by default; concrete drivers override as needed.
     */
    public function recv()
    {
    }

    /**
     * Enable or disable deferred receive mode.
     *
     * No-op by default; concrete drivers override as needed.
     *
     * @param bool $defer whether to defer receiving the response
     */
    public function setDefer($defer = true)
    {
    }

    /**
     * Return the database driver name configured on the connection pool.
     *
     * @return string
     */
    public function getDriver(): string
    {
        return $this->connectPool->getDriver();
    }

    /**
     * Parse a database connection URI into a configuration array.
     *
     * Expected shape: scheme://host:port/database?user=...&password=...[&charset=...]
     *
     * @param string $uri the connection URI
     *
     * @return array merged host/port/database plus query-string options
     * @throws \InvalidArgumentException when required URI parts are missing
     */
    protected function parseUri(string $uri)
    {
        $parseAry = parse_url($uri);
        // parse_url() returns false for seriously malformed URIs; treat that
        // the same as any missing required component.
        if ($parseAry === false
            || !isset($parseAry['host'])
            || !isset($parseAry['port'])
            || !isset($parseAry['path'])
            || !isset($parseAry['query'])
        ) {
            throw new \InvalidArgumentException("数据库连接uri格式不正确,uri=" . $uri);
        }

        // The path component is "/<database>"; strip the slashes to get the name.
        $parseAry['database'] = str_replace('/', '', $parseAry['path']);
        parse_str($parseAry['query'], $options);

        if (!isset($options['user']) || !isset($options['password'])) {
            throw new \InvalidArgumentException("数据库连接uri格式不正确,未配置用户名和密码,uri=" . $uri);
        }
        if (!isset($options['charset'])) {
            // Leave charset selection to the driver default when unspecified.
            $options['charset'] = "";
        }

        $configs = array_merge($parseAry, $options);
        unset($configs['path'], $configs['query']);

        return $configs;
    }
}
apache-2.0
markantill/PropertyCopier
PropertyCopier/Generators/MatchedPropertyNamesGenerator.cs
2601
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using PropertyCopier.Data;

namespace PropertyCopier.Generators
{
    /// <summary>
    /// Matches source and target properties whose names are equal under the
    /// supplied comparer and whose source type is castable to the target type.
    /// </summary>
    internal class MatchedPropertyNamesGenerator : IExpressionGenerator
    {
        public ExpressionGeneratorResult GenerateExpressions(
            Expression sourceExpression,
            ICollection<PropertyInfo> targetProperties,
            MappingData mappingData,
            IEqualityComparer<string> memberNameComparer)
        {
            var results = new List<PropertyAndExpression>();
            var mappedTargets = new List<PropertyInfo>();

            var pairs = GetMatchedProperties(
                mappingData.GetSourceProperties(),
                targetProperties,
                memberNameComparer);

            foreach (var pair in pairs)
            {
                var targetType = pair.TargetProperty.PropertyType;

                // Only simple values (value types and strings) are handled by
                // this generator; other matches are left for later generators.
                if (!targetType.IsValueType && targetType != typeof(string))
                {
                    continue;
                }

                var expression = ExpressionBuilder.CreateSourceExpression(
                    pair.TargetProperty,
                    pair.SourceProperty,
                    sourceExpression);

                results.Add(new PropertyAndExpression(pair.TargetProperty, expression));
                mappedTargets.Add(pair.TargetProperty);
            }

            return new ExpressionGeneratorResult
            {
                UnmappedTargetProperties = targetProperties.Except(mappedTargets).ToArray(),
                Expressions = results,
            };
        }

        public IEqualityComparer<string> MemberNameComparer { get; set; }

        /// <summary>
        /// Pairs up same-named properties (per the comparer) where the source
        /// property's type can be cast to the target property's type.
        /// </summary>
        internal static IEnumerable<PropertyPair> GetMatchedProperties(
            IEnumerable<PropertyInfo> sourceProperties,
            IEnumerable<PropertyInfo> targetProperties,
            IEqualityComparer<string> memberNameComparer)
        {
            return TypeHelper
                .GetNameMatchedProperties(sourceProperties, targetProperties, memberNameComparer)
                .Where(m => m.SourceProperty.PropertyType.IsCastableTo(m.TargetProperty.PropertyType));
        }
    }
}
apache-2.0
cklokma/p8-sbe
src/engd/href/eclipz/poreve/working/model/poremodel.H
35104
//------------------------------------------------------------------------------ // IBM_PROLOG_BEGIN_TAG // This is an automatically generated prolog. // // OpenPOWER Project // // Contributors Listed Below - COPYRIGHT 2012,2016 // [+] International Business Machines Corp. // // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. // // IBM_PROLOG_END_TAG //------------------------------------------------------------------------------ #ifndef __VSBE_POREMODEL_H #define __VSBE_POREMODEL_H // $Id: poremodel.H,v 1.25 2012/06/18 13:56:57 bcbrock Exp $ /// \file poremodel.H /// \brief The PORE hardware engine model #include <stddef.h> #include <stdint.h> #include "modelerror.H" #include "poreinterface.H" #include "transaction.H" // This is required for debugging prints only #if 1 #include <stdio.h> #endif namespace vsbe { class PoreAddress; class PoreInterface; class PoreModel; }; //////////////////////////////////////////////////////////////////////////// // PoreModel //////////////////////////////////////////////////////////////////////////// /// Abstract base class for the PORE hardware engine model /// /// PoreModel is an abstract class that specifies the interfaces that a PORE /// model will use to communicate to the virtual environment, and that the /// environment will use to control the model. This class is designed to /// allow different implementations of PoreModel to be easily interchanged; /// see the create() method. 
/// /// The PoreModel is completely abstracted from the environment, however. The /// PoreModel communicates with the environment through a subclass of the /// PoreInterface class. This PoreInterface arranges for the implementation /// of the externalized PoreModel operations. class vsbe::PoreModel { public: /////////////// Common Creation/Control Interface //////////////////// /// Create a new instance of a model derived from PoreModel /// /// \param[in] i_id The IBUF ID (PORE engine type) of the model to /// create. /// /// \param[in] i_interface A pointer back to the PoreInterface that /// creates and owns this object and implements several of the PoreModel /// methods. This parameter must be provided to the base class /// constructor. /// /// To facilitate swapping different PORE hardware models in and out /// transparently, this abstract base class declares a static function /// that creates an instance of the model to use. Whoever provides the /// implementation of the derived class must define this method to produce /// an instance of the derived class the conforms to a PORE hardware /// engine of the parameterized type. static PoreModel* create(PoreIbufId i_id, PoreInterface *i_interface); /// Restart the PORE engine in its auto-POR or scan-flush state /// /// For PORE-SBE, the engine is reset and inintialized as if the auto-POR /// sequence had run. This means that the engine will be running at the /// first program vector in the OTPROM. For the other engines, they are /// reset as documented and will not run (correctly) until the BASE /// address and EXE trigger registers are written. As a side effect the /// status of the PoreModel is computed and returned. The application /// should always restart() the model after construction to ensure that /// the model is in the correct reset state. /// /// \retval status PORE-SBE returns 0 (running); The other engines return /// PORE_STATUS_HARDWARE_STOP. 
virtual int restart(); /// Step the model by (at most) N instructions /// /// \param[in] i_instructions The (maximum) number of instructions to /// execute. Use the constant RUN_UNLIMITED to run (effectively) forever. /// /// \param[out] o_ran The number of instructions that actually ran. This /// may be 0 if \a i_instructions is 0, or if the PoreModel was in an /// unrunnable state when run() was invoked. To find the total number of /// instructions that have run since model construction (or the last /// restart()), use the method getInstructions(). /// /// When this method is invoked it is assumed that the caller wants the /// model to run, regardless of its current state. The run() method first /// attempts to make a stopped machine runnable, and then attempts to /// execute \a i_instructions and returns the final status and the number /// of instructions actually completed by this invocation. /// /// - If the engine shows an error status before the execution of run() /// then no instructions will be executed and \a o_ran will be returned as /// 0. To clear the error status use clearModelError() or /// clearHardwareErrors() to continue with restart()-ing the machine. /// /// - If the engine shows a stopped or error status at any time after the /// execution of the first instruction but before the execution of the /// final requested instruction, then run() returns at that point and \a /// o_ran indicates how many instructions were actually executed. /// /// - Calling run() with \a i_instructions == 0 simply makes a stopped /// machine runnable (if possible). /// /// Note that executing a single instruction may entail many bus /// transactions, especially in the case of the SCAND command which is /// treated as single instructions. WAIT is also considered a single /// instruction regardless of the wait count. /// /// \retval 0 The PORE remains runnable. In this case \a o_ran == \a /// i_instructions. 
/// /// \retval status An OR-combination of 1 or more PORE_STATUS_* flags /// indicating the final state of the PORE. virtual int run(const uint64_t i_instructions, uint64_t& o_ran); /// Signal a debug-stop /// /// \param[in] i_stopCode A user-defined integer code to categorize the /// reason for the stop. Use \a i_stopCode == 0 to simply stop the /// machine. Use a non-zero stop code to also cause /// PORE_STATUS_DEBUG_STOP to be set in the final status. /// /// This API is provided for control programs or debugging code /// (e.g. hooks) to force the simulation to stop during the execution of /// the run() method, or to insure that the PORE engine is stopped and in /// the WAIT state prior to updating or examining the state. The effect of /// stop() is to terminate any ongoing run() method immediately by writing /// the PORE \c stop_start bit with a 1. Calling the stop() method with a /// non-zero \a i_stopCode leaves the PORE abstract status with the /// PORE_STATUS_DEBUG_STOP bit set, and the \a i_stopCode provided can be /// later recovered with the getStopCode() method. Calling stop() multiple /// times with non-zero \a i_stopCode only records the last /// instance. Absent any error conditions a subsequent call of run() will /// restart the simulation. /// /// \retval me Returns 0 for success, otherwise any ModelError /// encountered during processing of the method. If the engine has /// ModelError status prior to the call the method returns that status /// immediately (does not perform the stop() operation). virtual ModelError stop(const int i_stopCode); /// Signal a modeling error /// /// \param[in] i_modelError A model-defined integer code to categorize the /// type of modeling error. If the model status includes /// PORE_STATUS_MODEL_ERROR then the getModelError() method returns this /// code, otherwise 0. If \a i_modelError is 0 then this method has no /// effect. 
If modelError() is called multiple times with non-0 \a /// i_modelError during the execution of a single instruction then only /// the last non-0 value is recorded. /// /// This API is provided as a way for the system-level model or /// application debugging code (e.g. hooks) to force the simulation to /// halt with an error during the execution of the run() method. The /// effect of the modelError() with a non-0 \a i_modelError parameter is /// to terminate a run() immediately and leave the PORE abstract status /// with the PORE_STATUS_MODEL_ERROR bit set. This is a fatal error; The /// model will not run instructions again until restart()-ed or the error /// is explicitly cleared with clearModelError(). /// /// \retval i_modelError This routine returns its input \a i_modelError. virtual ModelError modelError(const ModelError i_modelError); /// Clear any ModelError from the model /// /// Note that the run() method will stop immediately and not continue a /// model that has a non-0 ModelError status. This method allows the user /// to clear the ModelError status and continue execution. virtual void clearModelError(); /// Clear PORE hardware error status /// /// PORE hardware error status is stored in the PORE DBG0 and DBG1 /// registers. This procedure clears both of those registers to allow a /// simulation (or run) to continue in the event of a hardware error. /// Note that is a second hardware error occurs while the debug registers /// are "locked" (i.e., holding an error) the PORE hardware engine will /// halt. /// /// \retval me Will be non-0 if an error occurs during the procedure. virtual ModelError clearHardwareErrors(); /// Set the program counter /// /// \param i_pc The new program counter /// /// This method encapsulates the hardware register operations required to /// force-set the PORE program counter. This method first calls stop() to /// halt the PORE engine. 
If the engine is at a trap or breakpoint the /// stop() will take the engine back to the WAIT state, otherwise the new /// PC can not be guaranteed to take effect. The PC is then updated. The /// engine is \e not restarted (if it had been running); At the end of /// this method the hardware will always be in the stopped state. /// /// \retval me Returns 0 for success, otherwise any ModelError /// encountered during processing of the method. If the engine has /// ModelError status prior to the call the method returns that status /// immediately (does not perform the setPc() operation). virtual ModelError setPc(const PoreAddress& i_pc); /// Set the PORE address breakpoint /// /// \param[in] i_address The breakpoint address /// /// The PORE supports a single address breakpoint, and calling this API /// sets the breakpoint. If the PC ever matches the breakpoint address /// then the PORE engine stops after executing an instruction at that /// address. /// /// The PORE address breakpoint can not be explicitly disabled, however /// the constant vsbe::PORE_UNBREAKABLE_ADDRESS contains a phony address /// that can be installed as the breakpoint address to effectively disable /// the address breakpopint. /// /// \retval me Returns 0 for success, otherwise any ModelError /// encountered during processing of the method. virtual ModelError setBreakpoint(const PoreAddress& i_address); /// Enable or disable the TRAP instruction /// /// \param[in] i_enable Controls whether TRAP is enabled or disabled. /// /// By default (or when TRAP is disabled) the TRAP instruction is treated /// by the PORE hardware as a NOP. If enabled the TRAP instruction causes /// PORE execution to break in the Address-Breakpoint state, similar to an /// address breakpoint. The run() method automatically restarts execution /// if the PORE engine is stopped in this state. /// /// \retval me Will be returned non-0 if any errors are encountered /// executing the procedure. 
virtual ModelError enableTrap(const bool i_enable); /// Return the current abstract status /// /// \retval status The current PoreModel abstract status as an /// OR-combination of PORE_STATUS_* bits. See \ref pore_status. virtual int getStatus(); /// Return the current PoreModel error code /// /// \retval error If the PORE status includes the bit /// PORE_STATUS_MODEL_ERROR then getModelError() returns the ModelError /// responsible, otherwise 0. virtual ModelError getModelError(); /// Return the number of executed instructions /// /// \retval instrctions The total number of instructions executed since /// the model was created or restart()-ed. virtual uint64_t getInstructions(); /// Return the code supplied with the last stop() method. /// /// \retval code If the PORE status includes the bit /// PORE_STATUS_DEBUG_STOP, then getStopCode() returns the parameter /// supplied with the last stop() call that caused the PORE model to stop. virtual int getStopCode(); /// Read an integer from memory as seen by the PORE engine /// /// \param[in] i_address The PoreAddress of the memory to read, which must /// be aligned to \a i_size. /// /// \param[out] o_data The data at \a i_address, as a right-justified \a /// i_size byte unsigned integer in host endian format /// /// \param[in] i_size The data size in bytes, which must be either 1, 2, 4 /// or 8. /// /// This method performs a memory read operation on the memory space as /// seen by the PORE engine, interpreting the memory data as an \a i-size /// byte integer. The \a i_address must specify an OCI (GPEn/SLW) or /// I2C/PNOR (SBE) address (bit 0 of the address must be a 1). In /// OCI-attached engines the address will be issued on the OCI. For SBE, /// the address is issued to the addressed I2C controller (or the /// psuedo-I2C controller that provides access to the PNOR). /// /// \note Use the getscom() method to read from PIB memory spaces. 
/// /// \returns Either 0 for success, or a ModelError return code in the /// event of a failure. virtual ModelError getmemInteger(const PoreAddress& i_address, uint64_t& o_data, const size_t i_size); /// Write an integer to memory as seen from the PORE engine /// /// \param[in] i_address The PoreAddress of the memory to write, which /// must be aligned to \a i_size /// /// \param[in] i_data The data to write to \a i_address, as a /// right-justified \a i_size byte unsigned integer in host endian format /// /// \param[in] i_size The data size in bytes. The method currently only /// supports 8-byte writes. /// /// This method performs a memory write operation on the memory space as /// seen by the PORE engine, performing endian-conversion of \a i_data if /// necessary. The \a i_address must specify an OCI (GPEn/SLW) or /// I2C/PNOR (SBE) address (bit 0 of the address must be a 1). In /// OCI-attached engines the address will be issued on the OCI. For SBE, /// the address is issued to the addressed I2C controller (or the /// psuedo-I2C controller that provides access to the PNOR). /// /// \note Use the putscom() method to write to PIB memory spaces. /// /// \returns Either 0 for success, or a ModelError return code in the /// event of a failure. virtual ModelError putmemInteger(const PoreAddress& i_address, const uint64_t i_data, const size_t i_size); ///////////////////////// Abstract Interface ///////////////////////// /// Reset the PORE engine to its scan-flush state /// /// For PORE-SBE, the engine is reset to execute the first instruction of /// OTPROM. For the other engines, they are reset as documented and will /// not run (correctly) until the BASE address and EXE trigger registers /// are written. The PoreModel is defined to be in the reset /// (scan-flush) state at the end of constructing the model. /// /// \retval me Return values other than 0 indicate some type of error in /// the reset as defined by the ModelError enumeration. 
virtual ModelError flushReset() = 0; /// Step the PORE engine one instruction /// /// \param[out] o_stepped This parameter is set to true or false depending /// on whether the model successfully completed a step. /// /// Step the model 1 instruction, unless the model is presently in an /// unrecoverable error state, or the step itself causes an unrecoverable /// error. /// /// Executing a single instruction may entail many bus transactions, /// especially in the case of the SCAND command which is treated as a /// single instruction. WAIT is also considered a single instruction /// regardless of the wait count. Finally, the PORE hardware may also be /// in a state where it can not execute an instruction without an external /// control action. /// /// \retval me Return values other than 0 indicate some type of error in /// the step as defined by the ModelError enumeration. virtual ModelError step(bool& o_stepped) = 0; /// Read a user-visible control or data register \e with side effects /// /// \param[in] i_offset The register offset (see below). /// /// \param[out] o_data The returned data. For 4-byte reads the data is /// right justified. /// /// \param[in] i_size The size in bytes (see below). /// /// In general the PORE supports both 4- and 8-byte access to the control /// and data register space. Registers are specified as an enumerated /// offset into the space. Other than 8-byte reads return the data right /// justfied in the 64-bit returned value. 8-byte reads are only allowed /// for offsets that are 8-byte aligned; 4-byte reads are allowed for any /// 4-byte aligned offset. /// /// \retval me Return values other than 0 indicate some type of error in /// the access as defined by the ModelError enumeration. virtual ModelError registerRead(const PoreRegisterOffset i_offset, uint64_t& o_data, const size_t i_size = 8) = 0; /// Write a user-visible control or data register \e with side effects /// /// \param[in] i_offset The register offset (see below). 
/// /// \param[in] i_data The write data. For 4-byte writes the data is /// right justified. /// /// \param[in] i_size The size in bytes (see below). /// /// In general the PORE supports both 4- and 8-byte access to the control /// and data register space. Registers are specified as an enumerated /// offset into the space. 4-byte writes expect the data right justfied in /// the 64-bit input value. 8-byte writes are only allowed for offsets /// that are 8-byte aligned; 4-byte writes are allowed for any 4-byte /// aligned offset. /// /// \retval me Return values other than 0 indicate some type of error in /// the access as defined by the ModelError enumeration. virtual ModelError registerWrite(const PoreRegisterOffset i_offset, const uint64_t i_data, const size_t i_size = 8) = 0; /// Read a user-visible control or data register \e without side effects /// /// \param[in] i_offset The register offset (see below). /// /// \param[out] o_data The returned data. For 4-byte reads the data is /// right justified. /// /// \param[in] i_size The size in bytes (see below). /// /// In general the PORE supports both 4- and 8-byte access to the control /// and data register space. Registers are specified as an enumerated /// offset into the space. Other than 8-byte reads return the data right /// justfied in the 64-bit returned value. 8-byte reads are only allowed /// for offsets that are 8-byte aligned; 4-byte reads are allowed for any /// 4-byte aligned offset. /// /// \retval me Return values other than 0 indicate some type of error in /// the access as defined by the ModelError enumeration. virtual ModelError registerReadRaw(const PoreRegisterOffset i_offset, uint64_t& o_data, const size_t i_size = 8) = 0; /// Write a user-visible control or data register \e without side effects /// /// \param[in] i_offset The register offset (see below). /// /// \param[in] i_data The write data. For 4-byte writes the data is /// right justified. 
/// /// \param[in] i_size The size in bytes (see below). /// /// In general the PORE supports both 4- and 8-byte access to the control /// and data register space. Registers are specified as an enumerated /// offset into the space. 4-byte writes expect the data right justfied in /// the 64-bit input value. 8-byte writes are only allowed for offsets /// that are 8-byte aligned; 4-byte writes are allowed for any 4-byte /// aligned offset. /// /// \retval me Return values other than 0 indicate some type of error in /// the access as defined by the ModelError enumeration. virtual ModelError registerWriteRaw(const PoreRegisterOffset i_offset, const uint64_t i_data, const size_t i_size = 8) = 0; /// Enable or disable the HOOKI instruction /// /// \param[in] i_enable Either enable or disable the HOOKI instruction. /// /// The virtual HOOK instruction is disabled by default. This API enables /// or disables the PoreModel to call out using the PoreModel::hook() /// interface when it encounters a HOOKI instruction. /// /// \retval me Return values other than 0 indicate some type of error in /// the method as defined by the ModelError enumeration. virtual ModelError enableHookInstruction(bool i_enable) = 0; /// Enable or disable address-based hooks /// /// \param[in] i_enable Either enable or disable address-based hooks. /// /// Address-based hooks are disabled by default. This API enables or /// disables the PoreModel to call out using the readHook(), writeHook() /// and fetchHook() interfaces whenever the model is about to read, write /// or fetch respectively. /// /// \retval me Return values other than 0 indicate some type of error in /// the method as defined by the ModelError enumeration. virtual ModelError enableAddressHooks(bool i_enable) = 0; /// Extract the PORE state for checkpointing /// /// \param[out] o_state A reference to a PoreState object to receive the /// checkpointed state. 
/// /// This method checkpoints the state of a PORE engine into an abstract /// PoreState object, in a way that allows the state to be later restored /// with installState(). The state that is saved is only the PORE engine /// state, and does not include any other state of the PORE virtual /// environment. /// /// This method must be implemented in the derived class that implements /// other virtual methods of the PoreModel. A simulation or software /// model should be able to precisely recover to an arbitrary /// state. Physical hardware may have limited abilities to do this due to /// register side effects, read-only state etc., and may return an error /// code if PORE engine is in a state that can't be restored. /// /// Note: The PORE engine must be in the hardware-stop state /// before checkpointing in order to guarantee that the state can be /// precisely saved and restored under all conditions. /// /// \retval me Normally 0 for success, but may be set by a dervied model /// that is unable to satisfactorily extract the state for some reason. virtual ModelError extractState(PoreState& o_state) = 0; /// Install a checkpointed state /// /// \param[in] i_state A PoreState object containing the entire visible /// register state of the PORE. /// /// This method restores a PORE engine state checkpointed with /// extractState(). The state that is restored is only the PORE engine /// state. The PoreState object does not include any other state of the /// PORE virtual environment. /// /// This method must be implemented in the derived class that implements /// other virtual methods of the PoreModel. A simulation or software /// model should be able to precisely recover to an arbitrary /// state. Physical hardware may have limited abilities to do this due to /// register side effects, read-only state etc., and may return an error /// code if unable to restore the state from the PoreState object. 
/// /// \retval me Normally 0 for success, but may be set by a dervied model /// that is unable to satisfactorily restore a state for some reason. virtual ModelError installState(const PoreState& i_state) = 0; /// Force the PORE engine to branch (from a hook context) /// /// \param[in] i_address The address to branch to. /// /// This method is for use only by hooks, and only by address-based fetch /// hooks and hooks attached to the HOOKI instruction. This method forces /// the PORE engine to branch to an address specified in the \a i_address /// parameter. This method is only defined on software simulated models, /// and will not work on hardware models (including VHDL PORE models). To /// change the PC from a stopped state use the setPc() method which will /// work on all underlying models. /// /// The behavior varies based on the hook context: /// /// - For address-based fetch hooks, the conceptual model is that the hook /// executes before the instruction that follows the hook. Practically /// speaking, the hook may actually be invoked at the beginning of /// instruction fetch. The derived PoreModel will ensure that any fetch /// and execution of the instruction following the hook is cleanly /// abandoned, and will instead go to fetch and execute the instruction at /// \a i_address, including processing any fetch hooks on the new /// target. The hook subroutine that invokes forceBranch() will be fully /// executed before the forced branch. /// /// - For HOOKI instruction hooks the, the behavior is as if the HOOKI /// instruction were an immediate branch to the absolute target. Again, /// the hook subroutine that invokes forceBranch() will be fully executed /// before the forced branch. /// /// If forceBranch() is called from any other context, the PoreModel will /// return the ModelError ME_ILLEGAL_FORCED_BRANCH. Even so, this API is /// fraught with potential problems, e.g. ping-pong livelocks between /// fetch hooks. 
forceBranch() should only be used for simple, /// straightforward control flow modifications. /// /// \retval Either 0 for SUCCESS, ME_ILLEGAL_FORCED_BRANCH or any other /// non-0 ModelError that might be signalled during execution of the /// method. virtual ModelError forceBranch(const PoreAddress& i_address) = 0; //////////////////// PoreInterface Methods ///////////////////////// protected: /// Master a PIB transaction to the virtual environment /// /// \param[in,out] io_transaction The abstract PIB transaction /// /// PIB/PCB transaction status is returned in the \a iv_pcbReturnCode /// field of the transaction. A legal API call will always succeed, even /// if the underlying bus transaction fails. If the transaction is /// illegal in some way (indicating an issue in the derived PoreModel) then /// this must be trapped by the PoreInterface. void pibMaster(PibTransaction& io_transaction); /// Master an OCI transaction to the virtual environment /// /// \param[in,out] io_transaction The abstract OCI transaction /// /// OCI transaction status is returned in the \a iv_ociReturnCode field of /// the transaction. A legal API call will always succeed, even if the /// underlying bus transaction fails. If the transaction is illegal in /// some way (indicating an issue in the derived PoreModel) then this must /// be trapped by the PoreInterface. void ociMaster(OciTransaction& io_transaction); /// Implement the WAIT instruction /// /// \param[in] i_count The number of PORE cycles to wait. /// /// The PoreModel has no notion of time, so it requires the environment to /// model WAIT. The hardware treats WAIT 0 as a special HALT instruction, /// however the concrete model still must still invoke the wait() method /// in this case to inform the abstract model that the PORE has halted /// (since this generates a hardware interrupt). Since WAIT can not fail /// in the hardware, any errors in the implementation of wait() must be /// handled by the PoreInterface. 
void wait(const uint32_t i_count); /// Implement the HOOKI instruction /// /// \param[in] i_address The effective address of the HOOKI instruction. /// /// \param[in] i_hook The low-order 24 bits of the HOOKI instruction, used /// to index the hook routine that will process the hook. /// /// \param[in] i_parameter A 64-bit parameter for the hook routine. /// /// The HOOKI instruction allows the PORE code to invoke a fixed set of /// hooks indexed by a 24-bit integer code, passing each hook routine a /// 64-bit parameter. The underlying hardware model has no dependency on /// the outcome of running a hook, therefore any errors in hook processing /// must be handled by the PoreInterface. void hookInstruction(const PoreAddress& i_address, const uint32_t i_hook, const uint64_t i_parameter); /// Notify the environment of a data read /// /// \param[in] i_address The effective PoreAddress of the data read. /// /// This method allows the environment to hook data reads. For direct /// memory accesses, this method is called immediately before a bus /// transaction is generated to fetch the data at the given effective /// address. For indirect PIB transactions, this method is invoked prior /// to the I2C transaction that actually reads the data from the I2C /// memory. The underlying hardware model has no dependency on the outcome /// of running a hook, therefore any errors in hook processing must be /// handled by the PoreInterface. void hookRead(const PoreAddress& i_address); /// Notify the environment of a data write /// /// \param[in] i_address The effective PoreAddress of the data write. /// /// This method allows the environment to hook data writes. For direct /// memory accesses, this method is called immediately before a bus /// transaction is generated to store the data at the given effective /// address. For indirect PIB transactions, this method is invoked prior /// to the I2C transaction that actually writes the data to the I2C /// memory. 
The underlying hardware model has no dependency on the outcome /// of running a hook, therefore any errors in hook processing must be /// handled by the PoreInterface. void hookWrite(const PoreAddress& i_address); /// Notify the environment of an instruction fetch /// /// \param[in] i_address The effective PoreAddress of the instruction. /// /// This method allows the environment to hook instruction fetches, where /// an instruction fetch refers to reading the first 4 (of potentially 12) /// bytes that comprise an instruction for the purposes of instruction /// execution. For direct memory accesses, this method is called /// immediately before a bus transaction is generated to read the /// instruction data at the given effective address. For indirect PIB /// transactions, this method is invoked prior to the I2C transaction that /// actually fetches the data from the I2C memory. The underlying hardware /// model has no dependency on the outcome of running a hook, therefore /// any errors in hook processing must be handled by the PoreInterface. void hookFetch(const PoreAddress& i_address); /// Notify the environment of the error interrupt /// /// This method must be invoked by the deribed PoreModel whenever the /// execution of an instruction would pulse the PORE hardware \a /// tp_por_error_out signal. void errorIntr(void); /// Notify the environment of the fatal error interrupt /// /// This method must be invoked by the derived PoreModel whenever the /// execution of an instruction would pulse the PORE hardware \a /// tp_por_fatal_error_out signal. void fatalErrorIntr(void); ////////////////////////////// Creators ////////////////////////////// public: /// Create the PoreModel base class /// /// The PoreModel is defined to be in the restart (scan-flush) state at /// the end of constructing the model. /// /// \param[in] i_ibufId The IBUF ID (PORE type) of the model. /// /// \param[in] i_interface A pointer back to the PoreInterface that /// created this model. 
PoreModel(const PoreIbufId i_ibufId, PoreInterface* i_interface); virtual ~PoreModel(); ////////////////////////// Implementation //////////////////////////// protected: /// The IBUF ID (engine type) of the PORE being modeled PoreIbufId iv_ibufId; private: /// The most recent ModelError generated by the model or by calls of /// modelError(), or 0 for no error. ModelError iv_modelError; /// The total number of instructions executed since the PoreModel /// was created, or since the last restart() operation. uint64_t iv_instructions; /// The most recent stop code provided in a call of stop(), or 0 if the /// model is not stopped. int iv_stopCode; /// Pointer back to the virtual interface containing this hardware model. PoreInterface* iv_interface; ///////////////////////////// Safety ////////////////////////////////// private: PoreModel(const PoreModel& i_rhs); PoreModel& operator=(const PoreModel& i_rhs); }; #endif // __VSBE_POREMODEL_H
apache-2.0
aws/aws-sdk-java
aws-java-sdk-sagemaker/src/main/java/com/amazonaws/services/sagemaker/model/ProcessingS3CompressionType.java
1845
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.sagemaker.model;

import javax.annotation.Generated;

/**
 * The S3 compression types accepted for SageMaker processing input data.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum ProcessingS3CompressionType {

    None("None"),
    Gzip("Gzip");

    // The wire (service-facing) representation of this constant.
    private String value;

    private ProcessingS3CompressionType(String wireValue) {
        this.value = wireValue;
    }

    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return ProcessingS3CompressionType corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static ProcessingS3CompressionType fromValue(String value) {
        if (value == null || "".equals(value)) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }

        // Scan the declared constants for one whose wire value matches.
        for (ProcessingS3CompressionType candidate : ProcessingS3CompressionType.values()) {
            if (candidate.toString().equals(value)) {
                return candidate;
            }
        }

        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-workmail/src/main/java/com/amazonaws/services/workmail/model/transform/DescribeOrganizationRequestMarshaller.java
2086
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.workmail.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.workmail.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * DescribeOrganizationRequestMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class DescribeOrganizationRequestMarshaller { private static final MarshallingInfo<String> ORGANIZATIONID_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("OrganizationId").build(); private static final DescribeOrganizationRequestMarshaller instance = new DescribeOrganizationRequestMarshaller(); public static DescribeOrganizationRequestMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. */ public void marshall(DescribeOrganizationRequest describeOrganizationRequest, ProtocolMarshaller protocolMarshaller) { if (describeOrganizationRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(describeOrganizationRequest.getOrganizationId(), ORGANIZATIONID_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0