repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
manuelbuil/proxy
go/envoy/config/tap/v3/common.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: envoy/config/tap/v3/common.proto package envoy_config_tap_v3 import ( fmt "fmt" v3 "github.com/cilium/proxy/go/envoy/config/core/v3" v31 "github.com/cilium/proxy/go/envoy/config/route/v3" _ "github.com/cncf/udpa/go/udpa/annotations" _ "github.com/envoyproxy/protoc-gen-validate/validate" proto "github.com/golang/protobuf/proto" wrappers "github.com/golang/protobuf/ptypes/wrappers" math "math" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // Output format. All output is in the form of one or more :ref:`TraceWrapper // <envoy_api_msg_data.tap.v3.TraceWrapper>` messages. This enumeration indicates // how those messages are written. Note that not all sinks support all output formats. See // individual sink documentation for more information. type OutputSink_Format int32 const ( // Each message will be written as JSON. Any :ref:`body <envoy_api_msg_data.tap.v3.Body>` // data will be present in the :ref:`as_bytes // <envoy_api_field_data.tap.v3.Body.as_bytes>` field. This means that body data will be // base64 encoded as per the `proto3 JSON mappings // <https://developers.google.com/protocol-buffers/docs/proto3#json>`_. OutputSink_JSON_BODY_AS_BYTES OutputSink_Format = 0 // Each message will be written as JSON. Any :ref:`body <envoy_api_msg_data.tap.v3.Body>` // data will be present in the :ref:`as_string // <envoy_api_field_data.tap.v3.Body.as_string>` field. This means that body data will be // string encoded as per the `proto3 JSON mappings // <https://developers.google.com/protocol-buffers/docs/proto3#json>`_. 
This format type is // useful when it is known that that body is human readable (e.g., JSON over HTTP) and the // user wishes to view it directly without being forced to base64 decode the body. OutputSink_JSON_BODY_AS_STRING OutputSink_Format = 1 // Binary proto format. Note that binary proto is not self-delimiting. If a sink writes // multiple binary messages without any length information the data stream will not be // useful. However, for certain sinks that are self-delimiting (e.g., one message per file) // this output format makes consumption simpler. OutputSink_PROTO_BINARY OutputSink_Format = 2 // Messages are written as a sequence tuples, where each tuple is the message length encoded // as a `protobuf 32-bit varint // <https://developers.google.com/protocol-buffers/docs/reference/cpp/google.protobuf.io.coded_stream>`_ // followed by the binary message. The messages can be read back using the language specific // protobuf coded stream implementation to obtain the message length and the message. OutputSink_PROTO_BINARY_LENGTH_DELIMITED OutputSink_Format = 3 // Text proto format. OutputSink_PROTO_TEXT OutputSink_Format = 4 ) var OutputSink_Format_name = map[int32]string{ 0: "JSON_BODY_AS_BYTES", 1: "JSON_BODY_AS_STRING", 2: "PROTO_BINARY", 3: "PROTO_BINARY_LENGTH_DELIMITED", 4: "PROTO_TEXT", } var OutputSink_Format_value = map[string]int32{ "JSON_BODY_AS_BYTES": 0, "JSON_BODY_AS_STRING": 1, "PROTO_BINARY": 2, "PROTO_BINARY_LENGTH_DELIMITED": 3, "PROTO_TEXT": 4, } func (x OutputSink_Format) String() string { return proto.EnumName(OutputSink_Format_name, int32(x)) } func (OutputSink_Format) EnumDescriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{4, 0} } // Tap configuration. type TapConfig struct { // The match configuration. If the configuration matches the data source being tapped, a tap will // occur, with the result written to the configured output. 
MatchConfig *MatchPredicate `protobuf:"bytes,1,opt,name=match_config,json=matchConfig,proto3" json:"match_config,omitempty"` // The tap output configuration. If a match configuration matches a data source being tapped, // a tap will occur and the data will be written to the configured output. OutputConfig *OutputConfig `protobuf:"bytes,2,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` // [#not-implemented-hide:] Specify if Tap matching is enabled. The % of requests\connections for // which the tap matching is enabled. When not enabled, the request\connection will not be // recorded. // // .. note:: // // This field defaults to 100/:ref:`HUNDRED // <envoy_api_enum_type.v3.FractionalPercent.DenominatorType>`. TapEnabled *v3.RuntimeFractionalPercent `protobuf:"bytes,3,opt,name=tap_enabled,json=tapEnabled,proto3" json:"tap_enabled,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *TapConfig) Reset() { *m = TapConfig{} } func (m *TapConfig) String() string { return proto.CompactTextString(m) } func (*TapConfig) ProtoMessage() {} func (*TapConfig) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{0} } func (m *TapConfig) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TapConfig.Unmarshal(m, b) } func (m *TapConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_TapConfig.Marshal(b, m, deterministic) } func (m *TapConfig) XXX_Merge(src proto.Message) { xxx_messageInfo_TapConfig.Merge(m, src) } func (m *TapConfig) XXX_Size() int { return xxx_messageInfo_TapConfig.Size(m) } func (m *TapConfig) XXX_DiscardUnknown() { xxx_messageInfo_TapConfig.DiscardUnknown(m) } var xxx_messageInfo_TapConfig proto.InternalMessageInfo func (m *TapConfig) GetMatchConfig() *MatchPredicate { if m != nil { return m.MatchConfig } return nil } func (m *TapConfig) GetOutputConfig() *OutputConfig { if m != nil { return 
m.OutputConfig } return nil } func (m *TapConfig) GetTapEnabled() *v3.RuntimeFractionalPercent { if m != nil { return m.TapEnabled } return nil } // Tap match configuration. This is a recursive structure which allows complex nested match // configurations to be built using various logical operators. // [#next-free-field: 9] type MatchPredicate struct { // Types that are valid to be assigned to Rule: // *MatchPredicate_OrMatch // *MatchPredicate_AndMatch // *MatchPredicate_NotMatch // *MatchPredicate_AnyMatch // *MatchPredicate_HttpRequestHeadersMatch // *MatchPredicate_HttpRequestTrailersMatch // *MatchPredicate_HttpResponseHeadersMatch // *MatchPredicate_HttpResponseTrailersMatch Rule isMatchPredicate_Rule `protobuf_oneof:"rule"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *MatchPredicate) Reset() { *m = MatchPredicate{} } func (m *MatchPredicate) String() string { return proto.CompactTextString(m) } func (*MatchPredicate) ProtoMessage() {} func (*MatchPredicate) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{1} } func (m *MatchPredicate) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_MatchPredicate.Unmarshal(m, b) } func (m *MatchPredicate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_MatchPredicate.Marshal(b, m, deterministic) } func (m *MatchPredicate) XXX_Merge(src proto.Message) { xxx_messageInfo_MatchPredicate.Merge(m, src) } func (m *MatchPredicate) XXX_Size() int { return xxx_messageInfo_MatchPredicate.Size(m) } func (m *MatchPredicate) XXX_DiscardUnknown() { xxx_messageInfo_MatchPredicate.DiscardUnknown(m) } var xxx_messageInfo_MatchPredicate proto.InternalMessageInfo type isMatchPredicate_Rule interface { isMatchPredicate_Rule() } type MatchPredicate_OrMatch struct { OrMatch *MatchPredicate_MatchSet `protobuf:"bytes,1,opt,name=or_match,json=orMatch,proto3,oneof"` } type MatchPredicate_AndMatch struct { 
AndMatch *MatchPredicate_MatchSet `protobuf:"bytes,2,opt,name=and_match,json=andMatch,proto3,oneof"` } type MatchPredicate_NotMatch struct { NotMatch *MatchPredicate `protobuf:"bytes,3,opt,name=not_match,json=notMatch,proto3,oneof"` } type MatchPredicate_AnyMatch struct { AnyMatch bool `protobuf:"varint,4,opt,name=any_match,json=anyMatch,proto3,oneof"` } type MatchPredicate_HttpRequestHeadersMatch struct { HttpRequestHeadersMatch *HttpHeadersMatch `protobuf:"bytes,5,opt,name=http_request_headers_match,json=httpRequestHeadersMatch,proto3,oneof"` } type MatchPredicate_HttpRequestTrailersMatch struct { HttpRequestTrailersMatch *HttpHeadersMatch `protobuf:"bytes,6,opt,name=http_request_trailers_match,json=httpRequestTrailersMatch,proto3,oneof"` } type MatchPredicate_HttpResponseHeadersMatch struct { HttpResponseHeadersMatch *HttpHeadersMatch `protobuf:"bytes,7,opt,name=http_response_headers_match,json=httpResponseHeadersMatch,proto3,oneof"` } type MatchPredicate_HttpResponseTrailersMatch struct { HttpResponseTrailersMatch *HttpHeadersMatch `protobuf:"bytes,8,opt,name=http_response_trailers_match,json=httpResponseTrailersMatch,proto3,oneof"` } func (*MatchPredicate_OrMatch) isMatchPredicate_Rule() {} func (*MatchPredicate_AndMatch) isMatchPredicate_Rule() {} func (*MatchPredicate_NotMatch) isMatchPredicate_Rule() {} func (*MatchPredicate_AnyMatch) isMatchPredicate_Rule() {} func (*MatchPredicate_HttpRequestHeadersMatch) isMatchPredicate_Rule() {} func (*MatchPredicate_HttpRequestTrailersMatch) isMatchPredicate_Rule() {} func (*MatchPredicate_HttpResponseHeadersMatch) isMatchPredicate_Rule() {} func (*MatchPredicate_HttpResponseTrailersMatch) isMatchPredicate_Rule() {} func (m *MatchPredicate) GetRule() isMatchPredicate_Rule { if m != nil { return m.Rule } return nil } func (m *MatchPredicate) GetOrMatch() *MatchPredicate_MatchSet { if x, ok := m.GetRule().(*MatchPredicate_OrMatch); ok { return x.OrMatch } return nil } func (m *MatchPredicate) GetAndMatch() 
*MatchPredicate_MatchSet { if x, ok := m.GetRule().(*MatchPredicate_AndMatch); ok { return x.AndMatch } return nil } func (m *MatchPredicate) GetNotMatch() *MatchPredicate { if x, ok := m.GetRule().(*MatchPredicate_NotMatch); ok { return x.NotMatch } return nil } func (m *MatchPredicate) GetAnyMatch() bool { if x, ok := m.GetRule().(*MatchPredicate_AnyMatch); ok { return x.AnyMatch } return false } func (m *MatchPredicate) GetHttpRequestHeadersMatch() *HttpHeadersMatch { if x, ok := m.GetRule().(*MatchPredicate_HttpRequestHeadersMatch); ok { return x.HttpRequestHeadersMatch } return nil } func (m *MatchPredicate) GetHttpRequestTrailersMatch() *HttpHeadersMatch { if x, ok := m.GetRule().(*MatchPredicate_HttpRequestTrailersMatch); ok { return x.HttpRequestTrailersMatch } return nil } func (m *MatchPredicate) GetHttpResponseHeadersMatch() *HttpHeadersMatch { if x, ok := m.GetRule().(*MatchPredicate_HttpResponseHeadersMatch); ok { return x.HttpResponseHeadersMatch } return nil } func (m *MatchPredicate) GetHttpResponseTrailersMatch() *HttpHeadersMatch { if x, ok := m.GetRule().(*MatchPredicate_HttpResponseTrailersMatch); ok { return x.HttpResponseTrailersMatch } return nil } // XXX_OneofWrappers is for the internal use of the proto package. func (*MatchPredicate) XXX_OneofWrappers() []interface{} { return []interface{}{ (*MatchPredicate_OrMatch)(nil), (*MatchPredicate_AndMatch)(nil), (*MatchPredicate_NotMatch)(nil), (*MatchPredicate_AnyMatch)(nil), (*MatchPredicate_HttpRequestHeadersMatch)(nil), (*MatchPredicate_HttpRequestTrailersMatch)(nil), (*MatchPredicate_HttpResponseHeadersMatch)(nil), (*MatchPredicate_HttpResponseTrailersMatch)(nil), } } // A set of match configurations used for logical operations. type MatchPredicate_MatchSet struct { // The list of rules that make up the set. 
Rules []*MatchPredicate `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *MatchPredicate_MatchSet) Reset() { *m = MatchPredicate_MatchSet{} } func (m *MatchPredicate_MatchSet) String() string { return proto.CompactTextString(m) } func (*MatchPredicate_MatchSet) ProtoMessage() {} func (*MatchPredicate_MatchSet) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{1, 0} } func (m *MatchPredicate_MatchSet) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_MatchPredicate_MatchSet.Unmarshal(m, b) } func (m *MatchPredicate_MatchSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_MatchPredicate_MatchSet.Marshal(b, m, deterministic) } func (m *MatchPredicate_MatchSet) XXX_Merge(src proto.Message) { xxx_messageInfo_MatchPredicate_MatchSet.Merge(m, src) } func (m *MatchPredicate_MatchSet) XXX_Size() int { return xxx_messageInfo_MatchPredicate_MatchSet.Size(m) } func (m *MatchPredicate_MatchSet) XXX_DiscardUnknown() { xxx_messageInfo_MatchPredicate_MatchSet.DiscardUnknown(m) } var xxx_messageInfo_MatchPredicate_MatchSet proto.InternalMessageInfo func (m *MatchPredicate_MatchSet) GetRules() []*MatchPredicate { if m != nil { return m.Rules } return nil } // HTTP headers match configuration. type HttpHeadersMatch struct { // HTTP headers to match. 
Headers []*v31.HeaderMatcher `protobuf:"bytes,1,rep,name=headers,proto3" json:"headers,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *HttpHeadersMatch) Reset() { *m = HttpHeadersMatch{} } func (m *HttpHeadersMatch) String() string { return proto.CompactTextString(m) } func (*HttpHeadersMatch) ProtoMessage() {} func (*HttpHeadersMatch) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{2} } func (m *HttpHeadersMatch) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_HttpHeadersMatch.Unmarshal(m, b) } func (m *HttpHeadersMatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_HttpHeadersMatch.Marshal(b, m, deterministic) } func (m *HttpHeadersMatch) XXX_Merge(src proto.Message) { xxx_messageInfo_HttpHeadersMatch.Merge(m, src) } func (m *HttpHeadersMatch) XXX_Size() int { return xxx_messageInfo_HttpHeadersMatch.Size(m) } func (m *HttpHeadersMatch) XXX_DiscardUnknown() { xxx_messageInfo_HttpHeadersMatch.DiscardUnknown(m) } var xxx_messageInfo_HttpHeadersMatch proto.InternalMessageInfo func (m *HttpHeadersMatch) GetHeaders() []*v31.HeaderMatcher { if m != nil { return m.Headers } return nil } // Tap output configuration. type OutputConfig struct { // Output sinks for tap data. Currently a single sink is allowed in the list. Once multiple // sink types are supported this constraint will be relaxed. Sinks []*OutputSink `protobuf:"bytes,1,rep,name=sinks,proto3" json:"sinks,omitempty"` // For buffered tapping, the maximum amount of received body that will be buffered prior to // truncation. If truncation occurs, the :ref:`truncated // <envoy_api_field_data.tap.v3.Body.truncated>` field will be set. If not specified, the // default is 1KiB. 
MaxBufferedRxBytes *wrappers.UInt32Value `protobuf:"bytes,2,opt,name=max_buffered_rx_bytes,json=maxBufferedRxBytes,proto3" json:"max_buffered_rx_bytes,omitempty"` // For buffered tapping, the maximum amount of transmitted body that will be buffered prior to // truncation. If truncation occurs, the :ref:`truncated // <envoy_api_field_data.tap.v3.Body.truncated>` field will be set. If not specified, the // default is 1KiB. MaxBufferedTxBytes *wrappers.UInt32Value `protobuf:"bytes,3,opt,name=max_buffered_tx_bytes,json=maxBufferedTxBytes,proto3" json:"max_buffered_tx_bytes,omitempty"` // Indicates whether taps produce a single buffered message per tap, or multiple streamed // messages per tap in the emitted :ref:`TraceWrapper // <envoy_api_msg_data.tap.v3.TraceWrapper>` messages. Note that streamed tapping does not // mean that no buffering takes place. Buffering may be required if data is processed before a // match can be determined. See the HTTP tap filter :ref:`streaming // <config_http_filters_tap_streaming>` documentation for more information. 
Streaming bool `protobuf:"varint,4,opt,name=streaming,proto3" json:"streaming,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *OutputConfig) Reset() { *m = OutputConfig{} } func (m *OutputConfig) String() string { return proto.CompactTextString(m) } func (*OutputConfig) ProtoMessage() {} func (*OutputConfig) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{3} } func (m *OutputConfig) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_OutputConfig.Unmarshal(m, b) } func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) } func (m *OutputConfig) XXX_Merge(src proto.Message) { xxx_messageInfo_OutputConfig.Merge(m, src) } func (m *OutputConfig) XXX_Size() int { return xxx_messageInfo_OutputConfig.Size(m) } func (m *OutputConfig) XXX_DiscardUnknown() { xxx_messageInfo_OutputConfig.DiscardUnknown(m) } var xxx_messageInfo_OutputConfig proto.InternalMessageInfo func (m *OutputConfig) GetSinks() []*OutputSink { if m != nil { return m.Sinks } return nil } func (m *OutputConfig) GetMaxBufferedRxBytes() *wrappers.UInt32Value { if m != nil { return m.MaxBufferedRxBytes } return nil } func (m *OutputConfig) GetMaxBufferedTxBytes() *wrappers.UInt32Value { if m != nil { return m.MaxBufferedTxBytes } return nil } func (m *OutputConfig) GetStreaming() bool { if m != nil { return m.Streaming } return false } // Tap output sink configuration. type OutputSink struct { // Sink output format. 
Format OutputSink_Format `protobuf:"varint,1,opt,name=format,proto3,enum=envoy.config.tap.v3.OutputSink_Format" json:"format,omitempty"` // Types that are valid to be assigned to OutputSinkType: // *OutputSink_StreamingAdmin // *OutputSink_FilePerTap // *OutputSink_StreamingGrpc OutputSinkType isOutputSink_OutputSinkType `protobuf_oneof:"output_sink_type"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *OutputSink) Reset() { *m = OutputSink{} } func (m *OutputSink) String() string { return proto.CompactTextString(m) } func (*OutputSink) ProtoMessage() {} func (*OutputSink) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{4} } func (m *OutputSink) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_OutputSink.Unmarshal(m, b) } func (m *OutputSink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_OutputSink.Marshal(b, m, deterministic) } func (m *OutputSink) XXX_Merge(src proto.Message) { xxx_messageInfo_OutputSink.Merge(m, src) } func (m *OutputSink) XXX_Size() int { return xxx_messageInfo_OutputSink.Size(m) } func (m *OutputSink) XXX_DiscardUnknown() { xxx_messageInfo_OutputSink.DiscardUnknown(m) } var xxx_messageInfo_OutputSink proto.InternalMessageInfo func (m *OutputSink) GetFormat() OutputSink_Format { if m != nil { return m.Format } return OutputSink_JSON_BODY_AS_BYTES } type isOutputSink_OutputSinkType interface { isOutputSink_OutputSinkType() } type OutputSink_StreamingAdmin struct { StreamingAdmin *StreamingAdminSink `protobuf:"bytes,2,opt,name=streaming_admin,json=streamingAdmin,proto3,oneof"` } type OutputSink_FilePerTap struct { FilePerTap *FilePerTapSink `protobuf:"bytes,3,opt,name=file_per_tap,json=filePerTap,proto3,oneof"` } type OutputSink_StreamingGrpc struct { StreamingGrpc *StreamingGrpcSink `protobuf:"bytes,4,opt,name=streaming_grpc,json=streamingGrpc,proto3,oneof"` } func (*OutputSink_StreamingAdmin) 
isOutputSink_OutputSinkType() {} func (*OutputSink_FilePerTap) isOutputSink_OutputSinkType() {} func (*OutputSink_StreamingGrpc) isOutputSink_OutputSinkType() {} func (m *OutputSink) GetOutputSinkType() isOutputSink_OutputSinkType { if m != nil { return m.OutputSinkType } return nil } func (m *OutputSink) GetStreamingAdmin() *StreamingAdminSink { if x, ok := m.GetOutputSinkType().(*OutputSink_StreamingAdmin); ok { return x.StreamingAdmin } return nil } func (m *OutputSink) GetFilePerTap() *FilePerTapSink { if x, ok := m.GetOutputSinkType().(*OutputSink_FilePerTap); ok { return x.FilePerTap } return nil } func (m *OutputSink) GetStreamingGrpc() *StreamingGrpcSink { if x, ok := m.GetOutputSinkType().(*OutputSink_StreamingGrpc); ok { return x.StreamingGrpc } return nil } // XXX_OneofWrappers is for the internal use of the proto package. func (*OutputSink) XXX_OneofWrappers() []interface{} { return []interface{}{ (*OutputSink_StreamingAdmin)(nil), (*OutputSink_FilePerTap)(nil), (*OutputSink_StreamingGrpc)(nil), } } // Streaming admin sink configuration. 
type StreamingAdminSink struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *StreamingAdminSink) Reset() { *m = StreamingAdminSink{} } func (m *StreamingAdminSink) String() string { return proto.CompactTextString(m) } func (*StreamingAdminSink) ProtoMessage() {} func (*StreamingAdminSink) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{5} } func (m *StreamingAdminSink) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StreamingAdminSink.Unmarshal(m, b) } func (m *StreamingAdminSink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_StreamingAdminSink.Marshal(b, m, deterministic) } func (m *StreamingAdminSink) XXX_Merge(src proto.Message) { xxx_messageInfo_StreamingAdminSink.Merge(m, src) } func (m *StreamingAdminSink) XXX_Size() int { return xxx_messageInfo_StreamingAdminSink.Size(m) } func (m *StreamingAdminSink) XXX_DiscardUnknown() { xxx_messageInfo_StreamingAdminSink.DiscardUnknown(m) } var xxx_messageInfo_StreamingAdminSink proto.InternalMessageInfo // The file per tap sink outputs a discrete file for every tapped stream. type FilePerTapSink struct { // Path prefix. The output file will be of the form <path_prefix>_<id>.pb, where <id> is an // identifier distinguishing the recorded trace for stream instances (the Envoy // connection ID, HTTP stream ID, etc.). 
PathPrefix string `protobuf:"bytes,1,opt,name=path_prefix,json=pathPrefix,proto3" json:"path_prefix,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *FilePerTapSink) Reset() { *m = FilePerTapSink{} } func (m *FilePerTapSink) String() string { return proto.CompactTextString(m) } func (*FilePerTapSink) ProtoMessage() {} func (*FilePerTapSink) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{6} } func (m *FilePerTapSink) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_FilePerTapSink.Unmarshal(m, b) } func (m *FilePerTapSink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_FilePerTapSink.Marshal(b, m, deterministic) } func (m *FilePerTapSink) XXX_Merge(src proto.Message) { xxx_messageInfo_FilePerTapSink.Merge(m, src) } func (m *FilePerTapSink) XXX_Size() int { return xxx_messageInfo_FilePerTapSink.Size(m) } func (m *FilePerTapSink) XXX_DiscardUnknown() { xxx_messageInfo_FilePerTapSink.DiscardUnknown(m) } var xxx_messageInfo_FilePerTapSink proto.InternalMessageInfo func (m *FilePerTapSink) GetPathPrefix() string { if m != nil { return m.PathPrefix } return "" } // [#not-implemented-hide:] Streaming gRPC sink configuration sends the taps to an external gRPC // server. type StreamingGrpcSink struct { // Opaque identifier, that will be sent back to the streaming grpc server. TapId string `protobuf:"bytes,1,opt,name=tap_id,json=tapId,proto3" json:"tap_id,omitempty"` // The gRPC server that hosts the Tap Sink Service. 
GrpcService *v3.GrpcService `protobuf:"bytes,2,opt,name=grpc_service,json=grpcService,proto3" json:"grpc_service,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *StreamingGrpcSink) Reset() { *m = StreamingGrpcSink{} } func (m *StreamingGrpcSink) String() string { return proto.CompactTextString(m) } func (*StreamingGrpcSink) ProtoMessage() {} func (*StreamingGrpcSink) Descriptor() ([]byte, []int) { return fileDescriptor_accadd4ee8324475, []int{7} } func (m *StreamingGrpcSink) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StreamingGrpcSink.Unmarshal(m, b) } func (m *StreamingGrpcSink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_StreamingGrpcSink.Marshal(b, m, deterministic) } func (m *StreamingGrpcSink) XXX_Merge(src proto.Message) { xxx_messageInfo_StreamingGrpcSink.Merge(m, src) } func (m *StreamingGrpcSink) XXX_Size() int { return xxx_messageInfo_StreamingGrpcSink.Size(m) } func (m *StreamingGrpcSink) XXX_DiscardUnknown() { xxx_messageInfo_StreamingGrpcSink.DiscardUnknown(m) } var xxx_messageInfo_StreamingGrpcSink proto.InternalMessageInfo func (m *StreamingGrpcSink) GetTapId() string { if m != nil { return m.TapId } return "" } func (m *StreamingGrpcSink) GetGrpcService() *v3.GrpcService { if m != nil { return m.GrpcService } return nil } func init() { proto.RegisterEnum("envoy.config.tap.v3.OutputSink_Format", OutputSink_Format_name, OutputSink_Format_value) proto.RegisterType((*TapConfig)(nil), "envoy.config.tap.v3.TapConfig") proto.RegisterType((*MatchPredicate)(nil), "envoy.config.tap.v3.MatchPredicate") proto.RegisterType((*MatchPredicate_MatchSet)(nil), "envoy.config.tap.v3.MatchPredicate.MatchSet") proto.RegisterType((*HttpHeadersMatch)(nil), "envoy.config.tap.v3.HttpHeadersMatch") proto.RegisterType((*OutputConfig)(nil), "envoy.config.tap.v3.OutputConfig") proto.RegisterType((*OutputSink)(nil), "envoy.config.tap.v3.OutputSink") 
proto.RegisterType((*StreamingAdminSink)(nil), "envoy.config.tap.v3.StreamingAdminSink") proto.RegisterType((*FilePerTapSink)(nil), "envoy.config.tap.v3.FilePerTapSink") proto.RegisterType((*StreamingGrpcSink)(nil), "envoy.config.tap.v3.StreamingGrpcSink") } func init() { proto.RegisterFile("envoy/config/tap/v3/common.proto", fileDescriptor_accadd4ee8324475) } var fileDescriptor_accadd4ee8324475 = []byte{ // 1147 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x96, 0x4d, 0x6f, 0x1b, 0x45, 0x18, 0xc7, 0xb3, 0x1b, 0xc7, 0x71, 0x1e, 0xbb, 0xc1, 0x4c, 0x55, 0x12, 0x42, 0x5f, 0x12, 0xb7, 0x4d, 0xa3, 0x36, 0x5d, 0xab, 0xf1, 0x05, 0xf9, 0x80, 0x94, 0x6d, 0xdd, 0xd8, 0xd0, 0xc6, 0xd6, 0xda, 0x20, 0x7a, 0x5a, 0x8d, 0xbd, 0x63, 0x7b, 0xa9, 0xbd, 0x33, 0xcc, 0x8e, 0x8d, 0x7d, 0x41, 0x08, 0x09, 0x09, 0x7a, 0xec, 0x91, 0xcf, 0x80, 0xf8, 0x00, 0x9c, 0xb8, 0x20, 0x71, 0xe5, 0xdb, 0xa0, 0x1e, 0x10, 0x9a, 0x99, 0xdd, 0xc4, 0x9b, 0xd8, 0x6d, 0xd2, 0xdb, 0xee, 0xcc, 0xff, 0xf9, 0x3d, 0x6f, 0xb3, 0xcf, 0x0e, 0x6c, 0x93, 0x60, 0x4c, 0xa7, 0xc5, 0x0e, 0x0d, 0xba, 0x7e, 0xaf, 0x28, 0x30, 0x2b, 0x8e, 0x4b, 0xc5, 0x0e, 0x1d, 0x0e, 0x69, 0x60, 0x31, 0x4e, 0x05, 0x45, 0x57, 0x95, 0xc2, 0xd2, 0x0a, 0x4b, 0x60, 0x66, 0x8d, 0x4b, 0x5b, 0xb7, 0x12, 0x66, 0x1d, 0xca, 0x89, 0xb4, 0x6b, 0xe3, 0x90, 0x68, 0xab, 0xad, 0x7b, 0x73, 0x05, 0x3d, 0xce, 0x3a, 0x6e, 0x48, 0xf8, 0xd8, 0xef, 0xc4, 0xc2, 0xfd, 0x84, 0x90, 0xd3, 0x91, 0x50, 0x4a, 0xf5, 0xe0, 0x76, 0xe8, 0x90, 0xd1, 0x80, 0x04, 0x22, 0x8c, 0xd4, 0x37, 0x7b, 0x94, 0xf6, 0x06, 0xa4, 0xa8, 0xde, 0xda, 0xa3, 0x6e, 0xf1, 0x3b, 0x8e, 0x19, 0x23, 0x3c, 0xde, 0xbf, 0x31, 0xf2, 0x18, 0x2e, 0xe2, 0x20, 0xa0, 0x02, 0x0b, 0x9f, 0x06, 0x61, 0x31, 0x14, 0x58, 0x8c, 0xe2, 0xed, 0x9d, 0x73, 0xdb, 0x63, 0xc2, 0x43, 0x9f, 0x06, 0x7e, 0xd0, 0x8b, 0x24, 0x1b, 0x63, 0x3c, 0xf0, 0x3d, 0x2c, 0xa3, 0x88, 0x1e, 0xf4, 0x46, 0xe1, 0x37, 0x13, 0xd6, 0x5a, 0x98, 0x3d, 0x56, 0x81, 0xa2, 0x06, 0xe4, 0x86, 0x58, 0x74, 
0xfa, 0xae, 0x0e, 0x7c, 0xd3, 0xd8, 0x36, 0xf6, 0xb2, 0x07, 0xb7, 0xad, 0x39, 0xc5, 0xb2, 0x9e, 0x4b, 0x61, 0x83, 0x13, 0xcf, 0xef, 0x60, 0x41, 0xec, 0xcc, 0x1b, 0x7b, 0xe5, 0x95, 0x61, 0xe6, 0x0d, 0x27, 0xab, 0x10, 0x27, 0xc4, 0x2b, 0x74, 0x24, 0xd8, 0x48, 0xc4, 0x48, 0x53, 0x21, 0x77, 0xe6, 0x22, 0xeb, 0x4a, 0xa9, 0x2d, 0x67, 0x80, 0x39, 0x3a, 0xb3, 0x8e, 0xea, 0x90, 0x15, 0x98, 0xb9, 0x24, 0xc0, 0xed, 0x01, 0xf1, 0x36, 0x97, 0x15, 0xcf, 0x4a, 0xf2, 0x64, 0x67, 0x24, 0xd0, 0x19, 0x05, 0xc2, 0x1f, 0x92, 0xa7, 0x1c, 0x77, 0x64, 0x61, 0xf0, 0xa0, 0x41, 0x78, 0x87, 0x04, 0xc2, 0x01, 0x81, 0x59, 0x45, 0x13, 0xca, 0xf7, 0x7f, 0xfd, 0xeb, 0xe7, 0x9b, 0x77, 0xe1, 0xb6, 0x26, 0xc4, 0x7d, 0x54, 0x21, 0x1d, 0xe0, 0x01, 0xeb, 0x63, 0xeb, 0xa4, 0x40, 0x85, 0xff, 0xd2, 0xb0, 0x9e, 0x4c, 0x1c, 0xd5, 0x20, 0x43, 0xb9, 0xab, 0x72, 0x8e, 0xea, 0xb5, 0x7f, 0x81, 0x7a, 0xe9, 0xd7, 0x26, 0x11, 0xd5, 0x25, 0x67, 0x95, 0x72, 0xf5, 0x86, 0xbe, 0x80, 0x35, 0x1c, 0x78, 0x11, 0xcb, 0x7c, 0x2f, 0x56, 0x06, 0x07, 0x9e, 0x86, 0xd9, 0xb0, 0x16, 0x50, 0x11, 0xc1, 0x96, 0x2f, 0xdc, 0x48, 0xc9, 0x08, 0xa8, 0xd0, 0x8c, 0x5d, 0x19, 0xd0, 0x34, 0x62, 0xa4, 0xb6, 0x8d, 0xbd, 0x8c, 0xbd, 0xfa, 0xc6, 0x4e, 0x7d, 0x63, 0x66, 0x0c, 0xed, 0x6b, 0xaa, 0x75, 0x1e, 0x6c, 0xf5, 0x85, 0x60, 0x2e, 0x27, 0xdf, 0x8e, 0x48, 0x28, 0xdc, 0x3e, 0xc1, 0x1e, 0xe1, 0x61, 0x64, 0xb8, 0xa2, 0x9c, 0xdf, 0x9d, 0xeb, 0xbc, 0x2a, 0x04, 0xab, 0x6a, 0xb5, 0x42, 0x55, 0x97, 0x9c, 0x0d, 0x89, 0x72, 0x34, 0x69, 0x76, 0x0b, 0x75, 0xe1, 0x93, 0x84, 0x17, 0xc1, 0xb1, 0x3f, 0x38, 0x75, 0x93, 0xbe, 0x9c, 0x9b, 0xcd, 0x19, 0x37, 0xad, 0x88, 0x74, 0xd6, 0x4f, 0xc8, 0x68, 0x10, 0x92, 0x33, 0xe9, 0xac, 0xbe, 0x97, 0x1f, 0x8d, 0x4a, 0xe4, 0xd3, 0x87, 0xeb, 0x49, 0x3f, 0x67, 0x12, 0xca, 0x5c, 0xce, 0xd1, 0xc7, 0xb3, 0x8e, 0x12, 0x19, 0x6d, 0xfd, 0x62, 0x40, 0x26, 0x3e, 0x24, 0xe8, 0x31, 0xac, 0xf0, 0xd1, 0x80, 0x84, 0x9b, 0xc6, 0xf6, 0xf2, 0x65, 0xbe, 0xee, 0xd7, 0x86, 0x99, 0x31, 0x1d, 0x6d, 0x5b, 0xfe, 0x54, 0x7e, 0x34, 0x25, 0x78, 0xb4, 
0xf8, 0xa3, 0x59, 0x70, 0x46, 0xcb, 0x45, 0x69, 0x79, 0x1f, 0xf6, 0x2e, 0x6a, 0x69, 0x67, 0x21, 0x25, 0x7d, 0xa2, 0xe5, 0x7f, 0x6d, 0xa3, 0xf0, 0x93, 0x01, 0xf9, 0xb3, 0xb9, 0xa3, 0xcf, 0x60, 0x35, 0x6a, 0x51, 0x94, 0xd3, 0x9d, 0x64, 0x4e, 0x6a, 0xec, 0xaa, 0xaa, 0x29, 0x95, 0x32, 0x22, 0xdc, 0x89, 0x8d, 0xca, 0x8f, 0x64, 0x48, 0xfb, 0x70, 0x7f, 0x71, 0x48, 0x67, 0x5d, 0x16, 0xfe, 0x34, 0x21, 0x37, 0x3b, 0xae, 0xd0, 0x21, 0xac, 0x84, 0x7e, 0xf0, 0x32, 0x8e, 0xe0, 0xd6, 0x5b, 0x06, 0x5c, 0xd3, 0x0f, 0x5e, 0xda, 0xf0, 0xc6, 0x5e, 0x7d, 0x6d, 0xa4, 0x32, 0x46, 0xde, 0x70, 0xb4, 0x25, 0xaa, 0xc3, 0xb5, 0x21, 0x9e, 0xb8, 0xed, 0x51, 0xb7, 0x4b, 0x38, 0xf1, 0x5c, 0x3e, 0x71, 0xdb, 0x53, 0x41, 0xc2, 0x68, 0x14, 0x5c, 0xb7, 0xf4, 0x6f, 0xc2, 0x8a, 0x7f, 0x13, 0xd6, 0x97, 0xb5, 0x40, 0x94, 0x0e, 0xbe, 0xc2, 0x83, 0x11, 0x71, 0xd0, 0x10, 0x4f, 0xec, 0xc8, 0xd2, 0x99, 0xd8, 0xd2, 0xee, 0x1c, 0x50, 0xc4, 0xc0, 0xe5, 0x4b, 0x02, 0x5b, 0x11, 0xf0, 0x3a, 0xac, 0x85, 0x82, 0x13, 0x3c, 0xf4, 0x83, 0x9e, 0x9e, 0x07, 0xce, 0xe9, 0x42, 0xf9, 0xa1, 0x2c, 0xe3, 0x1e, 0xec, 0x2e, 0x2e, 0xe3, 0x6c, 0xc5, 0x0a, 0xaf, 0x52, 0x00, 0xa7, 0x05, 0x41, 0x55, 0x48, 0x77, 0x29, 0x1f, 0x62, 0xa1, 0xa6, 0xe8, 0xfa, 0xc1, 0xee, 0x3b, 0x2a, 0x68, 0x3d, 0x55, 0x6a, 0x75, 0x34, 0x7f, 0x54, 0xff, 0x89, 0xc8, 0x1e, 0x39, 0xf0, 0xc1, 0x49, 0x50, 0x2e, 0xf6, 0x86, 0x7e, 0x10, 0x55, 0xf0, 0xde, 0x5c, 0x64, 0x33, 0xd6, 0x1e, 0x4a, 0xa9, 0x44, 0x57, 0x97, 0x9c, 0xf5, 0x30, 0xb1, 0x8a, 0x8e, 0x20, 0xd7, 0xf5, 0x07, 0xc4, 0x65, 0x84, 0xbb, 0x02, 0xb3, 0xb7, 0x0e, 0xd4, 0xa7, 0xfe, 0x80, 0x34, 0x08, 0x6f, 0x61, 0x16, 0xc1, 0xa0, 0x7b, 0xb2, 0x82, 0xea, 0x70, 0x8a, 0x76, 0xe5, 0xcd, 0x41, 0xd5, 0x31, 0xbb, 0x20, 0xdd, 0x93, 0xd8, 0x8e, 0x38, 0xeb, 0x44, 0xb4, 0x2b, 0xe1, 0xec, 0x62, 0xe1, 0x7b, 0x48, 0xeb, 0x4a, 0xa0, 0x8f, 0x00, 0x7d, 0xde, 0xac, 0x1f, 0xbb, 0x76, 0xfd, 0xc9, 0x0b, 0xf7, 0xb0, 0xe9, 0xda, 0x2f, 0x5a, 0x95, 0x66, 0x7e, 0x09, 0x6d, 0xc0, 0xd5, 0xc4, 0x7a, 0xb3, 0xe5, 0xd4, 0x8e, 0x8f, 0xf2, 0x06, 
0xca, 0x43, 0xae, 0xe1, 0xd4, 0x5b, 0x75, 0xd7, 0xae, 0x1d, 0x1f, 0x3a, 0x2f, 0xf2, 0x26, 0xda, 0x81, 0x1b, 0xb3, 0x2b, 0xee, 0xb3, 0xca, 0xf1, 0x51, 0xab, 0xea, 0x3e, 0xa9, 0x3c, 0xab, 0x3d, 0xaf, 0xb5, 0x2a, 0x4f, 0xf2, 0xcb, 0x68, 0x1d, 0x40, 0x4b, 0x5a, 0x95, 0xaf, 0x5b, 0xf9, 0x54, 0xf9, 0x81, 0xec, 0xfa, 0x2e, 0xdc, 0x79, 0x57, 0xd7, 0xd5, 0xa9, 0xdf, 0x80, 0x7c, 0x74, 0x1d, 0x90, 0x47, 0xde, 0x15, 0x53, 0x16, 0x7d, 0xd7, 0x35, 0x40, 0xe7, 0xfb, 0x50, 0x2e, 0x49, 0xb6, 0x05, 0xfb, 0x8b, 0xd9, 0xe7, 0x8d, 0x0a, 0x2f, 0x61, 0x3d, 0xd9, 0x01, 0xb4, 0x07, 0x59, 0x86, 0x45, 0xdf, 0x65, 0x9c, 0x74, 0xfd, 0x89, 0x3a, 0x5f, 0x6b, 0xea, 0x47, 0xc6, 0xcd, 0x6d, 0xc3, 0x01, 0xb9, 0xd7, 0x50, 0x5b, 0x17, 0x18, 0x4e, 0x49, 0x74, 0xe1, 0x77, 0x03, 0x3e, 0x3c, 0xd7, 0x24, 0x74, 0x0d, 0xd2, 0xf2, 0x8e, 0xe2, 0x7b, 0xda, 0x97, 0xb3, 0x22, 0x30, 0xab, 0x79, 0xe8, 0x18, 0x72, 0xb3, 0x77, 0xc5, 0xf9, 0x77, 0xa1, 0xf8, 0xee, 0xa2, 0x60, 0x5a, 0x38, 0x7b, 0xb9, 0xea, 0x9d, 0x2e, 0x97, 0x0f, 0x64, 0xb4, 0x0f, 0xe1, 0xc1, 0x05, 0xca, 0x13, 0x87, 0x66, 0x97, 0xff, 0xf8, 0xe1, 0xef, 0x7f, 0xd2, 0x66, 0xde, 0x84, 0x1d, 0x9f, 0x6a, 0xcf, 0x8c, 0xd3, 0xc9, 0x74, 0xde, 0xf1, 0xb3, 0xb3, 0x8f, 0xd5, 0x9d, 0xb9, 0x21, 0xe7, 0x43, 0xc3, 0x68, 0xa7, 0xd5, 0xa0, 0x28, 0xfd, 0x1f, 0x00, 0x00, 0xff, 0xff, 0xf1, 0x15, 0x48, 0xa7, 0x5f, 0x0b, 0x00, 0x00, }
carmanzhang/cornac
tests/cornac/metrics/test_ranking.py
# Copyright 2018 The Cornac Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import unittest

import numpy as np

from cornac.metrics.ranking import RankingMetric
from cornac.metrics.ranking import MeasureAtK
from cornac.metrics import NDCG
from cornac.metrics import NCRR
from cornac.metrics import MRR
from cornac.metrics import Precision
from cornac.metrics import Recall
from cornac.metrics import FMeasure
from cornac.metrics import AUC
from cornac.metrics import MAP


class TestRanking(unittest.TestCase):
    """Unit tests for the ranking metrics (NDCG, NCRR, MRR, Precision,
    Recall, F1, AUC, MAP) and their shared base classes.

    Throughout, ``ground_truth`` is a 0/1 relevance vector indexed by item id
    and ``rec_list`` is the ranked list of recommended item ids.
    """

    def test_ranking_metric(self):
        """Base class exposes type/name/k and leaves compute() abstract."""
        metric = RankingMetric()
        self.assertEqual(metric.type, "ranking")
        self.assertIsNone(metric.name)
        self.assertEqual(metric.k, -1)

        # BUG FIX: the original `try: compute() except NotImplementedError:
        # assert True` silently passed even if no exception was raised.
        # assertRaises actually verifies the abstract-method contract.
        with self.assertRaises(NotImplementedError):
            metric.compute()

    def test_ndcg(self):
        ndcg = NDCG()
        self.assertEqual(ndcg.type, "ranking")
        self.assertEqual(ndcg.name, "NDCG@-1")

        self.assertEqual(1, ndcg.compute(np.asarray([1]), np.asarray([0])))

        ground_truth = np.asarray([1, 0, 1])  # [1, 3]
        rec_list = np.asarray([0, 2, 1])  # [1, 3, 2]
        self.assertEqual(1, ndcg.compute(ground_truth, rec_list))

        ndcg_2 = NDCG(k=2)
        self.assertEqual(ndcg_2.k, 2)
        ground_truth = np.asarray([0, 0, 1])  # [3]
        rec_list = np.asarray([1, 2, 0])  # [2, 3, 1]
        self.assertEqual(
            0.63, float("{:.2f}".format(ndcg_2.compute(ground_truth, rec_list)))
        )

    def test_ncrr(self):
        ncrr = NCRR()
        self.assertEqual(ncrr.type, "ranking")
        self.assertEqual(ncrr.name, "NCRR@-1")

        self.assertEqual(1, ncrr.compute(np.asarray([1]), np.asarray([0])))

        ground_truth = np.asarray([1, 0, 1])  # [1, 3]
        rec_list = np.asarray([0, 2, 1])  # [1, 3, 2]
        self.assertEqual(1, ncrr.compute(ground_truth, rec_list))

        ground_truth = np.asarray([1, 0, 1])  # [1, 3]
        rec_list = np.asarray([1, 2, 0])  # [2, 3, 1]
        self.assertEqual(
            ((1 / 3 + 1 / 2) / (1 + 1 / 2)), ncrr.compute(ground_truth, rec_list)
        )

        ncrr_2 = NCRR(k=2)
        self.assertEqual(ncrr_2.k, 2)
        ground_truth = np.asarray([0, 0, 1])  # [3]
        rec_list = np.asarray([1, 2, 0])  # [2, 3, 1]
        self.assertEqual(0.5, ncrr_2.compute(ground_truth, rec_list))

        ground_truth = np.asarray([0, 0, 1])  # [3]
        rec_list = np.asarray([4, 1, 2])  # [5, 2, 3]
        self.assertEqual(0.0, ncrr_2.compute(ground_truth, rec_list))

        ground_truth = np.asarray([1, 1, 1])  # [1, 2, 3]
        rec_list = np.asarray([5, 1, 6])  # [6, 2, 7]
        self.assertEqual(1.0 / 3.0, ncrr_2.compute(ground_truth, rec_list))

        ncrr_3 = NCRR(k=3)
        ground_truth = np.asarray([1, 1])  # [1, 2]
        rec_list = np.asarray([5, 1, 6, 8])  # [6, 2, 7, 9]
        self.assertEqual(1.0 / 3.0, ncrr_3.compute(ground_truth, rec_list))

    def test_mrr(self):
        mrr = MRR()
        self.assertEqual(mrr.type, "ranking")
        self.assertEqual(mrr.name, "MRR")

        self.assertEqual(1, mrr.compute(np.asarray([1]), np.asarray([0])))

        ground_truth = np.asarray([1, 0, 1])  # [1, 3]
        rec_list = np.asarray([0, 2, 1])  # [1, 3, 2]
        self.assertEqual(1, mrr.compute(ground_truth, rec_list))

        ground_truth = np.asarray([1, 0, 1])  # [1, 3]
        rec_list = np.asarray([1, 2, 0])  # [2, 3, 1]
        self.assertEqual(1 / 2, mrr.compute(ground_truth, rec_list))

        # BUG FIX: replaced `try/except ValueError: assert True` — which
        # passed even when compute() did NOT raise — with an explicit
        # assertion that no ground-truth item in the recommendations
        # triggers a ValueError.
        ground_truth = np.asarray([1, 0, 1])  # [1, 3]
        rec_list = np.asarray([1])  # [2]
        with self.assertRaises(ValueError):
            mrr.compute(ground_truth, rec_list)

    def test_measure_at_k(self):
        measure_at_k = MeasureAtK()
        self.assertEqual(measure_at_k.type, "ranking")
        # Consistency: use the unittest assertion like the rest of the file
        # (a bare `assert` is stripped under `python -O`).
        self.assertIsNone(measure_at_k.name)
        self.assertEqual(measure_at_k.k, -1)

        tp, tp_fn, tp_fp = measure_at_k.compute(np.asarray([1]), np.asarray([0]))
        self.assertEqual(1, tp)
        self.assertEqual(1, tp_fn)
        self.assertEqual(1, tp_fp)

        ground_truth = np.asarray([1, 0, 1])  # [1, 0, 1]
        rec_list = np.asarray([0, 2, 1])  # [1, 1, 1]
        tp, tp_fn, tp_fp = measure_at_k.compute(ground_truth, rec_list)
        self.assertEqual(2, tp)
        self.assertEqual(2, tp_fn)
        self.assertEqual(3, tp_fp)

    def test_precision(self):
        prec = Precision()
        self.assertEqual(prec.type, "ranking")
        self.assertEqual(prec.name, "Precision@-1")

        self.assertEqual(1, prec.compute(np.asarray([1]), np.asarray([0])))

        ground_truth = np.asarray([1, 0, 1])  # [1, 0, 1]
        rec_list = np.asarray([0, 2, 1])  # [1, 1, 1]
        self.assertEqual((2 / 3), prec.compute(ground_truth, rec_list))

        ground_truth = np.asarray([0, 0, 1])  # [0, 0, 1]
        rec_list = np.asarray([1, 2, 0])  # [1, 1, 1]
        self.assertEqual((1 / 3), prec.compute(ground_truth, rec_list))

        prec_2 = Precision(k=2)
        self.assertEqual(prec_2.k, 2)
        ground_truth = np.asarray([0, 0, 1])  # [0, 0, 1]
        rec_list = np.asarray([1, 2, 0])  # [1, 1, 1]
        self.assertEqual(0.5, prec_2.compute(ground_truth, rec_list))

    def test_recall(self):
        rec = Recall()
        self.assertEqual(rec.type, "ranking")
        self.assertEqual(rec.name, "Recall@-1")

        self.assertEqual(1, rec.compute(np.asarray([1]), np.asarray([0])))

        ground_truth = np.asarray([1, 0, 1])  # [1, 0, 1]
        rec_list = np.asarray([0, 2, 1])  # [1, 1, 1]
        self.assertEqual(1, rec.compute(ground_truth, rec_list))

        ground_truth = np.asarray([0, 0, 1])  # [0, 0, 1]
        rec_list = np.asarray([1, 2, 0])  # [1, 1, 1]
        self.assertEqual(1, rec.compute(ground_truth, rec_list))

        rec_2 = Recall(k=2)
        self.assertEqual(rec_2.k, 2)
        ground_truth = np.asarray([0, 0, 1])  # [0, 0, 1]
        rec_list = np.asarray([1, 2, 0])  # [1, 1, 1]
        self.assertEqual(1, rec_2.compute(ground_truth, rec_list))

    def test_f_measure(self):
        f1 = FMeasure()
        self.assertEqual(f1.type, "ranking")
        self.assertEqual(f1.name, "F1@-1")

        self.assertEqual(1, f1.compute(np.asarray([1]), np.asarray([0])))

        ground_truth = np.asarray([1, 0, 1])  # [1, 0, 1]
        rec_list = np.asarray([0, 2, 1])  # [1, 1, 1]
        self.assertEqual((4 / 5), f1.compute(ground_truth, rec_list))

        ground_truth = np.asarray([0, 0, 1])  # [0, 0, 1]
        rec_list = np.asarray([1, 2, 0])  # [1, 1, 1]
        self.assertEqual((1 / 2), f1.compute(ground_truth, rec_list))

        f1_2 = FMeasure(k=2)
        self.assertEqual(f1_2.k, 2)
        ground_truth = np.asarray([0, 0, 1])  # [0, 0, 1]
        rec_list = np.asarray([1, 2, 0])  # [1, 1, 1]
        self.assertEqual((2 / 3), f1_2.compute(ground_truth, rec_list))

        ground_truth = np.asarray([1, 0, 0])  # [1, 0, 0]
        rec_list = np.asarray([1, 2])  # [0, 1, 1]
        self.assertEqual(0, f1_2.compute(ground_truth, rec_list))

    def test_auc(self):
        auc = AUC()
        self.assertEqual(auc.type, "ranking")
        self.assertEqual(auc.name, "AUC")

        gt_pos = np.array([0, 0, 1, 1])
        pd_scores = np.array([0.1, 0.4, 0.35, 0.8])
        auc_score = auc.compute(pd_scores, gt_pos)
        self.assertEqual(0.75, auc_score)

        gt_pos = np.array([0, 1, 0, 1])
        pd_scores = np.array([0.1, 0.4, 0.35, 0.8])
        auc_score = auc.compute(pd_scores, gt_pos)
        self.assertEqual(1.0, auc_score)

        gt_pos = np.array([0, 0, 1, 0])
        gt_neg = np.array([1, 1, 0, 0])
        pd_scores = np.array([0.1, 0.4, 0.35, 0.8])
        auc_score = auc.compute(pd_scores, gt_pos, gt_neg)
        self.assertEqual(0.5, auc_score)

    def test_map(self):
        mAP = MAP()
        self.assertEqual(mAP.type, "ranking")
        self.assertEqual(mAP.name, "MAP")

        gt_pos = np.array([1, 0, 0])
        pd_scores = np.array([0.75, 0.5, 1])
        self.assertEqual(0.5, mAP.compute(pd_scores, gt_pos))

        gt_pos = np.array([0, 0, 1])
        pd_scores = np.array([1, 0.2, 0.1])
        self.assertEqual(1 / 3, mAP.compute(pd_scores, gt_pos))

        gt_pos = np.array([0, 1, 0, 1, 0, 1, 0, 0, 0, 0])
        pd_scores = np.linspace(0.0, 1.0, len(gt_pos))[::-1]
        self.assertEqual(0.5, mAP.compute(pd_scores, gt_pos))


if __name__ == "__main__":
    unittest.main()
GernotMaier/Eventdisplay
inc/VInstrumentResponseFunctionRunParameter.h
<reponame>GernotMaier/Eventdisplay
// NOTE(review): the "<reponame>..." line above is a dataset/export artifact,
// not valid C++ — remove it before compiling.
//! VInstrumentResponseFunctionRunParameter run parameters for response function calculator (effective areas)

#ifndef VInstrumentResponseFunctionRunParameter_H
#define VInstrumentResponseFunctionRunParameter_H

#include "Ctelconfig.h"
#include "VEvndispRunParameter.h"
#include "VMonteCarloRunHeader.h"
#include "VTableLookupRunParameter.h"
#include "CData.h"
#include "VEnergySpectrumfromLiterature.h"

#include <fstream>
#include <getopt.h>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include "TChain.h"
#include "TF1.h"
#include "TNamed.h"

using namespace std;

// Parameter holder for the instrument response function (effective area)
// calculation.  Derives from TNamed so it can be written to / read from ROOT
// files alongside the IRF results.  Members are plain public fields filled by
// the run-parameter readers below.
// NOTE(review): do not reorder members or change types without bumping the
// ClassDef version at the bottom — this type is ROOT-serialized.
class VInstrumentResponseFunctionRunParameter : public TNamed
{
    private:

        // parse the run-parameter file (implementation in the .cpp)
        bool readRunParameters( string ifilename );
        // read cosmic-ray spectral parameters from fCREnergySpectrumFile
        bool readCRSpectralParameters();

    public:

        string fObservatory;

        unsigned int fFillingMode;   // filling mode
        bool fEffArea_short_writing; // short/long tree writing

        // gamma/hadron and direction cut configuration
        vector< string > fCutFileName;
        vector< float > fCutCharacteristicMCAZ;
        string fInstrumentEpoch;
        vector< unsigned int > fTelToAnalyse; // telescopes used in analysis (optional, not always filled)
        int fGammaHadronCutSelector;
        int fDirectionCutSelector;

        // energy reconstruction settings
        E_ReconstructionType fReconstructionType;
        unsigned int fEnergyReconstructionMethod;
        bool fIgnoreEnergyReconstructionQuality;

        // spectral indexes to re-weight the MC spectrum to
        // (fNSpectralIndex values starting at fSpectralIndexMin in steps of
        // fSpectralIndexStep; expanded into fSpectralIndex)
        unsigned int fNSpectralIndex;
        double fSpectralIndexMin;
        double fSpectralIndexStep;
        vector< double > fSpectralIndex;

        // MC energy range and spectral index of the simulated sample
        double fMCEnergy_min;
        double fMCEnergy_max;
        double fMCEnergy_index;

        bool fFillMCHistograms;
        bool fgetXoff_Yoff_afterCut;
        string fWriteEventdatatrees;

        // IRF histogram bin definition
        unsigned int fEnergyAxisBins_log10;
        double fEnergyAxis_logTeV_min;
        double fEnergyAxis_logTeV_max;
        unsigned int fBiasBin;       // Energy bias (bias bins)
        unsigned int fLogAngularBin; // Angular resolution Log10 (bins)
        double fhistoAngularBin_min;
        double fhistoAngularBin_max;
        unsigned int fResponseMatricesEbinning; // bins in the ResponseMatrices
        unsigned int fhistoNEbins;   // E binning (affects 2D histograms only)
        double fhistoNEbins_logTeV_min;
        double fhistoNEbins_logTeV_max;

        // MC core-scatter / viewcone geometry
        string fCoreScatterMode;
        double fCoreScatterRadius;
        double fViewcone_min;
        double fViewcone_max;

        bool fAzimuthBins;
        bool fIsotropicArrivalDirections;
        float fIgnoreFractionOfEvents;
        bool fTelescopeTypeCuts;

        // input files: mscw data, MC data (tree/histogram), gamma/hadron
        // probability file
        string fdatafile;
        string fMCdatafile_tree;
        string fMCdatafile_histo;
        string fGammaHadronProbabilityFile;

        // observing conditions of the processed run
        // (ze = zenith angle; Xoff/Yoff = camera offsets)
        double fze;
        int fnoise;
        double fpedvar;
        double fXoff;
        double fYoff;
        vector< double > fAzMin;
        vector< double > fAzMax;
        double fWobbleIsotropic;

        // telescope/array configuration summary (filled from telconfig tree)
        unsigned int telconfig_ntel;
        double telconfig_arraycentre_X;
        double telconfig_arraycentre_Y;
        double telconfig_arraymax;

        // cosmic-ray energy spectrum used for background re-weighting
        string fCREnergySpectrumFile;
        unsigned int fCREnergySpectrumID;
        TF1* fCREnergySpectrum;

        VInstrumentResponseFunctionRunParameter();
        ~VInstrumentResponseFunctionRunParameter() {}

        vector< string > getCutFileName()
        {
            return fCutFileName;
        }
        vector< float > getCutCharacteristicMCAZ()
        {
            return fCutCharacteristicMCAZ;
        }
        // epoch string; iMajor=true returns the major epoch only
        // (presumably, e.g. "V6" instead of "V6_2016" — confirm in the .cpp)
        string getInstrumentEpoch( bool iMajor = false );
        TTree* getTelConfigTree();
        void print();
        VMonteCarloRunHeader* readMCRunHeader();
        bool readRunParameterFromTextFile( string iFile );
        bool testRunparameters();

        // ROOT dictionary macro; version 22 — increment when the
        // serialized layout above changes
        ClassDef( VInstrumentResponseFunctionRunParameter, 22 );
};
#endif
linqyd/etk
etk/structured_extractors/digTokenizerExtractor/digTokenizer/rowTokenizer.py
<gh_stars>0
#!/usr/bin/env python
# NOTE(review): the "<gh_stars>0" line above is an export artifact, not valid
# Python — remove it before running.  This module is Python 2 only: it relies
# on the `unicode` builtin and byte/unicode string mixing.

from ngram import ngram
import re
import unicodedata
import itertools


class RowTokenizer:
    """Tokenizes one input row (a list of field strings) according to a JSON
    configuration with per-field analyzers.

    Because fields marked `allow_blank` are expanded into their cross product
    (present vs. blank), a single input row can yield several token rows;
    iterate over them with next().
    """

    def __init__(self, row, json_config):
        # json_config: dict with "settings", "fieldConfig" (keyed by the
        # field index as a string) and "defaultConfig".
        self.config = json_config
        self.settings = json_config["settings"]
        self.rows = []          # list of token lists, filled by __tokenize()
        self.__tokenize(row)
        self.index = 0          # cursor for next()

    def next(self):
        """Return the next token row, or None when exhausted.

        NOTE(review): this is a manual cursor, not the iterator protocol —
        there is no __iter__ and exhaustion returns None instead of raising
        StopIteration.
        """
        if self.index < len(self.rows):
            self.index += 1
            return self.rows[self.index-1]
        return None

    def __tokenize(self, row):
        """Resolve each field's analyzer/prefix/allow_blank from the config,
        expand optional-blank fields into multiple lines, analyze every field
        and append the resulting token lists to self.rows."""
        dict_analyzer = dict()      # field index -> analyzer config
        dict_prefix = dict()        # field index -> token prefix string
        dict_blank_fields = dict()  # field index -> may this field be blanked?
        final_row = list()
        for index, field_value in enumerate(row):
            dict_blank_fields[index] = False
            dict_prefix[index] = ""
            if str(index) in self.config["fieldConfig"]:
                field_config = self.config["fieldConfig"][str(index)]
                if "analyzer" in field_config:
                    analyzer = field_config["analyzer"]
                else:
                    analyzer = self.config["defaultConfig"]["analyzer"]
                if "prefix" in field_config:
                    dict_prefix[index] = field_config["prefix"]
                if "allow_blank" in field_config:
                    dict_blank_fields[index] = field_config["allow_blank"]
                final_row.append(field_value)
            else:
                # field without explicit config: value is DROPPED (replaced
                # by '') and the default analyzer is recorded
                final_row.append('')
                analyzer = self.config["defaultConfig"]["analyzer"]
            dict_analyzer[index] = analyzer
        row = final_row
        #print "Tokenize row:", row
        multi_lines = self.__get_cross_product(dict_blank_fields, row)
        for line in multi_lines:
            tokens = []
            for index, field_value in enumerate(line):
                field_tokens = self.__analyze_field(field_value, dict_prefix[index], dict_analyzer[index], self.settings)
                tokens.extend(field_tokens)
            self.rows.append(tokens)

    @staticmethod
    def __get_cross_product(dict_blank_fields, line):
        """For every non-empty field with allow_blank=True, consider both the
        value and "" — return the cross product of all field choices
        (an itertools.product iterator of tuples)."""
        field_values = []
        for index, field_value in enumerate(line):
            values = list()
            values.append(field_value)
            if len(field_value) > 0:
                if dict_blank_fields[index] is True:
                    values.append("")
            field_values.append(values)
        return itertools.product(*field_values)

    #returns the tokens removing the stop words
    @staticmethod
    def __tokenize_input_stopwords(input, stop_words):
        # generator: whitespace-split tokens with stop words filtered out
        tokens = input.split()
        for token in tokens:
            if not token in stop_words:
                yield token

    @staticmethod
    def __tokenize_input(input):
        # generator: plain whitespace-split tokens
        tokens = input.split()
        for token in tokens:
            yield token

    def __get_n_grams(self, text, n_type, n):
        """Return n-grams of `text`.

        n_type == "word": word n-grams joined WITHOUT separators; if the text
        has n or fewer words, the whole text is returned as a single token.
        n_type == "character": character n-grams via ngram.NGram; texts of
        length <= n are returned whole.
        Any other n_type falls back to whitespace tokens (note: only the
        character-if has an else branch; the word branch returns above it).
        """
        #removes the stop words
        tokens = list(self.__tokenize_input(text))
        if n_type == "word":
            if len(tokens) > n:
                return ["".join(j) for j in zip(*[tokens[i:] for i in range(n)])]
            else:
                #returns the word directly if n is greater than number of words
                a = list()
                a.append(text)
                return a
        if n_type == "character":
            gram_object = ngram.NGram(N=n)
            gram_char_tokens = list(gram_object.split(text))
            if len(text) > n:
                return gram_char_tokens
            else:
                a = list()
                a.append(text)
                return a
        else:
            return list(self.__tokenize_input(text))

    # does regex evaluations specified in configuration file, converts to utf8, lowercase
    #returns the tokens character or word
    def __analyze_field(self, text, prefix, analyzer, settings):
        """Run one field through the analyzer pipeline:
        1. "filters": lowercase/uppercase/latin (NFKD)/mostlyHTML, or a named
           stop-word filter looked up in `settings`;
        2. "replacements": word- and/or sentence-level regex substitutions
           (always lowercases first);
        3. "tokenizers": whitespace or character/word n-grams.
        Returns the non-empty tokens, each UTF-8 encoded and prefixed.
        """
        text = unicode(text)
        if "filters" in analyzer:
            for filter_name in analyzer["filters"]:
                if filter_name == "lowercase":
                    text = text.lower()
                elif filter_name == "uppercase":
                    text = text.upper()
                elif filter_name == "latin":
                    # NFKD-normalize to decompose accented characters
                    nfkd_form = unicodedata.normalize('NFKD', unicode(text))
                    text = unicode(nfkd_form)
                elif filter_name == 'mostlyHTML':
                    # drop the whole field if it looks like raw HTML
                    # (more than 40 tag-like matches)
                    htmltags_count = len(re.findall("<.*?>",text))
                    if htmltags_count > 40:
                        text = ''
                else:
                    # any other name is a filter defined in `settings`;
                    # only the "stop" type is handled here
                    filter_settings = settings[filter_name]
                    if filter_settings["type"] == "stop":
                        words = filter_settings["words"]
                        tokens = self.__tokenize_input_stopwords(text, words)
                        text = unicode(" ".join(tokens))
        #evaluates based on the sent_replacements and word_replacements
        if "replacements" in analyzer:
            text = text.lower()
            wordlist = []
            if 'word_replacements' in analyzer['replacements']:
                for word in text.split():
                    # strip literal "\n"/"\t" escape sequences from the word
                    word = word.replace('\\n','').replace('\\t','')
                    #we shouldn't remove the word that have \u in them those are unicode characters
                    if '\u' in word:
                        wordlist.append(word)
                        continue
                    # drop words that look like links
                    if 'href' in word or 'www' in word:
                        continue
                    for replacement in analyzer['replacements']['word_replacements']:
                        p = re.compile(replacement['regex'], re.UNICODE)
                        word = p.sub(replacement['replacement'], word)
                    wordlist.append(word)
                text = " ".join(wordlist)
            if 'sent_replacements' in analyzer['replacements']:
                for replacement in analyzer['replacements']["sent_replacements"]:
                    p = re.compile(replacement['regex'], re.UNICODE)
                    text = p.sub(replacement['replacement'], text)
                text = text.strip()
            elif 'word_replacements' not in analyzer['replacements'] or 'sent_replacements' not in analyzer['replacements']:
                # legacy shape: "replacements" is itself a list of
                # regex/replacement pairs
                for replacement in analyzer["replacements"]:
                    p = re.compile(replacement['regex'], re.UNICODE)
                    text = p.sub(replacement['replacement'], text)
        tokens = []
        if "tokenizers" in analyzer:
            for tokenizer in analyzer["tokenizers"]:
                if tokenizer == "whitespace":
                    tokens.extend(self.__tokenize_input(text))
                else:
                    # named tokenizer defined in `settings`
                    tokenizer_setting = settings[tokenizer]
                    if tokenizer_setting["type"] == "character_ngram":
                        size = int(tokenizer_setting["size"])
                        tokens.extend(self.__get_n_grams(text, "character", size))
                    elif tokenizer_setting["type"] == "word_ngram":
                        size = int(tokenizer_setting["size"])
                        tokens.extend(self.__get_n_grams(text, "word", size))
        final_tokens = []
        for token in tokens:
            if len(token) > 0:
                final_tokens.append(prefix.encode('utf-8') + token.encode('utf-8'))
        return final_tokens
EntityFX/vfpbench
src/MultiAdapter.h
// 2014 <NAME> // vim:ts=4 sw=4 noet: #ifndef MULTI_ADAPTER_H_ #define MULTI_ADAPTER_H_ #include <minilib/CoreLib.h> #include <minilib/Thread.h> #include <minilib/FixedArray.h> #include "TestBase.h" template<typename T> class MultiAdapter : public ITestBase { public: private: flatlib::ut::FixedArray<T> InstanceArray; flatlib::ut::FixedArrayPOD<flatlib::thread::ThreadFunctionBase*> ThreadArray; public: MultiAdapter( unsigned int group ) : ITestBase( true, group ) { unsigned int core= flatlib::Info.GetThreadCount( group ); InstanceArray.Init( core ); ThreadArray.Init( core ); ThreadArray.ClearZero(); unsigned int instance_count= InstanceArray.GetSize(); for( unsigned int ci= 0 ; ci< instance_count ; ci++ ){ InstanceArray[ci].SetIsMultithread( true ); InstanceArray[ci].SetCoreGroup( group ); } } ~MultiAdapter() { Quit(); } void Run_Direct() { FL_ASSERT( 0 ); } void Quit() { FL_LOG( "MultiAdapter QUIT\n" ); Join(); InstanceArray.Clear(); ThreadArray.Clear(); FL_LOG( "MultiAdapter QUIT COMPLETE\n" ); } void Join() { unsigned int thread_count= ThreadArray.GetSize(); for( unsigned int ci= 0 ; ci< thread_count ; ci++ ){ if( ThreadArray[ci] ){ FL_LOG( "MultiAdapter JOIN %d\n", ci ); ThreadArray[ci]->Join(); flatlib::memory::ZDelete( ThreadArray[ci] ); FL_LOG( "MultiAdapter JOIN QUIT %d\n", ci ); } } } void Run() override { unsigned int thread_count= ThreadArray.GetSize(); for( unsigned int ci= 0 ; ci< thread_count ; ci++ ){ ThreadArray[ci]= flatlib::thread::CreateThreadFunction( [=](){ FL_LOG( "Run : In MultiThread %d\n", ci ); this->SetCpuAffinity(); this->InstanceArray[ci].SetIsMultithread( true ); this->InstanceArray[ci].Run(); FL_LOG( "End MultiThread %d\n", ci ); }); } for( unsigned int ci= 0 ; ci< thread_count ; ci++ ){ ThreadArray[ci]->Run(); } } volatile unsigned int IsDone() override { unsigned int thread_count= InstanceArray.GetSize(); for( unsigned int ci= 0 ; ci< thread_count ; ci++ ){ if( !InstanceArray[ci].IsDone() ){ return false; } } Join(); return true; } 
volatile unsigned int GetProgress() override { unsigned int thread_count= InstanceArray.GetSize(); if( thread_count == 0 ){ return 0; } return InstanceArray[0].GetProgress(); } void SetLoop( unsigned int loop ) override { unsigned int thread_count= InstanceArray.GetSize(); for( unsigned int ci= 0 ; ci< thread_count ; ci++ ){ InstanceArray[ci].SetLoop( loop ); } } unsigned int GetResult( unsigned int index ) const override { unsigned int thread_count= InstanceArray.GetSize(); uint64_t total_time= 0; for( unsigned int ci= 0 ; ci< thread_count ; ci++ ){ unsigned int time= InstanceArray[ci].GetResult( index ); total_time+= time; } return static_cast<unsigned int>( total_time / thread_count ); } unsigned int GetResultInfo( InfoType index ) const override { unsigned int thread_count= InstanceArray.GetSize(); if( thread_count == 0 ){ return 0; } return InstanceArray[0].GetResultInfo( index ); } unsigned int GetLoopOp( unsigned int index ) const override { unsigned int thread_count= InstanceArray.GetSize(); unsigned int total_op= 0; for( unsigned int ci= 0 ; ci< thread_count ; ci++ ){ total_op+= InstanceArray[ci].GetLoopOp( index ); } return total_op; } float GetInstFop( unsigned int index ) const override { unsigned int thread_count= InstanceArray.GetSize(); unsigned int total_op= 0; for( unsigned int ci= 0 ; ci< thread_count ; ci++ ){ total_op+= InstanceArray[ci].GetInstFop( index ); } return total_op; } const char* GetTestName() const override { return InstanceArray[0].GetTestName(); } const char* GetInstructionName( unsigned int result_index ) const override { return InstanceArray[0].GetInstructionName( result_index ); } bool IsMultithread() const override { return true; } unsigned int GetLoopType() const override { return InstanceArray[0].GetLoopType(); } }; #endif
JarredStanford/JarredStanford.github.io
node_modules/grommet/components/Video/Video.js
"use strict"; exports.__esModule = true; exports.Video = void 0; var _react = _interopRequireWildcard(require("react")); var _recompose = require("recompose"); var _styledComponents = require("styled-components"); var _defaultProps = require("../../default-props"); var _Box = require("../Box"); var _Button = require("../Button"); var _Menu = require("../Menu"); var _Meter = require("../Meter"); var _Stack = require("../Stack"); var _Text = require("../Text"); var _hocs = require("../hocs"); var _utils = require("../../utils"); var _StyledVideo = require("./StyledVideo"); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = Object.defineProperty && Object.getOwnPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : {}; if (desc.get || desc.set) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } } newObj["default"] = obj; return newObj; } } function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; } function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); } function _assertThisInitialized(self) { if (self === void 0) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return self; } function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; 
subClass.__proto__ = superClass; } function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } // Split the volume control into 6 segments. Empirically determined. var VOLUME_STEP = 0.166667; var formatTime = function formatTime(time) { var minutes = Math.round(time / 60); if (minutes < 10) { minutes = "0" + minutes; } var seconds = Math.round(time) % 60; if (seconds < 10) { seconds = "0" + seconds; } return minutes + ":" + seconds; }; var videoEvents = ['onAbort', 'onCanPlay', 'onCanPlayThrough', 'onDurationChange', 'onEmptied', 'onEncrypted', 'onEnded', 'onError', 'onLoadedData', 'onLoadedMetadata', 'onLoadStart', 'onPause', 'onPlay', 'onPlaying', 'onProgress', 'onRateChange', 'onSeeked', 'onSeeking', 'onStalled', 'onSuspend', 'onTimeUpdate', 'onVolumeChange', 'onWaiting']; var Video = /*#__PURE__*/ function (_Component) { _inheritsLoose(Video, _Component); Video.getDerivedStateFromProps = function getDerivedStateFromProps(nextProps, prevState) { var forwardRef = nextProps.forwardRef; var videoRef = prevState.videoRef; var nextVideoRef = forwardRef || videoRef; if (nextVideoRef !== videoRef) { return { videoRef: nextVideoRef }; } return null; }; function Video(props) { var _this; _this = _Component.call(this, props) || this; _defineProperty(_assertThisInitialized(_this), "state", { captions: [], scrubberRef: _react["default"].createRef(), videoRef: _react["default"].createRef() }); _defineProperty(_assertThisInitialized(_this), "hasPlayed", false); _defineProperty(_assertThisInitialized(_this), "injectUpdateVideoEvents", function () { return videoEvents.reduce(function (previousValue, currentValue) { var nextValue = _extends({}, previousValue); nextValue[currentValue] = function (e) { if (currentValue in _this.props && /* eslint-disable react/destructuring-assignment */ typeof _this.props[currentValue] === 
'function') { _this.props[currentValue](e); /* eslint-enable react/destructuring-assignment */ } _this.update(); }; return nextValue; }, {}); }); _defineProperty(_assertThisInitialized(_this), "update", function () { var videoRef = _this.state.videoRef; var video = videoRef.current; // Set flag for Video first play if (!_this.hasPlayed && !video.paused && !video.loading || video.currentTime) { _this.hasPlayed = true; } var interacting = _this.state.interacting; if (video.ended) { interacting = false; } _this.setState({ duration: video.duration, currentTime: video.currentTime, // buffered: video.buffered, // paused: video.paused, // muted: video.muted, volume: video.volume, // ended: video.ended, // readyState: video.readyState, interacting: interacting, // computed values // hasPlayed: this.hasPlayed, playing: !video.paused && !video.loading, // percentageBuffered: video.buffered.length && // (video.buffered.end(video.buffered.length - 1) / // video.duration) * 100, percentagePlayed: video.currentTime / video.duration * 100 // loading: video.readyState < video.HAVE_ENOUGH_DATA, }); }); _defineProperty(_assertThisInitialized(_this), "play", function () { var videoRef = _this.state.videoRef; videoRef.current.play(); }); _defineProperty(_assertThisInitialized(_this), "pause", function () { var videoRef = _this.state.videoRef; videoRef.current.pause(); }); _defineProperty(_assertThisInitialized(_this), "scrub", function (event) { var _this$state = _this.state, duration = _this$state.duration, scrubberRef = _this$state.scrubberRef; if (scrubberRef.current) { var scrubberRect = scrubberRef.current.getBoundingClientRect(); var percent = (event.clientX - scrubberRect.left) / scrubberRect.width; _this.setState({ scrubTime: duration * percent }); } }); _defineProperty(_assertThisInitialized(_this), "seek", function (event) { var _this$state2 = _this.state, duration = _this$state2.duration, scrubberRef = _this$state2.scrubberRef, videoRef = _this$state2.videoRef; if 
(scrubberRef.current) { var scrubberRect = scrubberRef.current.getBoundingClientRect(); var percent = (event.clientX - scrubberRect.left) / scrubberRect.width; videoRef.current.currentTime = duration * percent; } }); _defineProperty(_assertThisInitialized(_this), "unmute", function () { var videoRef = _this.state.videoRef; if (videoRef.current) { videoRef.current.muted = false; } }); _defineProperty(_assertThisInitialized(_this), "mute", function () { var videoRef = _this.state.videoRef; if (videoRef.current) { videoRef.current.muted = true; } }); _defineProperty(_assertThisInitialized(_this), "louder", function () { var videoRef = _this.state.videoRef; videoRef.current.volume += VOLUME_STEP; }); _defineProperty(_assertThisInitialized(_this), "quieter", function () { var videoRef = _this.state.videoRef; videoRef.current.volume -= VOLUME_STEP; }); _defineProperty(_assertThisInitialized(_this), "showCaptions", function (index) { var videoRef = _this.state.videoRef; var textTracks = videoRef.current.textTracks; for (var i = 0; i < textTracks.length; i += 1) { textTracks[i].mode = i === index ? 
'showing' : 'hidden'; } // Using forceUpdate to force redraw of controls when changing captions _this.forceUpdate(); }); _defineProperty(_assertThisInitialized(_this), "fullscreen", function () { var videoRef = _this.state.videoRef; var video = videoRef.current; if (video.requestFullscreen) { video.requestFullscreen(); } else if (video.msRequestFullscreen) { video.msRequestFullscreen(); } else if (video.mozRequestFullScreen) { video.mozRequestFullScreen(); } else if (video.webkitRequestFullscreen) { video.webkitRequestFullscreen(); } else { console.warn("Your browser doesn't support fullscreen."); } }); _defineProperty(_assertThisInitialized(_this), "interactionStart", function () { _this.setState({ interacting: true }); clearTimeout(_this.interactionTimer); _this.interactionTimer = setTimeout(_this.interactionStop, 3000); }); _defineProperty(_assertThisInitialized(_this), "interactionStop", function () { var focus = _this.state.focus; if (!focus && !_this.unmounted) { _this.setState({ interacting: false }); } }); _defineProperty(_assertThisInitialized(_this), "restate", function () { var _this$state3 = _this.state, captions = _this$state3.captions, height = _this$state3.height, videoRef = _this$state3.videoRef, width = _this$state3.width; var video = videoRef.current; if (video) { if (video.videoHeight) { // set the size based on the video aspect ratio var rect = video.getBoundingClientRect(); var ratio = rect.width / rect.height; var videoRatio = video.videoWidth / video.videoHeight; if (videoRatio > ratio) { var nextHeight = rect.width / videoRatio; if (nextHeight !== height) { _this.setState({ height: nextHeight, width: undefined }); } } else { var nextWidth = rect.height * videoRatio; if (nextWidth !== width) { _this.setState({ height: undefined, width: nextWidth }); } } } // remember the state of the text tracks for subsequent rendering var textTracks = video.textTracks; if (textTracks.length > 0) { if (textTracks.length === 1) { var active = 
textTracks[0].mode === 'showing'; if (!captions || !captions[0] || captions[0].active !== active) { _this.setState({ captions: [{ active: active }] }); } } else { var nextCaptions = []; var set = false; for (var i = 0; i < textTracks.length; i += 1) { var track = textTracks[i]; var _active = track.mode === 'showing'; nextCaptions.push({ label: track.label, active: _active }); if (!captions || !captions[i] || captions[i].active !== _active) { set = true; } } if (set) { _this.setState({ captions: nextCaptions }); } } } } }); _this.update = (0, _utils.throttle)(_this.update, 100, _assertThisInitialized(_this)); _this.mediaEventProps = _this.injectUpdateVideoEvents(); return _this; } var _proto = Video.prototype; _proto.componentDidMount = function componentDidMount() { var mute = this.props.mute; var videoRef = this.state.videoRef; var video = videoRef.current; if (mute) { this.mute(); } if (video) { // hide all captioning to start with var textTracks = video.textTracks; for (var i = 0; i < textTracks.length; i += 1) { textTracks[i].mode = 'hidden'; } this.restate(); } }; _proto.componentDidUpdate = function componentDidUpdate(prevProps) { var autoPlay = this.props.autoPlay; if (autoPlay && !prevProps.autoPlay) { // Caller wants the video to play now. this.play(); } this.restate(); }; _proto.componentWillUnmount = function componentWillUnmount() { this.unmounted = true; }; _proto.renderControls = function renderControls() { var _this2 = this; var _this$props = this.props, controls = _this$props.controls, theme = _this$props.theme; var _this$state4 = this.state, captions = _this$state4.captions, currentTime = _this$state4.currentTime, duration = _this$state4.duration, interacting = _this$state4.interacting, percentagePlayed = _this$state4.percentagePlayed, playing = _this$state4.playing, scrubberRef = _this$state4.scrubberRef, scrubTime = _this$state4.scrubTime, volume = _this$state4.volume; var over = controls === 'over'; var background = over ? 
theme.video.controls && theme.video.controls.background || { color: 'dark-1', opacity: 'strong' } : undefined; var iconColor = over && (theme.video.icons.color || 'light-1'); var formattedTime = formatTime(scrubTime || currentTime || duration); var Icons = { ClosedCaption: theme.video.icons.closedCaption, Configure: theme.video.icons.configure, FullScreen: theme.video.icons.fullScreen, Pause: theme.video.icons.pause, Play: theme.video.icons.play, ReduceVolume: theme.video.icons.reduceVolume, Volume: theme.video.icons.volume }; var captionControls = captions.map(function (caption) { return { icon: caption.label ? undefined : _react["default"].createElement(Icons.ClosedCaption, { color: iconColor }), label: caption.label, active: caption.active, onClick: function onClick() { return _this2.showCaptions(caption.active ? -1 : 0); } }; }); return _react["default"].createElement(_StyledVideo.StyledVideoControls, { over: over, active: !this.hasPlayed || controls === 'below' || over && interacting }, _react["default"].createElement(_Box.Box, { direction: "row", align: "center", justify: "between", background: background }, _react["default"].createElement(_Button.Button, { icon: playing ? _react["default"].createElement(Icons.Pause, { color: iconColor }) : _react["default"].createElement(Icons.Play, { color: iconColor }), hoverIndicator: "background", onClick: playing ? this.pause : this.play }), _react["default"].createElement(_Box.Box, { direction: "row", align: "center", flex: true }, _react["default"].createElement(_Box.Box, { flex: true }, _react["default"].createElement(_Stack.Stack, null, _react["default"].createElement(_Meter.Meter, { "aria-label": "Video progress", background: over ? 
theme.video.scrubber && theme.video.scrubber.track && theme.video.scrubber.track.color || 'dark-3' : undefined, size: "full", thickness: "small", values: [{ value: percentagePlayed || 0 }] }), _react["default"].createElement(_StyledVideo.StyledVideoScrubber, { ref: scrubberRef, tabIndex: 0, role: "button", value: scrubTime ? Math.round(scrubTime / duration * 100) : undefined, onMouseMove: this.scrub, onMouseLeave: function onMouseLeave() { return _this2.setState({ scrubTime: undefined }); }, onClick: this.seek }))), _react["default"].createElement(_Box.Box, { pad: { horizontal: 'small' } }, _react["default"].createElement(_Text.Text, { margin: "none" }, formattedTime))), _react["default"].createElement(_Menu.Menu, { icon: _react["default"].createElement(Icons.Configure, { color: iconColor }), dropAlign: { bottom: 'top', right: 'right' }, dropBackground: background, items: [{ icon: _react["default"].createElement(Icons.Volume, { color: iconColor }), onClick: volume <= 1 - VOLUME_STEP ? this.louder : undefined, close: false }, { icon: _react["default"].createElement(Icons.ReduceVolume, { color: iconColor }), onClick: volume >= VOLUME_STEP ? this.quieter : undefined, close: false }].concat(captionControls, [{ icon: _react["default"].createElement(Icons.FullScreen, { color: iconColor }), onClick: this.fullscreen }]) }))); }; _proto.render = function render() { var _this$props2 = this.props, alignSelf = _this$props2.alignSelf, autoPlay = _this$props2.autoPlay, children = _this$props2.children, controls = _this$props2.controls, gridArea = _this$props2.gridArea, loop = _this$props2.loop, margin = _this$props2.margin, theme = _this$props2.theme, rest = _objectWithoutPropertiesLoose(_this$props2, ["alignSelf", "autoPlay", "children", "controls", "gridArea", "loop", "margin", "theme"]); var _this$state5 = this.state, height = _this$state5.height, videoRef = _this$state5.videoRef, width = _this$state5.width; var controlsElement = controls ? 
this.renderControls() : undefined; var mouseEventListeners; if (controls === 'over') { mouseEventListeners = { onMouseEnter: this.interactionStart, onMouseMove: this.interactionStart, onTouchStart: this.interactionStart }; } var style; if (rest.fit === 'contain' && controls === 'over') { // constrain the size to fit the aspect ratio so the controls overlap correctly if (width) { style = { width: width }; } else if (height) { style = { height: height }; } } return _react["default"].createElement(_StyledVideo.StyledVideoContainer, _extends({}, mouseEventListeners, { alignSelf: alignSelf, gridArea: gridArea, margin: margin, style: style }), _react["default"].createElement(_StyledVideo.StyledVideo, _extends({}, rest, { ref: videoRef }, this.mediaEventProps, { autoPlay: autoPlay || false, loop: loop || false }), children), controlsElement); }; return Video; }(_react.Component); Video.defaultProps = { controls: 'over' }; Object.setPrototypeOf(Video.defaultProps, _defaultProps.defaultProps); var VideoDoc; if (process.env.NODE_ENV !== 'production') { VideoDoc = require('./doc').doc(Video); // eslint-disable-line global-require } var VideoWrapper = (0, _recompose.compose)(_styledComponents.withTheme, _hocs.withForwardRef)(VideoDoc || Video); exports.Video = VideoWrapper;
juhalindfors/bazel-patches
src/test/java/com/google/devtools/build/lib/collect/nestedset/OrderTest.java
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.collect.nestedset; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests for {@link com.google.devtools.build.lib.collect.nestedset.Order}. */ @RunWith(JUnit4.class) public class OrderTest { @Test public void testParsing() throws Exception { for (Order current : Order.values()) { assertThat(Order.parse(current.getSkylarkName())).isEqualTo(current); assertThat(Order.parse(current.getDeprecatedSkylarkName())).isEqualTo(current); } } @Test public void testForErrors() throws Exception { causeError(null); causeError(""); causeError("lol"); causeError("naive"); causeError("naivelink"); } private void causeError(String invalidName) throws Exception { try { Order.parse(invalidName); fail(); } catch (IllegalArgumentException ex) { assertThat(ex.getMessage()).startsWith("Invalid order"); } } }
manibhushan05/transiq
web/transiq/restapi/serializers/file_upload.py
<reponame>manibhushan05/transiq<filename>web/transiq/restapi/serializers/file_upload.py<gh_stars>0
from datetime import datetime

from django.contrib.auth.models import User
from rest_framework import serializers, ISO_8601
from rest_framework.validators import UniqueValidator, UniqueTogetherValidator

from api import s3util
from api.models import S3Upload
from api.utils import get_ext
from driver.models import Driver
from fileupload.models import PODFile, VehicleFile, OwnerFile, DriverFile, ChequeFile, InvoiceReceiptFile, WeighingSlip
from owner.models import Vehicle, Owner
from restapi.helper_api import DATE_FORMAT, DATETIME_FORMAT
from restapi.serializers.api import S3UploadSerializer
from restapi.serializers.authentication import UserSerializer
from restapi.serializers.driver import DriverSerializer
from restapi.serializers.owner import OwnerSerializer, VehicleSerializer
from restapi.serializers.sme import SmeSerializer
from restapi.serializers.team import LrNumberSerializer, ManualBookingSerializer, InvoiceSerializer
from sme.models import Sme
from team.models import LrNumber, ManualBooking, Invoice


class BasicPODFileSerializer(serializers.Serializer):
    """Read-only, minimal view of a PODFile: id, public URL, and compact
    LR/booking summaries. create/update are deliberately no-ops."""
    id = serializers.IntegerField(label='ID', read_only=True)
    url = serializers.SerializerMethodField()
    lr = serializers.SerializerMethodField()
    booking = serializers.SerializerMethodField()

    def get_lr(self, instance):
        """Compact {id, lr_number} dict, or {} when no LR is linked."""
        if isinstance(instance.lr_number, LrNumber):
            return {'id': instance.lr_number.id, 'lr_number': instance.lr_number.lr_number}
        return {}

    def get_booking(self, instance):
        """Compact {id, booking_id} dict, or {} when no booking is linked."""
        if isinstance(instance.booking, ManualBooking):
            return {'id': instance.booking.id, 'booking_id': instance.booking.booking_id}
        return {}

    def get_url(self, instance):
        """Public S3 URL of the uploaded file, or None when missing."""
        if isinstance(instance, PODFile) and isinstance(instance.s3_upload, S3Upload):
            return instance.s3_upload.public_url()
        return None

    def create(self, validated_data):
        # Read-only serializer: creation is intentionally unsupported.
        pass

    def update(self, instance, validated_data):
        # Read-only serializer: updates are intentionally unsupported.
        pass


class PODFileSerializer(serializers.Serializer):
    """Full read/write serializer for PODFile (proof-of-delivery scans).

    Creating a POD file also flips the parent booking's pod_status to
    'unverified' and stamps pod_date (see create()).
    """
    id = serializers.IntegerField(label='ID', read_only=True)
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=PODFile.objects.all())])
    serial = serializers.CharField(max_length=20)
    s3_url = serializers.URLField(required=False)
    verified = serializers.BooleanField(default=False)
    is_valid = serializers.BooleanField(default=False)
    # NOTE(review): DRF's input_formats expects a list of format strings;
    # confirm DATETIME_FORMAT / DATE_FORMAT (restapi.helper_api) are lists,
    # not single strings, for verified_datetime and created_on below.
    verified_datetime = serializers.DateTimeField(
        allow_null=True, required=False, format=DATE_FORMAT, input_formats=DATETIME_FORMAT)
    created_on = serializers.DateTimeField(read_only=True, format=DATE_FORMAT, input_formats=DATE_FORMAT)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    verified_by = serializers.SlugRelatedField(allow_null=True, queryset=User.objects.all(), required=False,
                                               slug_field="username")
    lr_number = serializers.PrimaryKeyRelatedField(write_only=True, allow_null=True, queryset=LrNumber.objects.all(),
                                                   required=True)
    booking = serializers.PrimaryKeyRelatedField(write_only=True, queryset=ManualBooking.objects.all())
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all(), write_only=True, required=False)

    s3_upload_url = serializers.SerializerMethodField()
    # upload_file = serializers.SerializerMethodField()
    lr_number_data = serializers.SerializerMethodField()
    booking_id = serializers.SerializerMethodField()

    def get_lr_number_data(self, instance):
        """LR number string for display, or None when no LR is linked."""
        if isinstance(instance.lr_number, LrNumber):
            return instance.lr_number.lr_number
        return None

    def get_booking_id(self, instance):
        """Human-readable booking id, or None when no booking is linked."""
        if isinstance(instance.booking, ManualBooking):
            return instance.booking.booking_id
        return None

    class Meta:
        # One POD file per (lr_number, serial) pair.
        validators = [UniqueTogetherValidator(queryset=PODFile.objects.all(), fields=('lr_number', 'serial'))]

    def validate_created_by(self, value):
        # created_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, PODFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # uploaded_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, PODFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def get_s3_upload_url(self, instance):
        """Public S3 URL of the uploaded file, or None when missing."""
        if isinstance(instance, PODFile) and isinstance(instance.s3_upload, S3Upload):
            return instance.s3_upload.public_url()
        return None

    def create(self, validated_data):
        instance = PODFile.objects.create(**validated_data)
        # Side effect: mark the booking as awaiting POD verification.
        if isinstance(instance.booking, ManualBooking):
            ManualBooking.objects.filter(id=instance.booking.id).update(
                pod_status='unverified', pod_date=datetime.now())
        return instance

    def update(self, instance, validated_data):
        # Bulk .update() bypasses model save(); re-fetch for fresh state.
        PODFile.objects.filter(id=instance.id).update(**validated_data)
        return PODFile.objects.get(id=instance.id)


class WeighingSlipSerializer(serializers.Serializer):
    """Read/write serializer for WeighingSlip uploads (mirrors PODFileSerializer)."""
    id = serializers.IntegerField(label='ID', read_only=True)
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=WeighingSlip.objects.all())])
    serial = serializers.CharField(max_length=20)
    s3_url = serializers.URLField(required=False)
    verified = serializers.BooleanField(default=False)
    is_valid = serializers.BooleanField(default=False)
    verified_datetime = serializers.DateTimeField(
        allow_null=True, required=False, format=DATE_FORMAT, input_formats=DATETIME_FORMAT)
    created_on = serializers.DateTimeField(read_only=True, format=DATE_FORMAT, input_formats=DATE_FORMAT)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    verified_by = serializers.SlugRelatedField(allow_null=True, queryset=User.objects.all(), required=False,
                                               slug_field="username")
    booking = serializers.PrimaryKeyRelatedField(write_only=True, queryset=ManualBooking.objects.all())
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all(), write_only=True, required=False)

    s3_upload_url = serializers.SerializerMethodField()
    # upload_file = serializers.SerializerMethodField()
    lr_number_data = serializers.SerializerMethodField()
    booking_id = serializers.SerializerMethodField()

    def get_lr_number_data(self, instance):
        # NOTE(review): looks copy-pasted from PODFileSerializer — confirm the
        # WeighingSlip model actually has an lr_number relation.
        if isinstance(instance.lr_number, LrNumber):
            return instance.lr_number.lr_number
        return None

    def get_booking_id(self, instance):
        """Human-readable booking id, or None when no booking is linked."""
        if isinstance(instance.booking, ManualBooking):
            return instance.booking.booking_id
        return None

    def validate_created_by(self, value):
        # created_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, WeighingSlip) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # uploaded_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, WeighingSlip) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def get_s3_upload_url(self, instance):
        """Public S3 URL of the uploaded file, or None when missing."""
        if isinstance(instance, WeighingSlip) and isinstance(instance.s3_upload, S3Upload):
            return instance.s3_upload.public_url()
        return None

    def create(self, validated_data):
        instance = WeighingSlip.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # Bulk .update() bypasses model save(); re-fetch for fresh state.
        WeighingSlip.objects.filter(id=instance.id).update(**validated_data)
        return WeighingSlip.objects.get(id=instance.id)


class VehicleFileSerializer(serializers.Serializer):
    """Read/write serializer for vehicle documents (PUC, fitness, RC, permit,
    insurance). Nested representations are swapped in via to_representation."""
    id = serializers.IntegerField(label='ID', read_only=True)
    document_category = serializers.ChoiceField(choices=(
        ('PUC', 'Puc Certificate'), ('FIT', 'Fitness Certificate'), ('REG', 'Registration Certificate'),
        ('PERM', 'Permission Certificate'), ('INS', 'Insurance Certificate')))
    s3_url = serializers.URLField(max_length=200, validators=[UniqueValidator(queryset=VehicleFile.objects.all())])
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=VehicleFile.objects.all())])
    serial = serializers.CharField(max_length=20)
    verified = serializers.BooleanField()
    is_valid = serializers.BooleanField()
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    vehicle = serializers.PrimaryKeyRelatedField(queryset=Vehicle.objects.all(), required=False)
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all())

    # class Meta:
    #     validators = [UniqueTogetherValidator(queryset=VehicleFile.objects.all(), fields=('vehicle', 'serial'))]

    def validate_created_by(self, value):
        # created_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, VehicleFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # uploaded_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, VehicleFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def to_representation(self, instance):
        # On output, expand FK ids into full nested serializers.
        self.fields["vehicle"] = VehicleSerializer(read_only=True)
        self.fields["booking"] = ManualBookingSerializer(read_only=True)
        self.fields["s3_upload"] = S3UploadSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        instance = VehicleFile.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # Bulk .update() bypasses model save(); re-fetch for fresh state.
        VehicleFile.objects.filter(id=instance.id).update(**validated_data)
        return VehicleFile.objects.get(id=instance.id)


class OwnerFileSerializer(serializers.Serializer):
    """Read/write serializer for vehicle-owner identity documents."""
    id = serializers.IntegerField(label='ID', read_only=True)
    document_category = serializers.ChoiceField(choices=(
        ('PAN', 'PAN Card'), ('DL', 'Driving Licence'), ('EL', 'Election ID'), ('AC', 'Aadhar Card'),
        ('PT', 'Passport'), ('RC', 'Ration Card'), ('DEC', 'Declaration')))
    s3_url = serializers.URLField(max_length=200, validators=[UniqueValidator(queryset=OwnerFile.objects.all())])
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=OwnerFile.objects.all())])
    serial = serializers.CharField(max_length=20, required=True)
    verified = serializers.BooleanField(required=False)
    is_valid = serializers.BooleanField()
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(allow_null=True, queryset=User.objects.all(), required=False,
                                               slug_field="username")
    owner = serializers.PrimaryKeyRelatedField(queryset=Owner.objects.all(), allow_null=True, required=False)
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all())

    # class Meta:
    #     validators = [UniqueTogetherValidator(queryset=OwnerFile.objects.all(), fields=('owner', 'serial'))]

    def validate_created_by(self, value):
        # created_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, OwnerFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # uploaded_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, OwnerFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def to_representation(self, instance):
        # On output, expand FK ids into full nested serializers.
        self.fields["owner"] = OwnerSerializer(read_only=True)
        self.fields["s3_upload"] = S3UploadSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        instance = OwnerFile.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # Bulk .update() bypasses model save(); re-fetch for fresh state.
        OwnerFile.objects.filter(id=instance.id).update(**validated_data)
        return OwnerFile.objects.get(id=instance.id)


class DriverFileSerializer(serializers.Serializer):
    """Read/write serializer for driver identity documents."""
    id = serializers.IntegerField(label='ID', read_only=True)
    document_category = serializers.ChoiceField(allow_null=True, choices=(
        ('PAN', 'PAN Card'), ('DL', 'Driving Licence'), ('EL', 'Election ID'), ('AC', 'Aadhar Card'),
        ('PT', 'Passport'), ('RC', 'Ration Card')), required=False)
    s3_url = serializers.URLField(max_length=200, validators=[UniqueValidator(queryset=DriverFile.objects.all())])
    s3_thumb_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                        validators=[UniqueValidator(queryset=DriverFile.objects.all())])
    verified = serializers.BooleanField()
    is_valid = serializers.BooleanField()
    serial = serializers.CharField(max_length=20)
    created_on = serializers.DateTimeField(read_only=True)
    updated_on = serializers.DateTimeField(read_only=True)
    deleted = serializers.BooleanField(required=False)
    deleted_on = serializers.DateTimeField(allow_null=True, required=False)
    created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username")
    changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username")
    uploaded_by = serializers.SlugRelatedField(allow_null=True, queryset=User.objects.all(), required=False,
                                               slug_field="username")
    driver = serializers.PrimaryKeyRelatedField(queryset=Driver.objects.all(), required=False)
    s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all())

    # class Meta:
    #     validators = [UniqueTogetherValidator(queryset=DriverFile.objects.all(), fields=('driver', 'serial'))]

    def validate_created_by(self, value):
        # created_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, DriverFile) and value:
            raise serializers.ValidationError("Created by is immutable")
        return value

    def validate_uploaded_by(self, value):
        # uploaded_by may only be set on creation, never changed afterwards.
        if isinstance(self.instance, DriverFile) and value:
            raise serializers.ValidationError("Uploaded by is immutable")
        return value

    def to_representation(self, instance):
        # On output, expand FK ids into full nested serializers.
        self.fields["driver"] = DriverSerializer(read_only=True)
        self.fields["s3_upload"] = S3UploadSerializer(read_only=True)
        return super().to_representation(instance=instance)

    def create(self, validated_data):
        instance = DriverFile.objects.create(**validated_data)
        return instance

    def update(self, instance, validated_data):
        # Bulk .update() bypasses model save(); re-fetch for fresh state.
        DriverFile.objects.filter(id=instance.id).update(**validated_data)
        return DriverFile.objects.get(id=instance.id)


class ChequeFileSerializer(serializers.Serializer):
    """Read/write serializer for customer cheque scans.
    (Definition continues beyond this chunk.)"""
    id = serializers.IntegerField(label='ID', read_only=True)
    s3_url = serializers.URLField(allow_null=True, max_length=200, required=False,
                                  validators=[UniqueValidator(queryset=ChequeFile.objects.all())])
    resolved_datetime = serializers.DateTimeField(allow_null=True, required=False)
    customer_name = serializers.CharField(max_length=300)
    amount = serializers.IntegerField(max_value=50000000, min_value=0, required=False)
    cheque_number = serializers.CharField(max_length=6, min_length=6)
    cheque_date = serializers.DateField(format=DATE_FORMAT, input_formats=[DATE_FORMAT, ISO_8601])
    remarks = serializers.CharField(allow_null=True, max_length=300, required=False)
    is_valid = serializers.BooleanField()
    resolved = serializers.BooleanField(required=False)
    serial = serializers.CharField(max_length=20, required=True)
    created_on = 
serializers.DateTimeField(read_only=True) updated_on = serializers.DateTimeField(read_only=True) deleted = serializers.BooleanField(required=False) deleted_on = serializers.DateTimeField(allow_null=True, required=False) created_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username") changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username") uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username") resolved_by = serializers.SlugRelatedField(queryset=User.objects.all(), required=False, slug_field="username") customer = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Sme.objects.all(), required=False) s3_upload = serializers.PrimaryKeyRelatedField(queryset=S3Upload.objects.all()) public_url = serializers.SerializerMethodField() class Meta: validators = [UniqueTogetherValidator(queryset=ChequeFile.objects.all(), fields=('customer_name', 'serial'))] def validate_created_by(self, value): if isinstance(self.instance, ChequeFile) and value: raise serializers.ValidationError("Created by is immutable") return value def get_public_url(self, instance): if isinstance(instance, ChequeFile) and isinstance(instance.s3_upload, S3Upload): return instance.s3_upload.public_url() return None def validate_uploaded_by(self, value): if isinstance(self.instance, ChequeFile) and value: raise serializers.ValidationError("Uploaded by is immutable") return value def to_representation(self, instance): # self.fields["customer"] = SmeSerializer(read_only=True) self.fields["s3_upload"] = S3UploadSerializer(read_only=True) return super().to_representation(instance=instance) def create(self, validated_data): instance = ChequeFile.objects.create(**validated_data) return instance def update(self, instance, validated_data): ChequeFile.objects.filter(id=instance.id).update(**validated_data) return ChequeFile.objects.get(id=instance.id) class 
InvoiceReceiptFileSerializer(serializers.Serializer): id = serializers.IntegerField(label='ID', read_only=True) invoice_number = serializers.CharField(max_length=50, required=False) verified = serializers.BooleanField(default=False) is_valid = serializers.BooleanField(default=False) serial = serializers.CharField(max_length=20, required=False) invoice_sent_mode = serializers.CharField(allow_null=True, allow_blank=True, max_length=20, required=False) invoice_confirm_mode = serializers.CharField(allow_null=True, max_length=20, required=False) invoice_confirm_by_name = serializers.CharField(allow_null=True, max_length=20, required=False) invoice_confirm_by_phone = serializers.CharField(allow_null=True, allow_blank=True, max_length=20, required=False) created_on = serializers.DateTimeField(read_only=True) updated_on = serializers.DateTimeField(read_only=True) deleted = serializers.BooleanField(required=False) deleted_on = serializers.DateTimeField(allow_null=True, required=False) created_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username") changed_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username") uploaded_by = serializers.SlugRelatedField(queryset=User.objects.all(), slug_field="username") invoice_receipt = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=Invoice.objects.all(), required=False) s3_upload = serializers.PrimaryKeyRelatedField(allow_null=True, queryset=S3Upload.objects.all(), required=False) def validate_created_by(self, value): if isinstance(self.instance, InvoiceReceiptFile) and value: raise serializers.ValidationError("Created by is immutable") return value def validate_uploaded_by(self, value): if isinstance(self.instance, InvoiceReceiptFile) and value: raise serializers.ValidationError("Uploaded by is immutable") return value def validate_deleted(self, attrs): if isinstance(self.instance, InvoiceReceiptFile) and not attrs: if 
InvoiceReceiptFile.objects.filter(invoice_number=self.instance.invoice_number): raise serializers.ValidationError("Invoice number must be unique") return attrs def to_representation(self, instance): # self.fields["invoice_receipt"] = InvoiceSerializer(read_only=True) self.fields["s3_upload"] = S3UploadSerializer(read_only=True) return super().to_representation(instance=instance) def create(self, validated_data): instance = InvoiceReceiptFile.objects.create(**validated_data) return instance def update(self, instance, validated_data): InvoiceReceiptFile.objects.filter(id=instance.id).update(**validated_data) return InvoiceReceiptFile.objects.get(id=instance.id)
m-m-m/query
api/src/main/java/net/sf/mmm/query/api/feature/FeatureInsert.java
<filename>api/src/main/java/net/sf/mmm/query/api/feature/FeatureInsert.java /* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0 * http://www.apache.org/licenses/LICENSE-2.0 */ package net.sf.mmm.query.api.feature; import net.sf.mmm.query.api.path.EntityAlias; import net.sf.mmm.query.api.statement.InsertStatement; /** * {@link StatementFactoryFeature} for a {@link net.sf.mmm.query.api.statement.StatementFactory} supporting a * {@link InsertStatement}. * * @author hohwille * @since 8.5.0 */ public abstract interface FeatureInsert extends StatementFactoryFeature { /** * Creates a regular {@link InsertStatement} ({@code DELETE FROM alias.source [AS alias.name] ...}). * * @param <E> the generic type of the entity to create (insert). * @param alias the {@link EntityAlias} to create (insert). * @return the new {@link InsertStatement}. */ <E> InsertStatement<E, ?> insertInto(EntityAlias<E> alias); }
germix/sanos
src/sys/dev/3c905c.h
<reponame>germix/sanos // // 3c905c.h // // 3Com 3C905C NIC network driver // // 3Com 3C905C NIC network driver // // Copyright (C) 2002 <NAME>. All rights reserved. // Copyright (C) 1999 3Com Corporation. All rights reserved. // // 3Com Network Driver software is distributed as is, without any warranty // of any kind, either express or implied as further specified in the GNU Public // License. This software may be used and distributed according to the terms of // the GNU Public License, located in the file LICENSE. // // 3Com and EtherLink are registered trademarks of 3Com Corporation. // #ifndef _3C905C_H #define _3C905C_H #define ETHER_FRAME_LEN 1544 #define EEPROM_SIZE 0x21 #define RX_COPYBREAK 128 #define TX_TIMEOUT 5000 #define TX_RING_SIZE 16 #define RX_RING_SIZE 32 #define TX_MAX_FRAGS 16 #define LAST_FRAG 0x80000000 // Last entry in descriptor #define DN_COMPLETE 0x00010000 // This packet has been downloaded #define UP_COMPLETE 0x00008000 // This packet has been uploaded // // PCI IDs // #define UNITCODE_3C905B1 PCI_UNITCODE(0x10B7, 0x9055) #define UNITCODE_3C905C PCI_UNITCODE(0x10B7, 0x9200) #define UNITCODE_3C9051 PCI_UNITCODE(0x10B7, 0x9050) // // Commands // #define CMD_RESET (0x0 << 0xB) #define CMD_SELECT_WINDOW (0x1 << 0xB) #define CMD_ENABLE_DC_CONVERTER (0x2 << 0xB) #define CMD_RX_DISABLE (0x3 << 0xB) #define CMD_RX_ENABLE (0x4 << 0xB) #define CMD_RX_RESET (0x5 << 0xB) #define CMD_UP_STALL ((0x6 << 0xB) | 0x0) #define CMD_UP_UNSTALL ((0x6 << 0xB) | 0x1) #define CMD_DOWN_STALL ((0x6 << 0xB) | 0x2) #define CMD_DOWN_UNSTALL ((0x6 << 0xB) | 0x3) #define CMD_TX_DONE (0x7 << 0xB) #define CMD_RX_DISCARD (0x8 << 0xB) #define CMD_TX_ENABLE (0x9 << 0xB) #define CMD_TX_DISABLE (0xA << 0xB) #define CMD_TX_RESET (0xB << 0xB) #define CMD_REQUEST_INTERRUPT (0xC << 0xB) #define CMD_ACKNOWLEDGE_INTERRUPT (0xD << 0xB) #define CMD_SET_INTERRUPT_ENABLE (0xE << 0xB) #define CMD_SET_INDICATION_ENABLE (0xF << 0xB) #define CMD_SET_RX_FILTER (0x10 << 0xB) #define 
CMD_TX_AGAIN (0x13 << 0xB) #define CMD_STATISTICS_ENABLE (0x15 << 0xB) #define CMD_STATISTICS_DISABLE (0x16 << 0xB) #define CMD_DISABLE_DC_CONVERTER (0x17 << 0xB) #define CMD_SET_HASH_FILTER_BIT (0x19 << 0xB) #define CMD_TX_FIFO_BISECT (0x1B << 0xB) // // Non-windowed registers // #define CMD 0x0E #define STATUS 0x0E #define TIMER 0x1A #define TX_STATUS 0x1B #define INT_STATUS_AUTO 0x1E #define DMA_CONTROL 0x20 #define DOWN_LIST_POINTER 0x24 #define DOWN_POLL 0x2D #define UP_PACKET_STATUS 0x30 #define FREE_TIMER 0x34 #define COUNTDOWN 0x36 #define UP_LIST_POINTER 0x38 #define UP_POLL 0x3D #define REAL_TIME_COUNTER 0x40 #define CONFIG_ADDRESS 0x44 #define CONFIG_DATA 0x48 #define DEBUG_DATA 0x70 #define DEBUG_CONTROL 0x74 // // Window 0 // #define BIOS_ROM_ADDR 0x04 #define BIOS_ROM_DATA 0x08 #define EEPROM_CMD 0x0A #define EEPROM_DATA 0x0C #define EEPROM_CMD_SUB 0x0000 #define EEPROM_CMD_WRITE 0x0040 #define EEPROM_CMD_READ 0x0080 #define EEPROM_CMD_ERASE 0x00C0 #define EEPROM_BUSY 0x8000 // // Window 1 // // // Window 2 // #define STATION_ADDRESS_LOW 0x00 #define STATION_ADDRESS_MID 0x02 #define STATION_ADDRESS_HIGH 0x04 // // Window 3 // #define INTERNAL_CONFIG 0x00 #define MAXIMUM_PACKET_SIZE 0x04 #define MAC_CONTROL 0x06 #define MEDIA_OPTIONS 0x08 #define RX_FREE 0x0A #define TX_FREE 0x0C // // Window 4 // #define NETWORK_DIAGNOSTICS 0x06 #define PHYSICAL_MANAGEMENT 0x08 #define MEDIA_STATUS 0x0A #define BAD_SSD 0x0C #define UPPER_BYTES_OK 0x0D // // Window 5 // #define RX_FILTER 0x08 #define INTERRUPT_ENABLE 0x0A #define INDICATION_ENABLE 0x0C // // Window 6 // #define CARRIER_LOST 0x00 #define SQE_ERRORS 0x01 #define MULTIPLE_COLLISIONS 0x02 #define SINGLE_COLLISIONS 0x03 #define LATE_COLLISIONS 0x04 #define RX_OVERRUNS 0x05 #define FRAMES_XMITTED_OK 0x06 #define FRAMES_RECEIVED_OK 0x07 #define FRAMES_DEFERRED 0x08 #define UPPER_FRAMES_OK 0x09 #define BYTES_RECEIVED_OK 0x0A #define BYTES_XMITTED_OK 0x0C #define FIRST_BYTE_STAT 0x00 #define LAST_BYTE_STAT 0x09 
// // Window 7 // // // TX status flags // #define TX_STATUS_MAXIMUM_COLLISION (1 << 3) #define TX_STATUS_HWERROR (1 << 4) #define TX_STATUS_JABBER (1 << 5) #define TX_STATUS_INTERRUPT_REQUESTED (1 << 6) #define TX_STATUS_COMPLETE (1 << 7) // // Global reset flags // #define GLOBAL_RESET_MASK_TP_AUI_RESET (1 << 0) #define GLOBAL_RESET_MASK_ENDEC_RESET (1 << 1) #define GLOBAL_RESET_MASK_NETWORK_RESET (1 << 2) #define GLOBAL_RESET_MASK_FIFO_RESET (1 << 3) #define GLOBAL_RESET_MASK_AISM_RESET (1 << 4) #define GLOBAL_RESET_MASK_HOST_RESET (1 << 5) #define GLOBAL_RESET_MASK_SMB_RESET (1 << 6) #define GLOBAL_RESET_MASK_VCO_RESET (1 << 7) #define GLOBAL_RESET_MASK_UP_DOWN_RESET (1 << 8) // // TX reset flags // #define TX_RESET_MASK_TP_AUI_RESET (1 << 0) #define TX_RESET_MASK_ENDEC_RESET (1 << 1) #define TX_RESET_MASK_NETWORK_RESET (1 << 2) #define TX_RESET_MASK_FIFO_RESET (1 << 3) #define TX_RESET_MASK_DOWN_RESET (1 << 8) // // RX reset flags // #define RX_RESET_MASK_TP_AUI_RESET (1 << 0) #define RX_RESET_MASK_ENDEC_RESET (1 << 1) #define RX_RESET_MASK_NETWORK_RESET (1 << 2) #define RX_RESET_MASK_FIFO_RESET (1 << 3) #define RX_RESET_MASK_UP_RESET (1 << 8) // // IntStatus flags // #define INTSTATUS_INT_LATCH (1 << 0) #define INTSTATUS_HOST_ERROR (1 << 1) #define INTSTATUS_TX_COMPLETE (1 << 2) #define INTSTATUS_RX_COMPLETE (1 << 4) #define INTSTATUS_RX_EARLY (1 << 5) #define INTSTATUS_INT_REQUESTED (1 << 6) #define INTSTATUS_UPDATE_STATS (1 << 7) #define INTSTATUS_LINK_EVENT (1 << 8) #define INTSTATUS_DN_COMPLETE (1 << 9) #define INTSTATUS_UP_COMPLETE (1 << 10) #define INTSTATUS_CMD_IN_PROGRESS (1 << 12) #define ALL_INTERRUPTS 0x06EE // // AcknowledgeInterrupt flags // #define INTERRUPT_LATCH_ACK 0x0001 #define LINK_EVENT_ACK 0x0002 #define RX_EARLY_ACK 0x0020 #define INT_REQUESTED_ACK 0x0040 #define DN_COMPLETE_ACK 0x0200 #define UP_COMPLETE_ACK 0x0400 #define ALL_ACK 0x07FF // // RxFilter // #define RECEIVE_INDIVIDUAL 0x01 #define RECEIVE_MULTICAST 0x02 #define 
RECEIVE_BROADCAST 0x04 #define RECEIVE_ALL_FRAMES 0x08 #define RECEIVE_MULTICAST_HASH 0x10 // // UpStatus // #define UP_PACKET_STATUS_ERROR (1 << 14) #define UP_PACKET_STATUS_COMPLETE (1 << 15) #define UP_PACKET_STATUS_OVERRUN (1 << 16) #define UP_PACKET_STATUS_RUNT_FRAME (1 << 17) #define UP_PACKET_STATUS_ALIGNMENT_ERROR (1 << 18) #define UP_PACKET_STATUS_CRC_ERROR (1 << 19) #define UP_PACKET_STATUS_OVERSIZE_FRAME (1 << 20) #define UP_PACKET_STATUS_DRIBBLE_BITS (1 << 23) #define UP_PACKET_STATUS_OVERFLOW (1 << 24) #define UP_PACKET_STATUS_IP_CHECKSUM_ERROR (1 << 25) #define UP_PACKET_STATUS_TCP_CHECKSUM_ERROR (1 << 26) #define UP_PACKET_STATUS_UDP_CHECKSUM_ERROR (1 << 27) #define UP_PACKET_STATUS_IMPLIED_BUFFER_ENABLE (1 << 28) #define UP_PACKET_STATUS_IP_CHECKSUM_CHECKED (1 << 29) #define UP_PACKET_STATUS_TCP_CHECKSUM_CHECKED (1 << 30) #define UP_PACKET_STATUS_UDP_CHECKSUM_CHECKED (1 << 31) #define UP_PACKET_STATUS_ERROR_MASK 0x1F0000 // // Frame Start Header // #define FSH_CRC_APPEND_DISABLE (1 << 13) #define FSH_TX_INDICATE (1 << 15) #define FSH_DOWN_COMPLETE (1 << 16) #define FSH_LAST_KEEP_ALIVE_PACKET (1 << 24) #define FSH_ADD_IP_CHECKSUM (1 << 25) #define FSH_ADD_TCP_CHECKSUM (1 << 26) #define FSH_ADD_UDP_CHECKSUM (1 << 27) #define FSH_ROUND_UP_DEFEAT (1 << 28) #define FSH_DPD_EMPTY (1 << 29) #define FSH_DOWN_INDICATE (1 << 31) // // Internal Config // #define INTERNAL_CONFIG_DISABLE_BAD_SSD (1 << 8) #define INTERNAL_CONFIG_ENABLE_TX_LARGE (1 << 14) #define INTERNAL_CONFIG_ENABLE_RX_LARGE (1 << 15) #define INTERNAL_CONFIG_AUTO_SELECT (1 << 24) #define INTERNAL_CONFIG_DISABLE_ROM (1 << 25) #define INTERNAL_CONFIG_TRANSCEIVER_MASK 0x00F00000 #define INTERNAL_CONFIG_TRANSCEIVER_SHIFT 20 // // Connector types // #define CONNECTOR_10BASET 0 #define CONNECTOR_10AUI 1 #define CONNECTOR_10BASE2 3 #define CONNECTOR_100BASETX 4 #define CONNECTOR_100BASEFX 5 #define CONNECTOR_MII 6 #define CONNECTOR_AUTONEGOTIATION 8 #define CONNECTOR_EXTERNAL_MII 9 #define 
CONNECTOR_UNKNOWN 0xFF // // Physical Management // #define PHY_WRITE 0x0004 // Write to PHY (drive MDIO) #define PHY_DATA1 0x0002 // MDIO data bit #define PHY_CLOCK 0x0001 // MII clock signal #define MII_PHY_ADDRESS 0x0C00 #define MII_PHY_ADDRESS_READ (MII_PHY_ADDRESS | 0x6000) #define MII_PHY_ADDRESS_WRITE (MII_PHY_ADDRESS | 0x5002) // // DMA control // #define DMA_CONTROL_DOWN_STALLED (1 << 2) #define DMA_CONTROL_UP_COMPLETE (1 << 3) #define DMA_CONTROL_DOWN_COMPLETE (1 << 4) #define DMA_CONTROL_ARM_COUNTDOWN (1 << 6) #define DMA_CONTROL_DOWN_IN_PROGRESS (1 << 7) #define DMA_CONTROL_COUNTER_SPEED (1 << 8) #define DMA_CONTROL_COUNTDOWN_MODE (1 << 9) #define DMA_CONTROL_DOWN_SEQ_DISABLE (1 << 17) #define DMA_CONTROL_DEFEAT_MWI (1 << 20) #define DMA_CONTROL_DEFEAT_MRL (1 << 21) #define DMA_CONTROL_UPOVERDISC_DISABLE (1 << 22) #define DMA_CONTROL_TARGET_ABORT (1 << 30) #define DMA_CONTROL_MASTER_ABORT (1 << 31) // // Media status // #define MEDIA_STATUS_SQE_STATISTICS_ENABLE (1 << 3) #define MEDIA_STATUS_CARRIER_SENSE (1 << 5) #define MEDIA_STATUS_JABBER_GUARD_ENABLE (1 << 6) #define MEDIA_STATUS_LINK_BEAT_ENABLE (1 << 7) #define MEDIA_STATUS_LINK_DETECT (1 << 11) #define MEDIA_STATUS_TX_IN_PROGRESS (1 << 12) #define MEDIA_STATUS_DC_CONVERTER_ENABLED (1 << 14) // // Media Options // #define MEDIA_OPTIONS_100BASET4_AVAILABLE (1 << 0) #define MEDIA_OPTIONS_100BASETX_AVAILABLE (1 << 1) #define MEDIA_OPTIONS_100BASEFX_AVAILABLE (1 << 2) #define MEDIA_OPTIONS_10BASET_AVAILABLE (1 << 3) #define MEDIA_OPTIONS_10BASE2_AVAILABLE (1 << 4) #define MEDIA_OPTIONS_10AUI_AVAILABLE (1 << 5) #define MEDIA_OPTIONS_MII_AVAILABLE (1 << 6) #define MEDIA_OPTIONS_10BASEFL_AVAILABLE (1 << 8) // // MAC Control // #define MAC_CONTROL_FULL_DUPLEX_ENABLE (1 << 5) #define MAC_CONTROL_ALLOW_LARGE_PACKETS (1 << 6) #define MAC_CONTROL_FLOW_CONTROL_ENABLE (1 << 8) // // Network diagnostics // #define NETWORK_DIAGNOSTICS_ASIC_REVISION 0x003E #define NETWORK_DIAGNOSTICS_ASIC_REVISION_LOW 0x000E 
#define NETWORK_DIAGNOSTICS_UPPER_BYTES_ENABLE (1 << 6) // // MII Registers // #define MII_PHY_CONTROL 0 // Control reg address #define MII_PHY_STATUS 1 // Status reg address #define MII_PHY_OUI 2 // Most of the OUI bits #define MII_PHY_MODEL 3 // Model/rev bits, and rest of OUI #define MII_PHY_ANAR 4 // Auto negotiate advertisement reg #define MII_PHY_ANLPAR 5 // Auto negotiate link partner reg #define MII_PHY_ANER 6 // Auto negotiate expansion reg // // MII control register // #define MII_CONTROL_RESET 0x8000 // Reset bit in control reg #define MII_CONTROL_100MB 0x2000 // 100Mbit or 10 Mbit flag #define MII_CONTROL_ENABLE_AUTO 0x1000 // Autonegotiate enable #define MII_CONTROL_ISOLATE 0x0400 // Islolate bit #define MII_CONTROL_START_AUTO 0x0200 // Restart autonegotiate #define MII_CONTROL_FULL_DUPLEX 0x0100 // Full duplex // // MII status register // #define MII_STATUS_100MB_MASK 0xE000 // Any of these indicate 100 Mbit #define MII_STATUS_10MB_MASK 0x1800 // Either of these indicate 10 Mbit #define MII_STATUS_AUTO_DONE 0x0020 // Auto negotiation complete #define MII_STATUS_AUTO 0x0008 // Auto negotiation is available #define MII_STATUS_LINK_UP 0x0004 // Link status bit #define MII_STATUS_EXTENDED 0x0001 // Extended regs exist #define MII_STATUS_100T4 0x8000 // Capable of 100BT4 #define MII_STATUS_100TXFD 0x4000 // Capable of 100BTX full duplex #define MII_STATUS_100TX 0x2000 // Capable of 100BTX #define MII_STATUS_10TFD 0x1000 // Capable of 10BT full duplex #define MII_STATUS_10T 0x0800 // Capable of 10BT // // MII Auto-Negotiation Link Partner Ability // #define MII_ANLPAR_100T4 0x0200 // Support 100BT4 #define MII_ANLPAR_100TXFD 0x0100 // Support 100BTX full duplex #define MII_ANLPAR_100TX 0x0080 // Support 100BTX half duplex #define MII_ANLPAR_10TFD 0x0040 // Support 10BT full duplex #define MII_ANLPAR_10T 0x0020 // Support 10BT half duplex // // MII Auto-Negotiation Advertisement // #define MII_ANER_LPANABLE 0x0001 // Link partner autonegotiatable ? 
#define MII_ANAR_100T4 0x0200 // Support 100BT4 #define MII_ANAR_100TXFD 0x0100 // Support 100BTX full duplex #define MII_ANAR_100TX 0x0080 // Support 100BTX half duplex #define MII_ANAR_10TFD 0x0040 // Support 10BT full duplex #define MII_ANAR_10T 0x0020 // Support 10BT half duplex #define MII_ANAR_FLOWCONTROL 0x0400 // Support Flow Control #define MII_ANAR_MEDIA_MASK 0x07E0 // Mask the media selection bits #define MII_ANAR_MEDIA_100_MASK (MII_ANAR_100TXFD | MII_ANAR_100TX) #define MII_ANAR_MEDIA_10_MASK (MII_ANAR_10TFD | MII_ANAR_10T) // // EEPROM contents // #define EEPROM_NODE_ADDRESS1 0x00 #define EEPROM_NODE_ADDRESS2 0x01 #define EEPROM_NODE_ADDRESS3 0x02 #define EEPROM_DEVICE_ID 0x03 #define EEPROM_MANUFACT_DATE 0x04 #define EEPROM_MANUFACT_DIVISION 0x05 #define EEPROM_MANUFACT_PRODCODE 0x06 #define EEPROM_MANUFACT_ID 0x07 #define EEPROM_PCI_PARM 0x08 #define EEPROM_ROM_INFO 0x09 #define EEPROM_OEM_NODE_ADDRESS1 0x0A #define EEPROM_OEM_NODE_ADDRESS2 0x0B #define EEPROM_OEM_NODE_ADDRESS3 0x0C #define EEPROM_SOFTWARE_INFO1 0x0D #define EEPROM_COMPAT_WORD 0x0E #define EEPROM_SOFTWARE_INFO2 0x0F #define EEPROM_CAPABILITIES_WORD 0x10 #define EEPROM_RESERVED_11 0x11 #define EEPROM_INTERNAL_CONFIG0 0x12 #define EEPROM_INTERNAL_CONFIG1 0x13 #define EEPROM_RESERVED_14 0x14 #define EEPROM_SOFTWARE_INFO3 0x15 #define EEPROM_LANWORKD_DATA1 0x16 #define EEPROM_SUBSYSTEM_VENDOR 0x17 #define EEPROM_SUBSYSTEM_ID 0x18 #define EEPROM_MEDIA_OPTIONS 0x19 #define EEPROM_LANWORKD_DATA2 0x1A #define EEPROM_SMB_ADDRESS 0x1B #define EEPROM_PCI_PARM2 0x1C #define EEPROM_PCI_PARM3 0x1D #define EEPROM_RESERVED_1E 0x1E #define EEPROM_RESERVED_1F 0x1F #define EEPROM_CHECKSUM1 0x20 // // EEPROM software information 1 // #define LINK_BEAT_DISABLE (1 << 14) // // EEPROM software information 2 // #define ENABLE_MWI_WORK 0x0020 // // MII Transceiver Type // #define MIISELECT_GENERIC 0x0000 #define MIISELECT_100BT4 0x0001 #define MIISELECT_10BT 0x0002 #define MIISELECT_100BTX 0x0003 #define 
MIISELECT_10BT_ANE 0x0004 #define MIISELECT_100BTX_ANE 0x0005 #define MIITXTYPE_MASK 0x000F #endif
Boundarybreaker/Landmark
src/main/java/dev/hephaestus/landmark/impl/names/provider/types/Selector.java
package dev.hephaestus.landmark.impl.names.provider.types; import java.util.ArrayList; import dev.hephaestus.landmark.impl.names.provider.MultiComponentProvider; import dev.hephaestus.landmark.impl.names.provider.NameComponentProvider; import net.minecraft.text.MutableText; import net.minecraft.util.Identifier; public class Selector extends MultiComponentProvider { private final ArrayList<NameComponentProvider> components = new ArrayList<>(); public Selector(Identifier id) { super(id); } public Selector(NameComponentProvider parent) { super(parent); } @Override public void addComponent(NameComponentProvider provider) { this.components.add(provider); } @Override public MutableText generateComponent() { return this.components.get((int) (Math.random() * components.size())).generateComponent(); } }
jameszhan/notes-ml
06-programing-languages/01-pythons/03-just-for-fun/03-pdlt/10_conversion_calculator.py
# -*- coding: utf-8 -*-
"""Unit-aware expression calculator.

An expression is one of:
  * a number (int or float) -- a dimensionless magnitude,
  * a string               -- a unit symbol, e.g. 'm' meaning "1 metre",
  * a tuple (op, lhs, rhs) -- with op one of '+', '-', '*', '/', '^'.

Evaluating an expression yields a pair (magnitude, units), where `units`
maps each unit symbol to its non-zero integer exponent; e.g. m/s^2 is
represented as {'m': 1, 's': -2}.
"""


def eval(e):  # intentionally shadows the builtin; name kept for API compatibility
    """Evaluate expression `e` to a (magnitude, units) pair."""
    if isinstance(e, (int, float)):
        # A bare number carries no units; the result is the pair (e, {}), not e.
        return (e, {})
    if isinstance(e, str):
        # Base case: a unit symbol stands for "1 <unit>".
        return (1, {e: 1})
    if isinstance(e, tuple):
        op = e[0]
        if op == '+':
            return add(eval(e[1]), eval(e[2]))
        if op == '-':
            return sub(eval(e[1]), eval(e[2]))
        if op == '*':
            return mul(eval(e[1]), eval(e[2]))
        if op == '/':
            return div(eval(e[1]), eval(e[2]))
        if op == '^':
            return pow(eval(e[1]), eval(e[2]))


def add(v1, v2):
    """Add two values; their units must match exactly."""
    n1, u1 = v1
    n2, u2 = v2
    if u1 != u2:
        raise Exception("Adding incompatible units")
    return (n1 + n2, u1)


def sub(v1, v2):
    """Subtract v2 from v1; their units must match exactly."""
    n1, u1 = v1
    n2, u2 = v2
    if u1 != u2:
        # Bug fix: the original message said "Adding incompatible units".
        raise Exception("Subtracting incompatible units")
    return (n1 - n2, u1)


def mul(v1, v2):
    """Multiply two values; unit exponents add."""
    n1, u1 = v1
    n2, u2 = v2
    return (n1 * n2, mulUnits(u1, u2))


def div(v1, v2):
    """Divide v1 by v2; unit exponents subtract."""
    n1, u1 = v1
    n2, u2 = v2
    return (n1 / n2, divUnits(u1, u2))


def pow(v1, v2):  # intentionally shadows the builtin; name kept for API compatibility
    """Raise v1 to the power v2; the exponent must be unit-less."""
    n1, u1 = v1
    n2, u2 = v2
    if u2 != {}:
        raise Exception("Exponent must be a unit-less value")
    return (n1 ** n2, powUnits(u1, n2))


def canonize(u):
    """Drop zero exponents so equal dimensions always compare equal."""
    return {s: p for s, p in u.items() if p != 0}


def mulUnits(u1, u2):
    """Combine unit maps for a product: exponents add."""
    u = u1.copy()
    for s in u2:
        u[s] = u.get(s, 0) + u2[s]
    return canonize(u)


def divUnits(u1, u2):
    """Combine unit maps for a quotient: exponents subtract."""
    u = u1.copy()
    for s in u2:
        u[s] = u.get(s, 0) - u2[s]
    return canonize(u)


def powUnits(u, n):
    """Scale every exponent in a unit map by n."""
    return canonize({s: p * n for s, p in u.items()})


# Sample expressions; p8_ is expected to fail with incompatible units.
p6 = 'm'                                    # m
p7 = ('+', 'm', 'm')                        # m+m
p8_ = ('+', 'm', 's')                       # m+s ERROR
p9 = ('^', ('*', 2, 'm'), 2)                # (2 m)^2
p10 = ('*', ('*', 1, 'm'), ('^', 's', -1))  # 1 m * s^(-1)
p11 = ('/', 'm', 's')                       # m/s
p12 = ('/', ('*', 1, 'm'), ('^', 's', 1))   # 1 m / s^1
p13 = ('/', p12, p11)
p14 = ('+', 1, ('/', 'm', 'm'))


def runTests(tests):
    """Evaluate and print each expression in `tests`."""
    for p in tests:
        print("%s \t--> %s" % (p, eval(p)))


def runFailedTests(tests):
    """Print OK for each expression that raises; FAIL for any that does not.

    Bug fix: the original silently ignored tests that were expected to
    raise but evaluated without error.
    """
    for p in tests:
        try:
            eval(p)
        except Exception:
            print("OK")
        else:
            print("FAIL: %s evaluated without error" % (p,))


runTests((p6, p7, p9, p10, p11, p12, p13, p14))
runFailedTests((p8_,))
PetRescue/pr-pin
spec/support/test_namespace.rb
# Namespace holding all objects created during specs.
module Test
  # Removes every constant defined under this namespace so each
  # spec run starts from a clean slate.
  def self.remove_constants
    constants.each { |const| remove_const(const) }
  end
end
EAndresen/aws-cdk-go
awscdk/awskinesisanalyticsflink/internal/types.go
package internal

import (
	"github.com/aws/aws-cdk-go/awscdk/v2"
	"github.com/aws/aws-cdk-go/awscdk/v2/awsiam"
)

// Type aliases mirroring awscdk/awsiam types under internal names.
// NOTE(review): presumably referenced by generated (JSII) bindings in the
// parent package — confirm against the code generator before renaming.
type Type__awscdkResource = awscdk.Resource

type Type__awsiamIGrantable = awsiam.IGrantable

type Type__awscdkIResource = awscdk.IResource
paveloom-p/P3
ARPREC/arprec-2.2.13/tests/quad-ts.cpp
/* * quad-ts.cpp * * This work was supported by the Director, Office of Science, Division * of Mathematical, Information, and Computational Sciences of the * U.S. Department of Energy under contract number DE-AC03-76SF00098. * * Copyright (c) 2004 * * * Creation Date: June 7, 2004 * Modified: June 27, 2004 * Version: 1.0 */ //Implementation of QuadTS #include <cmath> #include <iostream> #include "quad-ts.h" #include "util.h" using std::cout; using std::endl; //Static Variable initialization int QuadTS::_phasesMax = 0; long int QuadTS::_AbWtSize = 0; long int QuadTS::_AbWtMax = 0; long int QuadTS::_quadTSCount = 0; mp_real* QuadTS::_abscissas = NULL; mp_real* QuadTS::_weight = NULL; int QuadTS::_precWord1 = 0; int QuadTS::_precWord2 = 0; int QuadTS::_sneps1 = 0; int QuadTS::_sneps2 = 0; /* * ABSTRACT FUNCTION * Resizes the Abscissas and weight arrays * * This subroutine initializes the quadrature arays xk and wk using the * function x(t) = tanh (pi/2*sinh(t)). The argument nq2 is the space * allocated for wk and xk in the calling program. By default it is set to * 12 * 2^nq1. Increase nq2 if directed by a message produced below. * Upon completion, wk(-1) = nq1, and xk(-1) = n, the maximum space parameter * for these arrays. In other words, the arrays occupy (wk(i), i = -1 to n) * and (xk(i), i = -1 to n), where n = xk(-1). The array x_k contain 1 minus * the abscissas; the wk array are the weights at these abscissas. 
*/ void QuadTS::reSizeAbWt(long int AbWtSize) { if(_phasesMax < _phases) _phasesMax = _phases; if(AbWtSize < 12*2) { AbWtSize = 12; //AbWtSize = 8; for(int i=0; i<_phasesMax; i++) AbWtSize *= 2; } //Need to recompute everything as precesion has been increased if(_precWord1 < _precWd1){ _AbWtSize = _AbWtMax= 0; _precWord1 = _precWd1; _sneps1 = _neps1; if(_abscissas) { delete [] _abscissas; delete [] _weight; _abscissas = _weight = NULL; } } //There is no need to inc to the given Size if((_precWord2 >= _precWd2 || _AbWtSize == _AbWtMax) && _AbWtSize >= AbWtSize) return; if(_precWord2 < _precWd2){ _precWord2 = _precWd2; _sneps2 = _neps2; } mp::mpsetprecwords(_precWord1); //increasing the table Maxsize only if it is bigger than the //current table Maxsize if(_AbWtMax < AbWtSize) { _AbWtMax = AbWtSize; mp_real* ta = new mp_real[_AbWtMax]; //abscissas mp_real* tw = new mp_real[_AbWtMax]; //weight if(!(ta && tw)) { cout << "QuadTS::reSizeAbWt: Not enough memory" <<endl; exit(0); } //Copying the values from the old array to the new for(long int i = 0; i < _AbWtSize; i++) { ta[i] = _abscissas[i]; tw[i] = _weight[i]; } //Making sure that arrays are not NULL //if one is null other must be null as they are the same size if(_abscissas) { delete [] _abscissas; delete [] _weight; } _abscissas = ta; _weight = tw; } if (_ndebug >= 1) cout <<"QuadTS::reSizeAbWt: Tanh-sinh quadrature initialization" <<endl; /* * Some inits before actual computation of tables begin * */ mp::mpsetprecwords(_precWord2); mp_real eps2 = pow(mp_real(10.0), _sneps2); mp::mpsetprecwords(_precWord1); mp_real p2(mp_real::_pi/2), t1,t2,t3,t4, u1,u2; double h = std::ldexp(1.0, -_phasesMax); #define IPRINT 1000 for(int k=_AbWtSize; k < _AbWtMax; k++) { // xk(k) = 1 - tanh (u1) = 1 /(e^u1 * cosh (u1)) // wk(k) = u2 / cosh (u1)^2 // where u1 = pi/2 * cosh (t1), u2 = pi/2 * sinh (t1) t1 = k*h; t2 = exp(t1); u1 = .5*p2*(t2 + 1.0/t2); u2 = .5*p2*(t2 - 1.0/t2); t3 = exp(u2); t4 = .5*(t3 + 1.0/t3); _abscissas[k] = 
1.0/(t3*t4); _weight[k] = u1/(t4*t4); if(_ndebug >= 2 && k%IPRINT == 0) { cout<<"\t\t"<<k<<"\t"<<_AbWtMax<<endl; //cout<<"abscissas[i]=\n"<<_abscissas[k] //<<"weight[i]=\n"<<_weight[k]; } if(isThereErrorMPIER(_ierror)) { _nerror = _ierror + 100; cout <<"reSizeAbWt: Error in quadratore initialization; code" << _nerror <<endl; return; } if(_weight[k] < eps2) { _AbWtSize = k; if(_ndebug >= 2) cout <<"reSizeAbWt: Tabale space used = " << _AbWtSize << endl; return; } } cout<< "reSizeAbWt: Table space parameter is too small; value =" << _AbWtMax <<endl; _AbWtSize = _AbWtMax; } mp_real QuadTS::integrate(mp_real func(const mp_real &x), const mp_real &x1, const mp_real &x2) { static const int izx = 5; int outprec = mp::mpgetoutputprec(); mp::mpsetoutputprec(56); mp::mpsetprecwords(_precWd2); mp_real ax = .5*(x2-x1); mp_real bx = .5*(x2+x1); mp::mpsetprecwords(_precWd1); mp_real sum(0.0),s1(0.0),s2(0.0),s3(0.0), fm1(0.0),fm2(0.0), err(0.0), h(1.0), xx1, xx2, eps1, eps2, t1, t2, tw1, tw2; const mp_real c10(10.0); const mp_real ceps1(pow(c10, _neps1)); const mp_real ceps2(pow(c10, _neps2)); int phases = _phases+1, k1, k2, iz1,iz2; int *ip = new int[phases]; //Creating a table to minimize computation ip[0] = 1; for(int i=1; i < phases; i++) ip[i] = 2*ip[i-1]; for(long int k = 1; k <= _phases; k++) { h *= .5; s3 = s2; s2 = s1; fm1 = 0.0; fm2 = 0.0; k1 = ip[_phases-k]; k2 = ip[_phases-k+1]; iz1 = iz2 = 0; //Evaluate function at level k in x, avoiding unnecessary computation. 
for(long int i = 0; i < _AbWtSize; i += k1) { if(i%k2 != 0 || k == 1) { mp::mpsetprecwords(_precWord2); mp_real xt1(1.0 - _abscissas[i]); mp_real xx1(bx - ax*xt1); mp_real xx2(bx + ax*xt1); bool log1 = xx1 > x1 && iz1 < izx; bool log2 = xx2 < x2 && iz2 < izx; mp::mpsetprecwords(_precWord1); if(!log1 && !log2) break; if(log1) { t1 = func(xx1); if(isThereErrorMPIER(_ierror) || _nerror > 0) { if(_ierror > 0) _nerror = 100 + _ierror; cout << "QuadTS::integrate: Error in quadrature" "calculation; code =" << _nerror << endl; mp::mpsetoutputprec(outprec); delete ip; return mp_real(0.0); } tw1 = t1*_weight[i]; if(abs(tw1) < ceps1) iz1++; else iz1 = 0; }else { t1 = 0.0; tw1 = 0.0; } if(i > 0 && log2) { t2 = func(xx2); if(isThereErrorMPIER(_ierror) || _nerror > 0) { if(_ierror > 0) _nerror = 100 + _ierror; cout << "QuadTS::integrate: Error in quadrature" "calculation; code =" << _nerror << endl; mp::mpsetoutputprec(outprec); delete ip; return mp_real(0.0); } tw2 = t2*_weight[i]; if(abs(tw2) < ceps1) iz2++; else iz2 = 0; }else { t2 = 0.0; tw2 = 0.0; } sum += tw1 + tw2; tw1 = abs(tw1); tw2 = abs(tw2); t1 = abs(t1); t2 = abs(t2); fm1 = std::max(fm1, tw1); fm1 = std::max(fm1, tw2); fm2 = std::max(fm2, t1); fm2 = std::max(fm2, t2); } } s1 = ax*h*sum; mp_real eps1(fm1*ceps1); mp_real eps2(fm2*ceps2); double d1 = dplog10q(abs(s1 - s2)); double d2 = dplog10q(abs(s1 - s3)); double d3 = dplog10q(eps1) - 1; double d4 = dplog10q(eps2) - 1; if(k <= 2) err = 1.0; else if(d1 == -9999.0) err = 0.0; else { double val1 = d1*d1/d2; double val2 = 2*d1; double max = std::max(val1,val2); max = std::max(max,d3); max = std::max(max,d4); err = pow(c10, nint(std::min(0.0,max))); } if(_ndebug >= 2) cout << "QuadTS::integrate: Iteration=" << k << " of " << _phases << "; est error = 10^" << nint(dble(dplog10q(abs(err)))) << "; approx value = " << s1 << endl; if(k > 3 && err < eps1){ mp::mpsetoutputprec(outprec); delete ip; return s1; } if(k >= 3 && err < eps2) { cout << "QuadTS::integrate: Estimated 
error = 10^" << nint(dble(dplog10q(abs(err)))) << "\n" << "Increase secondary prec (Ndigits2) for greater accuracy. " << "Current value =" << -_neps2 << endl; mp::mpsetoutputprec(outprec); delete ip; return s1; } } cout << "QuadTS::integrate: Estimated error = 10^" << nint(dble(dplog10q(abs(err)))) << "\n" << "Increae QuadLevel for greater accuracy. " << "Current value =" << _phases << endl; mp::mpsetoutputprec(outprec); delete ip; return s1; }
aiden-liu413/Lreport
common/src/main/java/com/kxingyi/common/util/feign/FeignClientRegistrar.java
<filename>common/src/main/java/com/kxingyi/common/util/feign/FeignClientRegistrar.java package com.kxingyi.common.util.feign; import feign.Feign; import feign.Request; import feign.Target; import org.springframework.beans.BeansException; import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionReaderUtils; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.context.EnvironmentAware; import org.springframework.context.ResourceLoaderAware; import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; import org.springframework.core.env.Environment; import org.springframework.core.io.ResourceLoader; import org.springframework.core.type.AnnotationMetadata; import org.springframework.core.type.filter.AnnotationTypeFilter; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import java.util.Map; import java.util.Set; /** * @author hujie * @date 2020/5/14 17:07 * 扫描带{@link FeignClient}的接口注入spring容器 **/ public class FeignClientRegistrar implements ImportBeanDefinitionRegistrar, ResourceLoaderAware, EnvironmentAware { private ResourceLoader resourceLoader; private Environment environment; @Override public void setEnvironment(Environment environment) { this.environment = environment; } @Override public void setResourceLoader(ResourceLoader resourceLoader) { this.resourceLoader = resourceLoader; } @Override public void 
registerBeanDefinitions(AnnotationMetadata metadata, BeanDefinitionRegistry registry) { ClassPathScanningCandidateComponentProvider scanner = new FeignClientScanner(); scanner.setResourceLoader(resourceLoader); AnnotationTypeFilter annotationTypeFilter = new AnnotationTypeFilter(FeignClient.class); scanner.addIncludeFilter(annotationTypeFilter); /** 从{@link EnableFeignClient 注解的包下开始扫描}*/ String scanPackageName = ClassUtils.getPackageName(metadata.getClassName()); Set<BeanDefinition> candidateComponents = scanner.findCandidateComponents(scanPackageName); for (BeanDefinition candidateComponent : candidateComponents) { if (candidateComponent instanceof AnnotatedBeanDefinition) { AnnotatedBeanDefinition beanDefinition = (AnnotatedBeanDefinition) candidateComponent; AnnotationMetadata annotationMetadata = beanDefinition.getMetadata(); Assert.isTrue(annotationMetadata.isInterface(), "@FeignClient只能作用于接口上"); Map<String, Object> attributes = annotationMetadata.getAnnotationAttributes(FeignClient.class.getCanonicalName()); BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(FeignClientFactoryBean.class); String className = annotationMetadata.getClassName(); definition.addPropertyValue("config", attributes.get("configuration")); definition.addPropertyValue("type", className); BeanDefinitionHolder newHolder = new BeanDefinitionHolder(definition.getBeanDefinition(), className, null); BeanDefinitionReaderUtils.registerBeanDefinition(newHolder, registry); } } } } class FeignClientScanner extends ClassPathScanningCandidateComponentProvider { @Override protected boolean isCandidateComponent(AnnotatedBeanDefinition beanDefinition) { boolean isCandidate = false; if (beanDefinition.getMetadata().isIndependent()) { if (!beanDefinition.getMetadata().isAnnotation()) { isCandidate = true; } } return isCandidate; } } class FeignClientFactoryBean implements FactoryBean<Object>, ApplicationContextAware { private Class<?> type; private Class<? 
extends FeignClientConfiguration> config; private ApplicationContext applicationContext; @Override public Object getObject() { return configureFeign(); } @Override public Class<?> getObjectType() { return this.type; } private <T> T configureFeign() { FeignClientConfiguration configuration = applicationContext.getBean(config); if (configuration == null) { throw new RuntimeException("spring中没有" + config.toString() + "类型的bean"); } return (T) Feign.builder() .client(configuration.client()) .encoder(configuration.encoder()) .options(new Request.Options(1000 * 10, 1000 * 10)) .decoder(configuration.decoder()) .retryer(configuration.retryer()) .requestInterceptors(configuration.requestInterceptor()) .target(new Target.HardCodedTarget<>(type, configuration.urlPre())); } @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { this.applicationContext = applicationContext; } public Class<?> getType() { return type; } public void setType(Class<?> type) { this.type = type; } public Class<? extends FeignClientConfiguration> getConfig() { return config; } public void setConfig(Class<? extends DefaultFeignClientConfiguration> config) { this.config = config; } }
upeshpv/fluent-fluentd
lib/fluent/plugin/output.rb
# # Fluentd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # require 'fluent/plugin/base' require 'fluent/log' require 'fluent/plugin_id' require 'fluent/plugin_helper' require 'fluent/timezone' require 'fluent/unique_id' require 'time' require 'monitor' module Fluent module Plugin class Output < Base include PluginId include PluginLoggerMixin include PluginHelper::Mixin include UniqueId::Mixin helpers :thread, :retry_state CHUNK_KEY_PATTERN = /^[-_.<KEY> CHUNK_KEY_PLACEHOLDER_PATTERN = /\$\{[-_.<KEY> CHUNKING_FIELD_WARN_NUM = 4 config_param :time_as_integer, :bool, default: false # `<buffer>` and `<secondary>` sections are available only when '#format' and '#write' are implemented config_section :buffer, param_name: :buffer_config, init: true, required: false, multi: false, final: true do config_argument :chunk_keys, :array, value_type: :string, default: [] config_param :@type, :string, default: 'memory', alias: :type config_param :timekey, :time, default: nil # range size to be used: `time.to_i / @timekey` config_param :timekey_wait, :time, default: 600 # These are for #extract_placeholders config_param :timekey_use_utc, :bool, default: false # default is localtime config_param :timekey_zone, :string, default: Time.now.strftime('%z') # e.g., "-0700" or "Asia/Tokyo" desc 'If true, plugin will try to flush buffer just before shutdown.' config_param :flush_at_shutdown, :bool, default: nil # change default by buffer_plugin.persistent? desc 'How to enqueue chunks to be flushed. 
"interval" flushes per flush_interval, "immediate" flushes just after event arrival.' config_param :flush_mode, :enum, list: [:default, :lazy, :interval, :immediate], default: :default config_param :flush_interval, :time, default: 60, desc: 'The interval between buffer chunk flushes.' config_param :flush_thread_count, :integer, default: 1, desc: 'The number of threads to flush the buffer.' config_param :flush_thread_interval, :float, default: 1.0, desc: 'Seconds to sleep between checks for buffer flushes in flush threads.' config_param :flush_thread_burst_interval, :float, default: 1.0, desc: 'Seconds to sleep between flushes when many buffer chunks are queued.' config_param :delayed_commit_timeout, :time, default: 60, desc: 'Seconds of timeout for buffer chunks to be committed by plugins later.' config_param :overflow_action, :enum, list: [:throw_exception, :block, :drop_oldest_chunk], default: :throw_exception, desc: 'The action when the size of buffer exceeds the limit.' config_param :retry_forever, :bool, default: false, desc: 'If true, plugin will ignore retry_timeout and retry_max_times options and retry flushing forever.' config_param :retry_timeout, :time, default: 72 * 60 * 60, desc: 'The maximum seconds to retry to flush while failing, until plugin discards buffer chunks.' # 72hours == 17 times with exponential backoff (not to change default behavior) config_param :retry_max_times, :integer, default: nil, desc: 'The maximum number of times to retry to flush while failing.' config_param :retry_secondary_threshold, :float, default: 0.8, desc: 'ratio of retry_timeout to switch to use secondary while failing.' # expornential backoff sequence will be initialized at the time of this threshold desc 'How to wait next retry to flush buffer.' 
config_param :retry_type, :enum, list: [:exponential_backoff, :periodic], default: :exponential_backoff ### Periodic -> fixed :retry_wait ### Exponencial backoff: k is number of retry times # c: constant factor, @retry_wait # b: base factor, @retry_exponential_backoff_base # k: times # total retry time: c + c * b^1 + (...) + c*b^k = c*b^(k+1) - 1 config_param :retry_wait, :time, default: 1, desc: 'Seconds to wait before next retry to flush, or constant factor of exponential backoff.' config_param :retry_exponential_backoff_base, :float, default: 2, desc: 'The base number of exponencial backoff for retries.' config_param :retry_max_interval, :time, default: nil, desc: 'The maximum interval seconds for exponencial backoff between retries while failing.' config_param :retry_randomize, :bool, default: true, desc: 'If true, output plugin will retry after randomized interval not to do burst retries.' end config_section :secondary, param_name: :secondary_config, required: false, multi: false, final: true do config_param :@type, :string, default: nil, alias: :type config_section :buffer, required: false, multi: false do # dummy to detect invalid specification for here end config_section :secondary, required: false, multi: false do # dummy to detect invalid specification for here end end def process(tag, es) raise NotImplementedError, "BUG: output plugins MUST implement this method" end def write(chunk) raise NotImplementedError, "BUG: output plugins MUST implement this method" end def try_write(chunk) raise NotImplementedError, "BUG: output plugins MUST implement this method" end def format(tag, time, record) # standard msgpack_event_stream chunk will be used if this method is not implemented in plugin subclass raise NotImplementedError, "BUG: output plugins MUST implement this method" end def prefer_buffered_processing # override this method to return false only when all of these are true: # * plugin has both implementation for buffered and non-buffered methods # * plugin 
is expected to work as non-buffered plugin if no `<buffer>` sections specified true end def prefer_delayed_commit # override this method to decide which is used of `write` or `try_write` if both are implemented true end # Internal states FlushThreadState = Struct.new(:thread, :next_time) DequeuedChunkInfo = Struct.new(:chunk_id, :time, :timeout) do def expired? time + timeout < Time.now end end attr_reader :as_secondary, :delayed_commit, :delayed_commit_timeout attr_reader :num_errors, :emit_count, :emit_records, :write_count, :rollback_count # for tests attr_reader :buffer, :retry, :secondary, :chunk_keys, :chunk_key_time, :chunk_key_tag attr_accessor :output_enqueue_thread_waiting, :in_tests # output_enqueue_thread_waiting: for test of output.rb itself # in_tests: for tests of plugins with test drivers def initialize super @counters_monitor = Monitor.new @buffering = false @delayed_commit = false @as_secondary = false @in_tests = false @primary_instance = nil # TODO: well organized counters @num_errors = 0 @emit_count = 0 @emit_records = 0 @write_count = 0 @rollback_count = 0 # How to process events is decided here at once, but it will be decided in delayed way on #configure & #start if implement?(:synchronous) if implement?(:buffered) || implement?(:delayed_commit) @buffering = nil # do #configure or #start to determine this for full-featured plugins else @buffering = false end else @buffering = true end @custom_format = implement?(:custom_format) @buffer = nil @secondary = nil @retry = nil @dequeued_chunks = nil @output_flush_threads = nil @simple_chunking = nil @chunk_keys = @chunk_key_time = @chunk_key_tag = nil @flush_mode = nil end def acts_as_secondary(primary) @as_secondary = true @primary_instance = primary (class << self; self; end).module_eval do define_method(:extract_placeholders){ |str, metadata| @primary_instance.extract_placeholders(str, metadata) } define_method(:commit_write){ |chunk_id| @primary_instance.commit_write(chunk_id, delayed: 
delayed_commit, secondary: true) } define_method(:rollback_write){ |chunk_id| @primary_instance.rollback_write(chunk_id) } end end def configure(conf) unless implement?(:synchronous) || implement?(:buffered) || implement?(:delayed_commit) raise "BUG: output plugin must implement some methods. see developer documents." end has_buffer_section = (conf.elements(name: 'buffer').size > 0) super if has_buffer_section unless implement?(:buffered) || implement?(:delayed_commit) raise Fluent::ConfigError, "<buffer> section is configured, but plugin '#{self.class}' doesn't support buffering" end @buffering = true else # no buffer sections if implement?(:synchronous) if !implement?(:buffered) && !implement?(:delayed_commit) if @as_secondary raise Fluent::ConfigError, "secondary plugin '#{self.class}' must support buffering, but doesn't." end @buffering = false else if @as_secondary # secondary plugin always works as buffered plugin without buffer instance @buffering = true else # @buffering.nil? shows that enabling buffering or not will be decided in lazy way in #start @buffering = nil end end else # buffered or delayed_commit is supported by `unless` of first line in this method @buffering = true end end if @as_secondary if !@buffering && !@buffering.nil? raise Fluent::ConfigError, "secondary plugin '#{self.class}' must support buffering, but doesn't" end end if (@buffering || @buffering.nil?) && !@as_secondary # When @buffering.nil?, @buffer_config was initialized with default value for all parameters. # If so, this configuration MUST success. 
@chunk_keys = @buffer_config.chunk_keys.dup @chunk_key_time = !!@chunk_keys.delete('time') @chunk_key_tag = !!@chunk_keys.delete('tag') if @chunk_keys.any?{ |key| key !~ CHUNK_KEY_PATTERN } raise Fluent::ConfigError, "chunk_keys specification includes invalid char" end if @chunk_key_time raise Fluent::ConfigError, "<buffer ...> argument includes 'time', but timekey is not configured" unless @buffer_config.timekey Fluent::Timezone.validate!(@buffer_config.timekey_zone) @buffer_config.timekey_zone = '+0000' if @buffer_config.timekey_use_utc @output_time_formatter_cache = {} end if (@chunk_key_tag ? 1 : 0) + @chunk_keys.size >= CHUNKING_FIELD_WARN_NUM log.warn "many chunk keys specified, and it may cause too many chunks on your system." end # no chunk keys or only tags (chunking can be done without iterating event stream) @simple_chunking = !@chunk_key_time && @chunk_keys.empty? @flush_mode = @buffer_config.flush_mode if @flush_mode == :default @flush_mode = (@chunk_key_time ? :lazy : :interval) end buffer_type = @buffer_config[:@type] buffer_conf = conf.elements(name: 'buffer').first || Fluent::Config::Element.new('buffer', '', {}, []) @buffer = Plugin.new_buffer(buffer_type, parent: self) @buffer.configure(buffer_conf) @flush_at_shutdown = @buffer_config.flush_at_shutdown if @flush_at_shutdown.nil? @flush_at_shutdown = if @buffer.persistent? false else true # flush_at_shutdown is true in default for on-memory buffer end elsif !@flush_at_shutdown && !@buffer.persistent? buf_type = Plugin.lookup_type_from_class(@buffer.class) log.warn "'flush_at_shutdown' is false, and buffer plugin '#{buf_type}' is not persistent buffer." log.warn "your configuration will lose buffered data at shutdown. please confirm your configuration again." 
end end if @secondary_config raise Fluent::ConfigError, "Invalid <secondary> section for non-buffered plugin" unless @buffering raise Fluent::ConfigError, "<secondary> section cannot have <buffer> section" if @secondary_config.buffer raise Fluent::ConfigError, "<secondary> section cannot have <secondary> section" if @secondary_config.secondary raise Fluent::ConfigError, "<secondary> section and 'retry_forever' are exclusive" if @buffer_config.retry_forever secondary_type = @secondary_config[:@type] unless secondary_type secondary_type = conf['@type'] # primary plugin type end secondary_conf = conf.elements(name: 'secondary').first @secondary = Plugin.new_output(secondary_type) @secondary.acts_as_secondary(self) @secondary.configure(secondary_conf) @secondary.router = router if @secondary.has_router? if (self.class != @secondary.class) && (@custom_format || @secondary.implement?(:custom_format)) log.warn "secondary type should be same with primary one", primary: self.class.to_s, secondary: @secondary.class.to_s end else @secondary = nil end self end def start super if @buffering.nil? 
@buffering = prefer_buffered_processing if !@buffering && @buffer @buffer.terminate # it's not started, so terminate will be enough end end if @buffering m = method(:emit_buffered) (class << self; self; end).module_eval do define_method(:emit_events, m) end @custom_format = implement?(:custom_format) @delayed_commit = if implement?(:buffered) && implement?(:delayed_commit) prefer_delayed_commit else implement?(:delayed_commit) end @delayed_commit_timeout = @buffer_config.delayed_commit_timeout else # !@buffering m = method(:emit_sync) (class << self; self; end).module_eval do define_method(:emit_events, m) end end if @buffering && !@as_secondary @retry = nil @retry_mutex = Mutex.new @buffer.start @output_flush_threads = [] @output_flush_threads_mutex = Mutex.new @output_flush_threads_running = true # mainly for test: detect enqueue works as code below: # @output.interrupt_flushes # # emits # @output.enqueue_thread_wait @output_flush_interrupted = false @output_enqueue_thread_mutex = Mutex.new @output_enqueue_thread_waiting = false @dequeued_chunks = [] @dequeued_chunks_mutex = Mutex.new @buffer_config.flush_thread_count.times do |i| thread_title = "flush_thread_#{i}".to_sym thread_state = FlushThreadState.new(nil, nil) thread = thread_create(thread_title) do flush_thread_run(thread_state) end thread_state.thread = thread @output_flush_threads_mutex.synchronize do @output_flush_threads << thread_state end end @output_flush_thread_current_position = 0 unless @in_tests if @flush_mode == :interval || @chunk_key_time thread_create(:enqueue_thread, &method(:enqueue_thread_run)) end end end @secondary.start if @secondary end def stop @secondary.stop if @secondary @buffer.stop if @buffering && @buffer super end def before_shutdown @secondary.before_shutdown if @secondary if @buffering && @buffer if @flush_at_shutdown force_flush end @buffer.before_shutdown end super end def shutdown @secondary.shutdown if @secondary @buffer.shutdown if @buffering && @buffer super end def 
after_shutdown try_rollback_all if @buffering && !@as_secondary # rollback regardless with @delayed_commit, because secondary may do it @secondary.after_shutdown if @secondary if @buffering && @buffer @buffer.after_shutdown @output_flush_threads_running = false if @output_flush_threads && !@output_flush_threads.empty? @output_flush_threads.each do |state| state.thread.run if state.thread.alive? # to wakeup thread and make it to stop by itself end @output_flush_threads.each do |state| state.thread.join end end end super end def close @buffer.close if @buffering && @buffer @secondary.close if @secondary super end def terminate @buffer.terminate if @buffering && @buffer @secondary.terminate if @secondary super end def support_in_v12_style?(feature) # for plugins written in v0.12 styles case feature when :synchronous then false when :buffered then false when :delayed_commit then false when :custom_format then false else raise ArgumentError, "unknown feature: #{feature}" end end def implement?(feature) methods_of_plugin = self.class.instance_methods(false) case feature when :synchronous then methods_of_plugin.include?(:process) || support_in_v12_style?(:synchronous) when :buffered then methods_of_plugin.include?(:write) || support_in_v12_style?(:buffered) when :delayed_commit then methods_of_plugin.include?(:try_write) when :custom_format then methods_of_plugin.include?(:format) || support_in_v12_style?(:custom_format) else raise ArgumentError, "Unknown feature for output plugin: #{feature}" end end # TODO: optimize this code def extract_placeholders(str, metadata) if metadata.timekey.nil? && metadata.tag.nil? && metadata.variables.nil? str else rvalue = str # strftime formatting if @chunk_key_time # this section MUST be earlier than rest to use raw 'str' @output_time_formatter_cache[str] ||= Fluent::Timezone.formatter(@buffer_config.timekey_zone, str) rvalue = @output_time_formatter_cache[str].call(metadata.timekey) end # ${tag}, ${tag[0]}, ${tag[1]}, ... 
if @chunk_key_tag if str =~ /\$\{tag\[\d+\]\}/ hash = {'${tag}' => metadata.tag} metadata.tag.split('.').each_with_index do |part, i| hash["${tag[#{i}]}"] = part end rvalue = rvalue.gsub(/\$\{tag(\[\d+\])?\}/, hash) elsif str.include?('${tag}') rvalue = rvalue.gsub('${tag}', metadata.tag) end end # ${a_chunk_key}, ... if !@chunk_keys.empty? && metadata.variables hash = {'${tag}' => '${tag}'} # not to erase this wrongly @chunk_keys.each do |key| hash["${#{key}}"] = metadata.variables[key.to_sym] end rvalue = rvalue.gsub(CHUNK_KEY_PLACEHOLDER_PATTERN, hash) end rvalue end end def emit_events(tag, es) # actually this method will be overwritten by #configure if @buffering emit_buffered(tag, es) else emit_sync(tag, es) end end def emit_sync(tag, es) @counters_monitor.synchronize{ @emit_count += 1 } begin process(tag, es) @counters_monitor.synchronize{ @emit_records += es.size } rescue @counters_monitor.synchronize{ @num_errors += 1 } raise end end def emit_buffered(tag, es) @counters_monitor.synchronize{ @emit_count += 1 } begin execute_chunking(tag, es, enqueue: (@flush_mode == :immediate)) if !@retry && @buffer.queued? submit_flush_once end rescue # TODO: separate number of errors into emit errors and write/flush errors @counters_monitor.synchronize{ @num_errors += 1 } raise end end # TODO: optimize this code def metadata(tag, time, record) # this arguments are ordered in output plugin's rule # Metadata 's argument order is different from this one (timekey, tag, variables) raise ArgumentError, "tag must be a String: #{tag.class}" unless tag.nil? || tag.is_a?(String) raise ArgumentError, "time must be a Fluent::EventTime (or Integer): #{time.class}" unless time.nil? || time.is_a?(Fluent::EventTime) || time.is_a?(Integer) raise ArgumentError, "record must be a Hash: #{record.class}" unless record.nil? || record.is_a?(Hash) if @chunk_keys.nil? && @chunk_key_time.nil? && @chunk_key_tag.nil? 
# for tests return Struct.new(:timekey, :tag, :variables).new end # timekey is int from epoch, and `timekey - timekey % 60` is assumed to mach with 0s of each minutes. # it's wrong if timezone is configured as one which supports leap second, but it's very rare and # we can ignore it (especially in production systems). if @chunk_keys.empty? if !@chunk_key_time && !@chunk_key_tag @buffer.metadata() elsif @chunk_key_time && @chunk_key_tag time_int = time.to_i timekey = (time_int - (time_int % @buffer_config.timekey)).to_i @buffer.metadata(timekey: timekey, tag: tag) elsif @chunk_key_time time_int = time.to_i timekey = (time_int - (time_int % @buffer_config.timekey)).to_i @buffer.metadata(timekey: timekey) else @buffer.metadata(tag: tag) end else timekey = if @chunk_key_time time_int = time.to_i (time_int - (time_int % @buffer_config.timekey)).to_i else nil end pairs = Hash[@chunk_keys.map{|k| [k.to_sym, record[k]]}] @buffer.metadata(timekey: timekey, tag: (@chunk_key_tag ? tag : nil), variables: pairs) end end def execute_chunking(tag, es, enqueue: false) if @simple_chunking handle_stream_simple(tag, es, enqueue: enqueue) elsif @custom_format handle_stream_with_custom_format(tag, es, enqueue: enqueue) else handle_stream_with_standard_format(tag, es, enqueue: enqueue) end end def write_guard(&block) begin block.call rescue Fluent::Plugin::Buffer::BufferOverflowError log.warn "failed to write data into buffer by buffer overflow" case @buffer_config.overflow_action when :throw_exception raise when :block log.debug "buffer.write is now blocking" until @buffer.storable? 
sleep 1 end log.debug "retrying buffer.write after blocked operation" retry when :drop_oldest_chunk begin oldest = @buffer.dequeue_chunk if oldest log.warn "dropping oldest chunk to make space after buffer overflow", chunk_id: oldest.unique_id @buffer.purge_chunk(oldest.unique_id) else log.error "no queued chunks to be dropped for drop_oldest_chunk" end rescue # ignore any errors end raise unless @buffer.storable? retry else raise "BUG: unknown overflow_action '#{@buffer_config.overflow_action}'" end end end FORMAT_MSGPACK_STREAM = ->(e){ e.to_msgpack_stream } FORMAT_MSGPACK_STREAM_TIME_INT = ->(e){ e.to_msgpack_stream(time_int: true) } # metadata_and_data is a Hash of: # (standard format) metadata => event stream # (custom format) metadata => array of formatted event # For standard format, formatting should be done for whole event stream, but # "whole event stream" may be a split of "es" here when it's bigger than chunk_limit_size. # `@buffer.write` will do this splitting. # For custom format, formatting will be done here. Custom formatting always requires # iteration of event stream, and it should be done just once even if total event stream size # is biggar than chunk_limit_size because of performance. def handle_stream_with_custom_format(tag, es, enqueue: false) meta_and_data = {} records = 0 es.each do |time, record| meta = metadata(tag, time, record) meta_and_data[meta] ||= [] meta_and_data[meta] << format(tag, time, record) records += 1 end write_guard do @buffer.write(meta_and_data, enqueue: enqueue) end @counters_monitor.synchronize{ @emit_records += records } true end def handle_stream_with_standard_format(tag, es, enqueue: false) format_proc = @time_as_integer ? 
FORMAT_MSGPACK_STREAM_TIME_INT : FORMAT_MSGPACK_STREAM meta_and_data = {} records = 0 es.each do |time, record| meta = metadata(tag, time, record) meta_and_data[meta] ||= MultiEventStream.new meta_and_data[meta].add(time, record) records += 1 end write_guard do @buffer.write(meta_and_data, format: format_proc, enqueue: enqueue) end @counters_monitor.synchronize{ @emit_records += records } true end def handle_stream_simple(tag, es, enqueue: false) format_proc = nil meta = metadata((@chunk_key_tag ? tag : nil), nil, nil) records = es.size if @custom_format records = 0 data = [] es.each do |time, record| data << format(tag, time, record) records += 1 end else format_proc = @time_as_integer ? FORMAT_MSGPACK_STREAM_TIME_INT : FORMAT_MSGPACK_STREAM data = es end write_guard do @buffer.write({meta => data}, format: format_proc, enqueue: enqueue) end @counters_monitor.synchronize{ @emit_records += records } true end def commit_write(chunk_id, delayed: @delayed_commit, secondary: false) log.trace "committing write operation to a chunk", chunk: dump_unique_id_hex(chunk_id), delayed: delayed if delayed @dequeued_chunks_mutex.synchronize do @dequeued_chunks.delete_if{ |info| info.chunk_id == chunk_id } end end @buffer.purge_chunk(chunk_id) @retry_mutex.synchronize do if @retry # success to flush chunks in retries if secondary log.warn "retry succeeded by secondary.", plugin_id: plugin_id, chunk_id: dump_unique_id_hex(chunk_id) else log.warn "retry succeeded.", plugin_id: plugin_id, chunk_id: dump_unique_id_hex(chunk_id) end @retry = nil end end end def rollback_write(chunk_id) # This API is to rollback chunks explicitly from plugins. 
# 3rd party plugins can depend it on automatic rollback of #try_rollback_write @dequeued_chunks_mutex.synchronize do @dequeued_chunks.delete_if{ |info| info.chunk_id == chunk_id } end # returns true if chunk was rollbacked as expected # false if chunk was already flushed and couldn't be rollbacked unexpectedly # in many cases, false can be just ignored if @buffer.takeback_chunk(chunk_id) @counters_monitor.synchronize{ @rollback_count += 1 } true else false end end def try_rollback_write @dequeued_chunks_mutex.synchronize do while @dequeued_chunks.first && @dequeued_chunks.first.expired? info = @dequeued_chunks.shift if @buffer.takeback_chunk(info.chunk_id) @counters_monitor.synchronize{ @rollback_count += 1 } log.warn "failed to flush the buffer chunk, timeout to commit.", plugin_id: plugin_id, chunk_id: dump_unique_id_hex(info.chunk_id), flushed_at: info.time end end end end def try_rollback_all return unless @dequeued_chunks @dequeued_chunks_mutex.synchronize do until @dequeued_chunks.empty? info = @dequeued_chunks.shift if @buffer.takeback_chunk(info.chunk_id) @counters_monitor.synchronize{ @rollback_count += 1 } log.info "delayed commit for buffer chunks was cancelled in shutdown", plugin_id: plugin_id, chunk_id: dump_unique_id_hex(info.chunk_id) end end end end def next_flush_time if @buffer.queued? @retry_mutex.synchronize do @retry ? @retry.next_time : Time.now + @buffer_config.flush_thread_burst_interval end else Time.now + @buffer_config.flush_thread_interval end end def try_flush chunk = @buffer.dequeue_chunk return unless chunk log.debug "trying flush for a chunk", chunk: dump_unique_id_hex(chunk.unique_id) output = self using_secondary = false if @retry_mutex.synchronize{ @retry && @retry.secondary? 
} output = @secondary using_secondary = true end unless @custom_format chunk.extend ChunkMessagePackEventStreamer end begin if output.delayed_commit log.trace "executing delayed write and commit", chunk: dump_unique_id_hex(chunk.unique_id) @counters_monitor.synchronize{ @write_count += 1 } output.try_write(chunk) @dequeued_chunks_mutex.synchronize do # delayed_commit_timeout for secondary is configured in <buffer> of primary (<secondary> don't get <buffer>) @dequeued_chunks << DequeuedChunkInfo.new(chunk.unique_id, Time.now, self.delayed_commit_timeout) end else # output plugin without delayed purge chunk_id = chunk.unique_id dump_chunk_id = dump_unique_id_hex(chunk_id) log.trace "adding write count", instance: self.object_id @counters_monitor.synchronize{ @write_count += 1 } log.trace "executing sync write", chunk: dump_chunk_id output.write(chunk) log.trace "write operation done, committing", chunk: dump_chunk_id commit_write(chunk_id, secondary: using_secondary) log.trace "done to commit a chunk", chunk: dump_chunk_id end rescue => e log.debug "taking back chunk for errors.", plugin_id: plugin_id, chunk: dump_unique_id_hex(chunk.unique_id) @buffer.takeback_chunk(chunk.unique_id) @retry_mutex.synchronize do if @retry @counters_monitor.synchronize{ @num_errors += 1 } if @retry.limit? records = @buffer.queued_records log.error "failed to flush the buffer, and hit limit for retries. dropping all chunks in the buffer queue.", plugin_id: plugin_id, retry_times: @retry.steps, records: records, error: e log.error_backtrace e.backtrace @buffer.clear_queue! log.debug "buffer queue cleared", plugin_id: plugin_id @retry = nil else @retry.step msg = if using_secondary "failed to flush the buffer with secondary output." else "failed to flush the buffer." 
end log.warn msg, plugin_id: plugin_id, retry_time: @retry.steps, next_retry: @retry.next_time, chunk: dump_unique_id_hex(chunk.unique_id), error: e log.warn_backtrace e.backtrace end else @retry = retry_state(@buffer_config.retry_randomize) @counters_monitor.synchronize{ @num_errors += 1 } log.warn "failed to flush the buffer.", plugin_id: plugin_id, retry_time: @retry.steps, next_retry: @retry.next_time, chunk: dump_unique_id_hex(chunk.unique_id), error: e log.warn_backtrace e.backtrace end end end end def retry_state(randomize) if @secondary retry_state_create( :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout, forever: @buffer_config.retry_forever, max_steps: @buffer_config.retry_max_times, backoff_base: @buffer_config.retry_exponential_backoff_base, max_interval: @buffer_config.retry_max_interval, secondary: true, secondary_threshold: @buffer_config.retry_secondary_threshold, randomize: randomize ) else retry_state_create( :output_retries, @buffer_config.retry_type, @buffer_config.retry_wait, @buffer_config.retry_timeout, forever: @buffer_config.retry_forever, max_steps: @buffer_config.retry_max_times, backoff_base: @buffer_config.retry_exponential_backoff_base, max_interval: @buffer_config.retry_max_interval, randomize: randomize ) end end def submit_flush_once # Without locks: it is rough but enough to select "next" writer selection @output_flush_thread_current_position = (@output_flush_thread_current_position + 1) % @buffer_config.flush_thread_count state = @output_flush_threads[@output_flush_thread_current_position] state.next_time = 0 state.thread.run end def force_flush if @buffering @buffer.enqueue_all submit_flush_all end end def submit_flush_all while !@retry && @buffer.queued? 
submit_flush_once sleep @buffer_config.flush_thread_burst_interval end end # only for tests of output plugin def interrupt_flushes @output_flush_interrupted = true end # only for tests of output plugin def enqueue_thread_wait @output_enqueue_thread_mutex.synchronize do @output_flush_interrupted = false @output_enqueue_thread_waiting = true end require 'timeout' Timeout.timeout(10) do Thread.pass while @output_enqueue_thread_waiting end end # only for tests of output plugin def flush_thread_wakeup @output_flush_threads.each do |state| state.next_time = 0 state.thread.run end end def enqueue_thread_run value_for_interval = nil if @flush_mode == :interval value_for_interval = @buffer_config.flush_interval end if @chunk_key_time if !value_for_interval || @buffer_config.timekey < value_for_interval value_for_interval = @buffer_config.timekey end end unless value_for_interval raise "BUG: both of flush_interval and timekey are disabled" end interval = value_for_interval / 11.0 if interval < @buffer_config.flush_thread_interval interval = @buffer_config.flush_thread_interval end begin while @output_flush_threads_running now_int = Time.now.to_i if @output_flush_interrupted sleep interval next end @output_enqueue_thread_mutex.lock begin if @flush_mode == :interval flush_interval = @buffer_config.flush_interval.to_i # This block should be done by integer values. # If both of flush_interval & flush_thread_interval are 1s, expected actual flush timing is 1.5s. # If we use integered values for this comparison, expected actual flush timing is 1.0s. 
@buffer.enqueue_all{ |metadata, chunk| chunk.created_at.to_i + flush_interval <= now_int } end if @chunk_key_time timekey_unit = @buffer_config.timekey timekey_wait = @buffer_config.timekey_wait current_timekey = now_int - now_int % timekey_unit @buffer.enqueue_all{ |metadata, chunk| metadata.timekey < current_timekey && metadata.timekey + timekey_unit + timekey_wait <= now_int } end rescue => e log.error "unexpected error while checking flushed chunks. ignored.", plugin_id: plugin_id, error_class: e.class, error: e log.error_backtrace end @output_enqueue_thread_waiting = false @output_enqueue_thread_mutex.unlock sleep interval end rescue => e # normal errors are rescued by inner begin-rescue clause. log.error "error on enqueue thread", plugin_id: plugin_id, error_class: e.class, error: e log.error_backtrace raise end end def flush_thread_run(state) flush_thread_interval = @buffer_config.flush_thread_interval # If the given clock_id is not supported, Errno::EINVAL is raised. clock_id = Process::CLOCK_MONOTONIC rescue Process::CLOCK_MONOTONIC_RAW state.next_time = Process.clock_gettime(clock_id) + flush_thread_interval begin # This thread don't use `thread_current_running?` because this thread should run in `before_shutdown` phase while @output_flush_threads_running time = Process.clock_gettime(clock_id) interval = state.next_time - time if state.next_time <= time try_flush # next_flush_interval uses flush_thread_interval or flush_thread_burst_interval (or retrying) interval = next_flush_time.to_f - Time.now.to_f # TODO: if secondary && delayed-commit, next_flush_time will be much longer than expected (because @retry still exists) # @retry should be cleard if delayed commit is enabled? Or any other solution? state.next_time = Process.clock_gettime(clock_id) + interval end if @dequeued_chunks_mutex.synchronize{ !@dequeued_chunks.empty? && @dequeued_chunks.first.expired? 
} unless @output_flush_interrupted try_rollback_write end end sleep interval if interval > 0 end rescue => e # normal errors are rescued by output plugins in #try_flush # so this rescue section is for critical & unrecoverable errors log.error "error on output thread", plugin_id: plugin_id, error_class: e.class, error: e log.error_backtrace raise end end end end end
DapDeveloper/MK4duo
MK4duo/src/core/mechanics/core_mechanics.h
<reponame>DapDeveloper/MK4duo
/**
 * MK4duo Firmware for 3D Printer, Laser and CNC
 *
 * Based on Marlin, Sprinter and grbl
 * Copyright (C) 2011 <NAME> / <NAME>
 * Copyright (C) 2019 <NAME> @MagoKimbra
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */

/**
 * core_mechanics.h
 *
 * Copyright (C) 2019 <NAME> @MagoKimbra
 */
#pragma once

// Struct Core Settings
// Persisted mechanics configuration, extending the generic settings with
// CoreXY/XZ/YZ-specific per-axis data.
typedef struct : public generic_data_t {
  axis_limits_t base_pos[XYZ];      // travel limits for each cartesian axis
  float         base_home_pos[XYZ]; // home position for each cartesian axis
} mechanics_data_t;

// Kinematics implementation for Core-style (CoreXY/CoreXZ/CoreYZ) machines.
class Core_Mechanics: public Mechanics {

  public: /** Constructor */

    Core_Mechanics() {}

  public: /** Public Parameters */

    // Active mechanics settings (loaded/saved by the EEPROM layer).
    static mechanics_data_t data;

  public: /** Public Function */

    /**
     * Initialize Factory parameters
     */
    static void factory_parameters();

    /**
     * Get the stepper positions in the cartesian_position[] array.
     *
     * The result is in the current coordinate space with
     * leveling applied. The coordinates need to be run through
     * unapply_leveling to obtain the "ideal" coordinates
     * suitable for current_position, etc.
     */
    static void get_cartesian_from_steppers();

    /**
     * Plan a move to (X, Y, Z) and set the current_position
     * The final current_position may not be the one that was requested
     */
    static void do_blocking_move_to(const float rx, const float ry, const float rz, const float &fr_mm_s=0.0);
    static void do_blocking_move_to_x(const float &rx, const float &fr_mm_s=0.0);
    static void do_blocking_move_to_z(const float &rz, const float &fr_mm_s=0.0);
    static void do_blocking_move_to_xy(const float &rx, const float &ry, const float &fr_mm_s=0.0);

    // Convenience overloads taking a raw XYZ / XYZE position array.
    FORCE_INLINE static void do_blocking_move_to(const float (&raw)[XYZ], const float &fr_mm_s=0.0) {
      do_blocking_move_to(raw[X_AXIS], raw[Y_AXIS], raw[Z_AXIS], fr_mm_s);
    }
    FORCE_INLINE static void do_blocking_move_to(const float (&raw)[XYZE], const float &fr_mm_s=0.0) {
      do_blocking_move_to(raw[X_AXIS], raw[Y_AXIS], raw[Z_AXIS], fr_mm_s);
    }

    /**
     * Home all axes according to settings
     */
    static void home(const bool homeX=false, const bool homeY=false, const bool homeZ=false);

    /**
     * Home an individual linear axis
     */
    static void do_homing_move(const AxisEnum axis, const float distance, const float fr_mm_s=0.0);

    /**
     * Prepare a linear move in a Cartesian setup.
     *
     * When a mesh-based leveling system is active, moves are segmented
     * according to the configuration of the leveling system.
     *
     * Returns true if current_position[] was set to destination[]
     */
    static bool prepare_move_to_destination_mech_specific();

    /**
     * Set an axis' current position to its home position (after homing).
     *
     * For Core robots this applies one-to-one when an
     * individual axis has been homed.
     *
     * Callers must sync the planner position after calling this!
     */
    static void set_axis_is_at_home(const AxisEnum axis);

    // Reachability checks for a cartesian XY position (nozzle / probe).
    static bool position_is_reachable(const float &rx, const float &ry);
    static bool position_is_reachable_by_probe(const float &rx, const float &ry);

    /**
     * Report current position to host
     */
    static void report_current_position_detail();

    /**
     * Print mechanics parameters in memory
     */
    #if DISABLED(DISABLE_M503)
      static void print_parameters();
      static void print_M92();
      static void print_M201();
      static void print_M203();
      static void print_M204();
      static void print_M205();
      static void print_M206();
      static void print_M228();
    #endif

    #if HAS_NEXTION_LCD && ENABLED(NEXTION_GFX)
      static void Nextion_gfx_clear();
    #endif

  private: /** Private Function */

    /**
     * Home axis
     */
    static void homeaxis(const AxisEnum axis);

    #if ENABLED(QUICK_HOME)
      // Home X and Y simultaneously with a single diagonal move.
      static void quick_home_xy();
    #endif

    #if ENABLED(Z_SAFE_HOMING)
      // Home Z at a configured safe XY location.
      static void home_z_safely();
    #endif

    #if ENABLED(DOUBLE_Z_HOMING)
      static void double_home_z();
    #endif

};

// Global mechanics instance used throughout the firmware.
extern Core_Mechanics mechanics;
HaraDev001/IoT_NorthConnectedHomeBulb_Android
app/src/main/java/com/guohua/north_bulb/net/ThreadPool.java
package com.guohua.north_bulb.net; import android.os.Handler; import com.guohua.north_bulb.AppContext; import com.guohua.north_bulb.util.CodeUtils; import com.guohua.north_bulb.util.Constant; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; /** * 此类用于顺序执行线程 如此使用: ThreadPool pool = ThreadPool.getInstance(); * pool.addTask(runnable);//顺序执行线程 pool.addOtherTask(runnable);//正常执行线程 * * @author Leo * @time 2015-11-04 */ public class ThreadPool { /* 單例 */ private volatile static ThreadPool pool = null; public static ThreadPool getInstance() { if (pool == null) { synchronized (ThreadPool.class) { if (pool == null) { pool = new ThreadPool(); } } } return pool; } private ExecutorService taskService;//Java线程池 public ThreadPool() { taskService = Executors.newSingleThreadExecutor(); // taskService = Executors.newCachedThreadPool();//无界限带缓冲的线程池 } /** * 添加任務 * * @param r */ public void addTask(Runnable r) { taskService.execute(r);//线程池执行 } public void addMusicOffTask(Runnable r) { taskService.execute(r);//线程池执行 new Handler().postDelayed(new Runnable() { @Override public void run() { AppContext.getInstance().sendAll(CodeUtils.transARGB2Protocol(CodeUtils.CMD_MODE_SWITCH, new Object[]{CodeUtils.SWITCH_CLOSE})); } }, Constant.HANDLERDELAY); } public void addOtherTask(Runnable r) { new Thread(r).start();//随意执行 } /** * 停止任務 */ public void stopTask() { taskService.shutdown();//停止线程池 } /** * 立刻停止任務 */ public void stopTaskNow() { taskService.shutdownNow();//立刻停止 } }
brakhin/job4j
3_hibernate_carseller/src/main/java/ru/bgbrakhi/carseller/servlets/AddDataController.java
<gh_stars>0
package ru.bgbrakhi.carseller.servlets;

import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import ru.bgbrakhi.carseller.models.*;
import ru.bgbrakhi.carseller.service.Validator;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Servlet for the "add car" form. Parses a multipart/form-data POST,
 * stores an optional uploaded image under image_upload/, collects the
 * "ed"-prefixed form fields into a map, hands the assembled CarData to
 * the Validator service, and forwards to mynotes.html.
 */
public class AddDataController extends HttpServlet {
    // Business-logic facade (singleton).
    private final Validator logic = Validator.getInstance();

    private static final long serialVersionUID = 1L;

    // location to store file uploaded
    private static final String UPLOAD_DIRECTORY = "image_upload";

    // upload settings
    private static final int MEMORY_THRESHOLD   = 1024 * 1024 * 3;  // 3MB
    private static final int MAX_FILE_SIZE      = 1024 * 1024 * 40; // 40MB
    private static final int MAX_REQUEST_SIZE   = 1024 * 1024 * 50; // 50MB

    /**
     * Handles the multipart upload.
     *
     * @param request  multipart/form-data request with "edXxx" fields and an optional file part
     * @param response receives an error message when the request is not multipart
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        response.setContentType("text/html");

        // checks if the request actually contains upload file
        if (!ServletFileUpload.isMultipartContent(request)) {
            // if not, we stop here
            PrintWriter writer = response.getWriter();
            writer.println("Error: Form must has enctype=multipart/form-data.");
            writer.flush();
            return;
        }

        // configures upload settings
        DiskFileItemFactory factory = new DiskFileItemFactory();
        // sets memory threshold - beyond which files are stored in disk
        factory.setSizeThreshold(MEMORY_THRESHOLD);
        // sets temporary location to store files
        factory.setRepository(new File(System.getProperty("java.io.tmpdir")));

        ServletFileUpload upload = new ServletFileUpload(factory);

        // sets maximum size of upload file
        upload.setFileSizeMax(MAX_FILE_SIZE);

        // sets maximum size of request (include file + form data)
        upload.setSizeMax(MAX_REQUEST_SIZE);

        // constructs the directory path to store upload file
        // this path is relative to application's directory
        String uploadPath = getServletContext().getRealPath("") + File.separator + UPLOAD_DIRECTORY;

        // creates the directory if it does not exist
        // NOTE(review): mkdir() result is ignored; a failure is only noticed later when item.write fails.
        File uploadDir = new File(uploadPath);
        if (!uploadDir.exists()) {
            uploadDir.mkdir();
        }

        try {
            List<FileItem> formItems = upload.parseRequest(request);
            String fileName1 = "";
            if (formItems != null && formItems.size() > 0) {
                Map<String, String> map = new HashMap<>();
                for (FileItem item : formItems) {
                    if (!item.isFormField()) {
                        // file part: save it under uploadPath and record the name
                        if (!item.getName().isEmpty()) {
                            String fileName = new File(item.getName()).getName();
                            fileName1 += fileName;
                            String filePath = uploadPath + File.separator + fileName;
                            File storeFile = new File(filePath);
                            item.write(storeFile);
                            map.put("file", fileName1);
                        }
                    } else {
                        // regular form field: re-decode the ISO-8859-1 bytes as UTF-8
                        // to recover non-ASCII input, keep only "ed"-prefixed fields
                        String fieldname = item.getFieldName().toLowerCase();
                        String fieldvalue = new String(item.getString().getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8);
                        if (fieldname.startsWith("ed")) {
                            map.put(fieldname.substring(2), fieldvalue);
                        }
                    }
                }
                HttpSession session = request.getSession();
                // NOTE(review): this Car object graph is fully populated but never
                // passed anywhere — only the CarData below reaches the service.
                // Looks like dead code; confirm before removing.
                Car car = new Car();
                CarModel carModel = new CarModel();
                CarType carType = new CarType();
                carType.setName(map.get("type"));
                CarMark carMark = new CarMark();
                carMark.setName(map.get("mark"));
                City city = new City();
                city.setName(map.get("city"));
                CarBody carBody = new CarBody();
                carBody.setName(map.get("body"));
                carModel.setCartype(carType);
                carModel.setCarmark(carMark);
                carModel.setName(map.get("model"));
                car.setCity(city);
                car.setCarmodel(carModel);
                car.setCarbody(carBody);
                car.setYear(Integer.parseInt(map.get("year")));
                car.setPrice(Integer.parseInt(map.get("price")));
                car.setFilename("null".equals(map.get("file")) ? "" : map.get("file"));
                // Hand the collected data to the service layer, keyed by the logged-in user.
                logic.getCar(new CarData((String) session.getAttribute("login"), map.get("city"), map.get("type"), map.get("mark"),
                        map.get("model"), map.get("body"), Integer.parseInt(map.get("year")), Integer.parseInt(map.get("price")),
                        "null".equals(map.get("file")) ? "" : map.get("file")));
            }
        } catch (Exception e) {
            // NOTE(review): errors are only printed; the user is still forwarded
            // to mynotes.html as if the save succeeded.
            e.printStackTrace();
        }
        request.getRequestDispatcher("mynotes.html").forward(request, response);
    }
}
pattyolanterns/mbed-os
features/nanostack/sal-stack-nanostack/source/Security/PANA/pana_relay_table.c
/*
 * Copyright (c) 2013-2017, Arm Limited and affiliates.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "nsconfig.h"
#include "ns_types.h"
#include "string.h"
#include "Security/PANA/pana_relay_table.h"
#include "nsdynmemLIB.h"

#ifdef PANA

/* Head of the doubly linked list of known PANA relay endpoints. */
static NS_LARGE relay_table_entry_t *relay_first_info = 0;

/*
 * Free every relay table entry and reset the list head to empty.
 */
void pana_relay_table_init(void)
{
    if (relay_first_info) {
        relay_table_entry_t *cur = 0;
        while (relay_first_info) {
            cur = relay_first_info;
            relay_first_info = cur->next_info;
            ns_dyn_mem_free(cur);
        }
    }
}

/*
 * Look up the entry for the given 16-byte IPv6 address and refresh its
 * lifetime. If no entry exists, allocate a new one and append it to the
 * tail of the list.
 *
 * \param address 16-byte IPv6 address of the relay.
 * \return the refreshed or newly created entry, or 0 if allocation failed.
 */
relay_table_entry_t *pana_relay_table_update(uint8_t *address)
{
    relay_table_entry_t *ret_val = 0;
    relay_table_entry_t *cur = 0;
    if (relay_first_info) {
        cur = relay_first_info;
        while (cur) {
            if (memcmp((uint8_t *)cur->relay_address, (uint8_t *) address, 16) == 0) {
                /* Known relay: refresh its lifetime. */
                cur->ttl = 20;
                ret_val = cur;
                break;
            }
            if (cur->next_info) {
                cur = cur->next_info;
            } else {
                /* Keep 'cur' pointing at the tail for the append below. */
                break;
            }
        }
    }
    /* Not found: allocate a new entry. */
    if (ret_val == 0) {
        ret_val = (relay_table_entry_t *) ns_dyn_mem_alloc(sizeof(relay_table_entry_t));
        if (ret_val) {
            if (cur != 0) {
                /* Append after the current tail. */
                cur->next_info = ret_val;
                ret_val->prev_info = cur;
            } else {
                ret_val->prev_info = 0;
            }
            ret_val->next_info = 0;
            /* Copy the IPv6 address. */
            memcpy((uint8_t *)ret_val->relay_address, address, 16);
            ret_val->ttl = 20; /* initial lifetime; aged by pana_relay_ttl_update() */
            if (relay_first_info == 0) {
                relay_first_info = ret_val;
            }
        }
    }
    return ret_val;
}

/*
 * Find the entry matching the given 16-byte IPv6 address without
 * modifying its lifetime.
 *
 * \return matching entry, or 0 if the address is unknown.
 */
relay_table_entry_t *pana_relay_detect(uint8_t *address)
{
    relay_table_entry_t *cur = 0;
    if (relay_first_info) {
        cur = relay_first_info;
        while (cur) {
            if (memcmp((uint8_t *)cur->relay_address, (uint8_t *) address, 16) == 0) {
                return cur;
            }
            cur = cur->next_info;
        }
    }
    return 0;
}

/*
 * Age the relay table by ttl_time ticks. Entries whose remaining lifetime
 * exceeds ttl_time are decremented; all others have expired and are
 * unlinked from the list and freed.
 *
 * BUG FIX: the comparison was previously inverted (cur->ttl < ttl_time),
 * which freed every still-valid entry while letting expired entries
 * underflow their unsigned TTL and survive forever.
 */
void pana_relay_ttl_update(uint16_t ttl_time)
{
    relay_table_entry_t *cur = 0;
    if (relay_first_info) {
        relay_table_entry_t *prev = 0;
        relay_table_entry_t *next = 0;
        cur = relay_first_info;
        while (cur) {
            if (cur->ttl > ttl_time) {
                /* Still alive: consume part of its lifetime. */
                cur->ttl -= ttl_time;
                cur = cur->next_info;
            } else {
                /* Expired: unlink from the list, then free. */
                prev = cur->prev_info;
                next = cur->next_info;
                if (prev) {
                    //Not First
                    if (next) {
                        //Not Last
                        prev->next_info = next;
                        next->prev_info = prev; //Link next to new prev
                    } else {
                        //Last One
                        prev->next_info = 0; /* New last entry */
                    }
                } else {
                    //FIRST
                    if (next == 0) {
                        /* Last entry */
                        relay_first_info = 0; //Reset table
                    } else {
                        //New First
                        relay_first_info = next; // Setup new First
                        next->prev_info = 0;
                    }
                }
                ns_dyn_mem_free(cur);
                cur = next;
            }
        }
    }
}
#endif
MadelineMurray/teku
networking/eth2/src/test/java/tech/pegasys/teku/networking/eth2/gossip/topics/BlockTopicHandlerTest.java
/*
 * Copyright 2019 ConsenSys AG.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package tech.pegasys.teku.networking.eth2.gossip.topics;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import com.google.common.eventbus.EventBus;
import io.libp2p.core.pubsub.ValidationResult;
import org.apache.tuweni.bytes.Bytes;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import tech.pegasys.teku.core.StateTransition;
import tech.pegasys.teku.datastructures.blocks.SignedBeaconBlock;
import tech.pegasys.teku.datastructures.state.ForkInfo;
import tech.pegasys.teku.datastructures.util.DataStructureUtil;
import tech.pegasys.teku.infrastructure.unsigned.UInt64;
import tech.pegasys.teku.networking.eth2.gossip.encoding.GossipEncoding;
import tech.pegasys.teku.networking.eth2.gossip.events.GossipedBlockEvent;
import tech.pegasys.teku.networking.eth2.gossip.topics.validation.BlockValidator;
import tech.pegasys.teku.ssz.SSZTypes.Bytes4;
import tech.pegasys.teku.statetransition.BeaconChainUtil;
import tech.pegasys.teku.storage.client.MemoryOnlyRecentChainData;
import tech.pegasys.teku.storage.client.RecentChainData;

/**
 * Tests for {@link BlockTopicHandler}: SSZ-snappy encoded gossip messages carrying
 * {@link SignedBeaconBlock}s are decoded, validated, and -- unless rejected as invalid --
 * republished on the event bus as {@link GossipedBlockEvent}s.
 */
public class BlockTopicHandlerTest {
  // Deterministic fixture factory for blocks and fork info.
  private final DataStructureUtil dataStructureUtil = new DataStructureUtil();
  // Mocked so tests can verify what the handler posts (or doesn't).
  private final EventBus eventBus = mock(EventBus.class);
  private final GossipEncoding gossipEncoding = GossipEncoding.SSZ_SNAPPY;
  private final RecentChainData recentChainData = MemoryOnlyRecentChainData.create(eventBus);
  private final BlockValidator blockValidator =
      new BlockValidator(recentChainData, new StateTransition());
  // Chain helper backed by 2 validators, writing into the in-memory chain data above.
  private final BeaconChainUtil beaconChainUtil = BeaconChainUtil.create(2, recentChainData);

  private BlockTopicHandler topicHandler =
      new BlockTopicHandler(
          gossipEncoding, dataStructureUtil.randomForkInfo(), blockValidator, eventBus);

  @BeforeEach
  public void setup() {
    beaconChainUtil.initializeStorage();
  }

  /** A block for the current slot passes validation and is posted to the event bus. */
  @Test
  public void handleMessage_validBlock() throws Exception {
    final UInt64 nextSlot = recentChainData.getBestSlot().plus(UInt64.ONE);
    final SignedBeaconBlock block = beaconChainUtil.createBlockAtSlot(nextSlot);
    Bytes serialized = gossipEncoding.encode(block);
    beaconChainUtil.setSlot(nextSlot);

    final ValidationResult result = topicHandler.handleMessage(serialized).join();
    assertThat(result).isEqualTo(ValidationResult.Valid);
    verify(eventBus).post(new GossipedBlockEvent(block));
  }

  /**
   * A block from a slot ahead of the local clock is Ignore'd for gossip propagation,
   * but is still posted locally (so it can be processed once its slot arrives).
   */
  @Test
  public void handleMessage_validFutureBlock() throws Exception {
    final UInt64 nextSlot = recentChainData.getBestSlot().plus(UInt64.ONE);
    final SignedBeaconBlock block = beaconChainUtil.createBlockAtSlot(nextSlot);
    Bytes serialized = gossipEncoding.encode(block);
    beaconChainUtil.setSlot(recentChainData.getBestSlot());

    final ValidationResult result = topicHandler.handleMessage(serialized).join();
    assertThat(result).isEqualTo(ValidationResult.Ignore);
    verify(eventBus).post(new GossipedBlockEvent(block));
  }

  /**
   * A block whose pre-state is unknown cannot be fully validated: Ignore'd for gossip,
   * but still posted locally for later processing.
   */
  @Test
  public void handleMessage_invalidBlock_unknownPreState() {
    SignedBeaconBlock block = dataStructureUtil.randomSignedBeaconBlock(1);
    Bytes serialized = gossipEncoding.encode(block);

    final ValidationResult result = topicHandler.handleMessage(serialized).join();
    assertThat(result).isEqualTo(ValidationResult.Ignore);
    verify(eventBus).post(new GossipedBlockEvent(block));
  }

  /** Undecodable bytes are rejected outright. */
  @Test
  public void handleMessage_invalidBlock_invalidSSZ() {
    Bytes serialized = Bytes.fromHexString("0x1234");

    final ValidationResult result = topicHandler.handleMessage(serialized).join();
    assertThat(result).isEqualTo(ValidationResult.Invalid);
  }

  /** A block signed by the wrong proposer is Invalid and must NOT reach the event bus. */
  @Test
  public void handleMessage_invalidBlock_wrongProposer() throws Exception {
    final UInt64 nextSlot = recentChainData.getBestSlot().plus(UInt64.ONE);
    final SignedBeaconBlock block = beaconChainUtil.createBlockAtSlotFromInvalidProposer(nextSlot);
    Bytes serialized = gossipEncoding.encode(block);
    beaconChainUtil.setSlot(nextSlot);

    final ValidationResult result = topicHandler.handleMessage(serialized).join();
    assertThat(result).isEqualTo(ValidationResult.Invalid);
    verify(eventBus, never()).post(new GossipedBlockEvent(block));
  }

  /** Topic name embeds the fork digest: /eth2/<digest>/beacon_block/<encoding>. */
  @Test
  public void returnProperTopicName() {
    final Bytes4 forkDigest = Bytes4.fromHexString("0x11223344");
    final ForkInfo forkInfo = mock(ForkInfo.class);
    when(forkInfo.getForkDigest()).thenReturn(forkDigest);
    final BlockTopicHandler topicHandler =
        new BlockTopicHandler(gossipEncoding, forkInfo, blockValidator, eventBus);
    assertThat(topicHandler.getTopic()).isEqualTo("/eth2/11223344/beacon_block/ssz_snappy");
  }
}
xWhitey/Zamorozka-0.5.3-src
net/minecraft/client/renderer/tileentity/TileEntityShulkerBoxRenderer.java
package net.minecraft.client.renderer.tileentity;

import net.minecraft.block.BlockShulkerBox;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.model.ModelShulker;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.entity.RenderShulker;
import net.minecraft.tileentity.TileEntityShulkerBox;
import net.minecraft.util.EnumFacing;

/**
 * Renders a shulker box tile entity using the shulker entity model
 * (a static base plus a lid that slides and rotates as the box opens).
 * Note: obfuscated parameter names below; meanings are inferred from usage
 * and marked where they are assumptions.
 */
public class TileEntityShulkerBoxRenderer extends TileEntitySpecialRenderer<TileEntityShulkerBox> {
    private final ModelShulker field_191285_a;

    public TileEntityShulkerBoxRenderer(ModelShulker p_i47216_1_) {
        this.field_191285_a = p_i47216_1_;
    }

    // p_192841_2_/4_/6_: world-relative render coordinates.
    // p_192841_8_: presumably partial ticks (fed to the lid-animation getter) -- confirm.
    // p_192841_9_: block-breaking destroy stage; >= 0 selects a crack texture overlay.
    // p_192841_10_: alpha used when not rendering a destroy overlay.
    public void func_192841_a(TileEntityShulkerBox p_192841_1_, double p_192841_2_, double p_192841_4_, double p_192841_6_, float p_192841_8_, int p_192841_9_, float p_192841_10_) {
        // Default facing when the tile has no world (e.g. item rendering).
        EnumFacing enumfacing = EnumFacing.UP;

        if (p_192841_1_.hasWorldObj()) {
            IBlockState iblockstate = this.getWorld().getBlockState(p_192841_1_.getPos());

            if (iblockstate.getBlock() instanceof BlockShulkerBox) {
                enumfacing = (EnumFacing)iblockstate.getValue(BlockShulkerBox.field_190957_a);
            }
        }

        GlStateManager.enableDepth();
        GlStateManager.depthFunc(515);
        GlStateManager.depthMask(true);
        GlStateManager.disableCull();

        if (p_192841_9_ >= 0) {
            // Breaking overlay: bind the crack texture and scale its UVs in texture space.
            this.bindTexture(DESTROY_STAGES[p_192841_9_]);
            GlStateManager.matrixMode(5890); // GL_TEXTURE
            GlStateManager.pushMatrix();
            GlStateManager.scale(4.0F, 4.0F, 1.0F);
            GlStateManager.translate(0.0625F, 0.0625F, 0.0625F);
            GlStateManager.matrixMode(5888); // GL_MODELVIEW
        } else {
            // Normal render: texture chosen by the box's dye color metadata.
            this.bindTexture(RenderShulker.SHULKER_ENDERGOLEM_TEXTURE[p_192841_1_.func_190592_s().getMetadata()]);
        }

        GlStateManager.pushMatrix();
        GlStateManager.enableRescaleNormal();

        if (p_192841_9_ < 0) {
            GlStateManager.color(1.0F, 1.0F, 1.0F, p_192841_10_);
        }

        // Move to the block center, flip the entity model right-side up, and
        // shrink it fractionally (0.9995) around the block center to avoid z-fighting.
        GlStateManager.translate((float)p_192841_2_ + 0.5F, (float)p_192841_4_ + 1.5F, (float)p_192841_6_ + 0.5F);
        GlStateManager.scale(1.0F, -1.0F, -1.0F);
        GlStateManager.translate(0.0F, 1.0F, 0.0F);
        float f = 0.9995F;
        GlStateManager.scale(0.9995F, 0.9995F, 0.9995F);
        GlStateManager.translate(0.0F, -1.0F, 0.0F);

        // Orient the model to the box's attachment face. DOWN deliberately
        // falls through to UP after its rotation (no break).
        switch (enumfacing) {
            case DOWN:
                GlStateManager.translate(0.0F, 2.0F, 0.0F);
                GlStateManager.rotate(180.0F, 1.0F, 0.0F, 0.0F);

            case UP:
            default:
                break;

            case NORTH:
                GlStateManager.translate(0.0F, 1.0F, 1.0F);
                GlStateManager.rotate(90.0F, 1.0F, 0.0F, 0.0F);
                GlStateManager.rotate(180.0F, 0.0F, 0.0F, 1.0F);
                break;

            case SOUTH:
                GlStateManager.translate(0.0F, 1.0F, -1.0F);
                GlStateManager.rotate(90.0F, 1.0F, 0.0F, 0.0F);
                break;

            case WEST:
                GlStateManager.translate(-1.0F, 1.0F, 0.0F);
                GlStateManager.rotate(90.0F, 1.0F, 0.0F, 0.0F);
                GlStateManager.rotate(-90.0F, 0.0F, 0.0F, 1.0F);
                break;

            case EAST:
                GlStateManager.translate(1.0F, 1.0F, 0.0F);
                GlStateManager.rotate(90.0F, 1.0F, 0.0F, 0.0F);
                GlStateManager.rotate(90.0F, 0.0F, 0.0F, 1.0F);
        }

        // Static shell first, then the lid offset/rotated by the open-progress
        // value (func_190585_a appears to return a 0..1 animation fraction -- confirm).
        this.field_191285_a.base.render(0.0625F);
        GlStateManager.translate(0.0F, -p_192841_1_.func_190585_a(p_192841_8_) * 0.5F, 0.0F);
        GlStateManager.rotate(270.0F * p_192841_1_.func_190585_a(p_192841_8_), 0.0F, 1.0F, 0.0F);
        this.field_191285_a.lid.render(0.0625F);

        GlStateManager.enableCull();
        GlStateManager.disableRescaleNormal();
        GlStateManager.popMatrix();
        GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);

        if (p_192841_9_ >= 0) {
            // Undo the texture-matrix changes made for the breaking overlay.
            GlStateManager.matrixMode(5890);
            GlStateManager.popMatrix();
            GlStateManager.matrixMode(5888);
        }
    }
}
yuantiku/ytk-learn
src/main/java/com/fenbi/ytklearn/param/RandomParams.java
/**
 *
 * Copyright (c) 2017 ytk-learn https://github.com/yuantiku
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.fenbi.ytklearn.param;

import com.fenbi.ytklearn.utils.CheckUtils;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import lombok.Data;

import java.io.File;
import java.io.Serializable;
import java.util.Arrays;

/**
 * Parameters for model-weight random initialization, parsed from a Typesafe
 * {@link Config} under the "{prefix}random." namespace. Supports a normal
 * (mean/std) and a uniform (range_start/range_end) distribution.
 *
 * @author xialong
 */
@Data
public class RandomParams implements Serializable {
    // Config key namespace for this parameter group.
    public static final String KEY = "random.";

    public Mode mode;

    /** Supported random-init distributions; UNKNOWN marks an unrecognized config value. */
    public static enum Mode {
        NORMAL("normal"),
        UNIFORM("uniform"),
        UNKNOWN("unknown");

        private String name;

        public String toString() {
            return name;
        }

        private Mode(String name) {
            this.name = name;
        }

        /** Case-insensitive lookup by config string; returns UNKNOWN on no match. */
        public static Mode getMode(String name) {
            for (Mode mode : values()) {
                if (name.equalsIgnoreCase(mode.toString())) {
                    return mode;
                }
            }
            return UNKNOWN;
        }

        /** Lists the valid modes (all values except the trailing UNKNOWN sentinel). */
        public static String allToString() {
            Mode []validModels = new Mode[values().length - 1];
            for (int i = 0; i < validModels.length; i++) {
                validModels[i] = values()[i];
            }
            return Arrays.toString(validModels);
        }
    }

    public int seed;

    public Normal normal;

    /** normal(mean, std) parameters read from "{prefix}random.normal.*". */
    @Data
    public static class Normal {
        public static final String KEY = "normal.";
        public double mean;
        public double std;

        public Normal(Config config, String prefix) {
            mean = config.getDouble(prefix + KEY + "mean");
            std = config.getDouble(prefix + KEY + "std");
        }
    }

    public Uniform uniform;

    /** uniform[range_start, range_end) parameters read from "{prefix}random.uniform.*". */
    @Data
    public static class Uniform {
        public static final String KEY = "uniform.";
        public double range_start;
        public double range_end;

        public Uniform(Config config, String prefix) {
            range_start = config.getDouble(prefix + KEY + "range_start");
            range_end = config.getDouble(prefix + KEY + "range_end");
        }
    }

    // NOTE(review): both the normal.* and uniform.* sections are read
    // unconditionally, so the config must define both regardless of which
    // mode is selected.
    public RandomParams(Config config, String prefix) {
        mode = Mode.getMode(config.getString(prefix + KEY + "mode"));
        seed = config.getInt(prefix + KEY + "seed");
        normal = new Normal(config, prefix + KEY);
        uniform = new Uniform(config, prefix + KEY);

        CheckUtils.check(mode != Mode.UNKNOWN, "unknown %smode:%s, only support:%s",
                prefix + KEY, mode, Mode.allToString());
    }

    // Manual smoke test: parse a sample config and dump the parsed params.
    public static void main(String []args) {
        Config config = ConfigFactory.parseFile(new File("config/model/fm.conf"));
        RandomParams randomParams = new RandomParams(config, "");
        System.out.println(randomParams);
    }
}
SemanticBeeng/toolbox
huginlink/src/main/java/eu/amidst/huginlink/examples/learning/ParallelPCExample.java
/*
 *
 *
 *    Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.
 *    See the NOTICE file distributed with this work for additional information regarding copyright ownership.
 *    The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use
 *    this file except in compliance with the License. You may obtain a copy of the License at
 *
 *            http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software distributed under the License is
 *    distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and limitations under the License.
 *
 *
 */
package eu.amidst.huginlink.examples.learning;

import com.google.common.base.Stopwatch;
import eu.amidst.core.datastream.DataInstance;
import eu.amidst.core.datastream.DataStream;
import eu.amidst.core.io.BayesianNetworkLoader;
import eu.amidst.core.learning.parametric.ParallelMaximumLikelihood;
import eu.amidst.core.models.BayesianNetwork;
import eu.amidst.core.utils.BayesianNetworkSampler;
import eu.amidst.huginlink.learning.ParallelPC;

import java.util.ArrayList;
import java.util.Arrays;

/**
 * This class provides a link to the <a href="https://www.hugin.com">Hugin</a>'s functionality to learn in parallel
 * the structure of a Bayesian network model from data using the PC algorithm.
 *
 * An important remark is that Hugin only allows to apply the PC algorithm over a data set completely loaded into RAM
 * memory. The case where our data set does not fit into memory, it solved in AMIDST in the following way. We learn
 * the structure using a smaller data set produced by <a href="https://en.wikipedia.org/wiki/Reservoir_sampling">Reservoir sampling</a>
 * and, then, we use AMIDST's {@link ParallelMaximumLikelihood} to learn the parameters of the BN model over the
 * whole data set.
 *
 * <p> For further details about the implementation of the parallel PC algorithm look at the following paper: </p>
 *
 * <i> <NAME>., <NAME>., <NAME>., <NAME>., <NAME>. (2015). Parallelization of the PC
 * Algorithm (2015). The XVI Conference of the Spanish Association for Artificial Intelligence (CAEPIA'15), pages 14-24 </i>
 */
public class ParallelPCExample {

    /**
     * Samples a data stream from the "Pigs" network and times structural
     * learning with the parallel PC algorithm for 1..4 cores.
     * Reads networks/dataWeka/Pigs.bn from the working directory.
     */
    public static void main(String[] args) throws Exception {

        //We load a Bayesian network to generate a data stream
        //using BayesianNewtorkSampler class.
        int sampleSize = 100000;
        BayesianNetwork bn = BayesianNetworkLoader.loadFromFile("networks/dataWeka/Pigs.bn");
        BayesianNetworkSampler sampler = new BayesianNetworkSampler(bn);

        //We fix the number of samples in memory used for performing the structural learning.
        //They are randomly sub-sampled using Reservoir sampling.
        int samplesOnMemory = 5000;

        //We make different trials with different number of cores
        ArrayList<Integer> vNumCores = new ArrayList(Arrays.asList(1, 2, 3, 4));

        for (Integer numCores : vNumCores) {
            System.out.println("Learning PC: " + samplesOnMemory + " samples on memory, " + numCores + " core/s ...");
            // Fresh stream per trial so every core count sees the same-sized input.
            DataStream<DataInstance> data = sampler.sampleToDataStream(sampleSize);

            //The class ParallelTAN is created
            ParallelPC parallelPC = new ParallelPC();

            //We activate the parallel mode.
            parallelPC.setParallelMode(true);

            //We set the number of cores to be used for the structural learning
            parallelPC.setNumCores(numCores);

            //We set the number of samples to be used for the learning the structure
            parallelPC.setNumSamplesOnMemory(samplesOnMemory);

            Stopwatch watch = Stopwatch.createStarted();

            //We just invoke this mode to learn a BN model for the data stream
            BayesianNetwork model = parallelPC.learn(data);

            // Elapsed wall-clock time for this core count.
            System.out.println(watch.stop());
        }
    }
}
thariyarox/ORG-carbon-identity
components/identity/org.wso2.carbon.identity.oauth/src/main/java/org/wso2/carbon/identity/oauth/tokenprocessor/EncryptionDecryptionPersistenceProcessor.java
<reponame>thariyarox/ORG-carbon-identity /* *Copyright (c) 2005-2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * *WSO2 Inc. licenses this file to you under the Apache License, *Version 2.0 (the "License"); you may not use this file except *in compliance with the License. *You may obtain a copy of the License at * *http://www.apache.org/licenses/LICENSE-2.0 * *Unless required by applicable law or agreed to in writing, *software distributed under the License is distributed on an *"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *KIND, either express or implied. See the License for the *specific language governing permissions and limitations *under the License. */ package org.wso2.carbon.identity.oauth.tokenprocessor; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.core.util.CryptoException; import org.wso2.carbon.core.util.CryptoUtil; import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception; /** * An implementation of <Code>TokenPersistenceProcessor</Code> * which is used when storing encrypted tokens. 
*/ public class EncryptionDecryptionPersistenceProcessor implements TokenPersistenceProcessor { protected Log log = LogFactory.getLog(EncryptionDecryptionPersistenceProcessor.class); public String getPreprocessedClientId(String processedClientId) throws IdentityOAuth2Exception { try { return new String(CryptoUtil.getDefaultCryptoUtil().base64DecodeAndDecrypt(processedClientId)); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } public String getProcessedClientId(String clientId) throws IdentityOAuth2Exception { try { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode(clientId.getBytes()); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } public String getPreprocessedClientSecret(String processedClientSecret) throws IdentityOAuth2Exception { try { return new String(CryptoUtil.getDefaultCryptoUtil().base64DecodeAndDecrypt(processedClientSecret)); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } public String getProcessedClientSecret(String clientSecret) throws IdentityOAuth2Exception { try { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode(clientSecret.getBytes()); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } public String getPreprocessedAuthzCode(String processedAuthzCode) throws IdentityOAuth2Exception { try { return new String(CryptoUtil.getDefaultCryptoUtil().base64DecodeAndDecrypt(processedAuthzCode)); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } public String getProcessedAuthzCode(String authzCode) throws IdentityOAuth2Exception { try { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode(authzCode.getBytes()); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new 
IdentityOAuth2Exception(e.getMessage()); } } public String getPreprocessedAccessTokenIdentifier(String processedAccessTokenIdentifier) throws IdentityOAuth2Exception { try { return new String(CryptoUtil.getDefaultCryptoUtil().base64DecodeAndDecrypt( processedAccessTokenIdentifier)); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } public String getProcessedAccessTokenIdentifier(String accessTokenIdentifier) throws IdentityOAuth2Exception { try { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode( accessTokenIdentifier.getBytes()); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } public String getPreprocessedRefreshToken(String processedRefreshToken) throws IdentityOAuth2Exception { try { return new String(CryptoUtil.getDefaultCryptoUtil().base64DecodeAndDecrypt( processedRefreshToken)); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } public String getProcessedRefreshToken(String refreshToken) throws IdentityOAuth2Exception { try { return CryptoUtil.getDefaultCryptoUtil().encryptAndBase64Encode( refreshToken.getBytes()); } catch (CryptoException e) { log.debug(e.getMessage(), e); throw new IdentityOAuth2Exception(e.getMessage()); } } }
harness/harness-go-sdk
harness/nextgen/model_triggered_by.go
/* * CD NextGen API Reference * * This is the Open Api Spec 3 for the NextGen Manager. This is under active development. Beware of the breaking change with respect to the generated code stub # Authentication <!-- ReDoc-Inject: <security-definitions> --> * * API version: 3.0 * Contact: <EMAIL> * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) */ package nextgen type TriggeredBy struct { UnknownFields *UnknownFieldSet `json:"unknownFields,omitempty"` Identifier string `json:"identifier,omitempty"` Initialized bool `json:"initialized,omitempty"` UuidBytes *ByteString `json:"uuidBytes,omitempty"` IdentifierBytes *ByteString `json:"identifierBytes,omitempty"` ExtraInfoCount int32 `json:"extraInfoCount,omitempty"` ExtraInfo map[string]string `json:"extraInfo,omitempty"` ExtraInfoMap map[string]string `json:"extraInfoMap,omitempty"` Uuid string `json:"uuid,omitempty"` SerializedSize int32 `json:"serializedSize,omitempty"` ParserForType *ParserTriggeredBy `json:"parserForType,omitempty"` DefaultInstanceForType *TriggeredBy `json:"defaultInstanceForType,omitempty"` AllFields map[string]interface{} `json:"allFields,omitempty"` InitializationErrorString string `json:"initializationErrorString,omitempty"` DescriptorForType *Descriptor `json:"descriptorForType,omitempty"` MemoizedSerializedSize int32 `json:"memoizedSerializedSize,omitempty"` }
iganna/lspec
ugene/src/plugins/biostruct3d_view/src/WormsGLRenderer.h
/**
 * UGENE - Integrated Bioinformatics Tools.
 * Copyright (C) 2008-2012 UniPro <<EMAIL>>
 * http://ugene.unipro.ru
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA 02110-1301, USA.
 */

#ifndef _U2_BIOSTRUCT3D_WORMS_RENDERER_H_
#define _U2_BIOSTRUCT3D_WORMS_RENDERER_H_

#include <QtCore/QSharedDataPointer>
#include <QtCore/QVector>
#include <QtCore/QMap>
#include <QtCore/QPair>

#include <U2Core/Vector3D.h>

#include "BioStruct3DGLRender.h"
#include "GraphicUtils.h"

class Object3D;
class AtomData;

namespace U2 {

/**
 * "Worms" renderer for 3D biostructures: draws each biopolymer chain as a
 * smooth tube through its backbone atoms, with helices and strands rendered
 * as separate secondary-structure objects.
 */
class WormsGLRenderer : public BioStruct3DGLRenderer {
    // BioPolymer molecule

    // One residue's backbone handles: the alpha carbon defines the worm path,
    // the carbonyl oxygen is also kept (presumably for tube orientation --
    // confirm in the .cpp).
    struct Monomer {
        Monomer() : alphaCarbon(NULL), carbonylOxygen(NULL) {}
        QSharedDataPointer<AtomData> alphaCarbon;
        QSharedDataPointer<AtomData> carbonylOxygen;
    };

    // One structural model of a chain: residue id -> backbone atoms.
    struct BioPolymerModel {
        QMap<int,Monomer> monomerMap;
    };

    // A chain with all of its models (NMR structures have several).
    struct BioPolymer {
        // multiple models
        QVector<BioPolymerModel> bpModels;
    };

    // chain id -> biopolymer data built from the BioStruct3D.
    QMap<int, BioPolymer > bioPolymerMap;

    // Worm Model
    typedef QVector<SharedAtom> AtomsVector;

    // Renderable form of one model of one chain.
    struct WormModel {
        //! These coords required to draw worms endings correctly
        Vector3D openingAtom, closingAtom;
        // Worm building atom coords
        AtomsVector atoms;
        // Objects representing secondary structure
        QVector<Object3D*> objects;
    };

    struct Worm {
        QVector<WormModel> models;
    };

    // chain id -> worm geometry derived from bioPolymerMap.
    QMap<int, Worm> wormMap;

    Color4f atomColor;

    // Build secondary-structure Object3Ds for all chains/models.
    void createObjects3D();
    Object3D* createStrand3D( int startId, int endId, const BioPolymerModel &bpModel );
    Object3D* createHelix3D( int startId, int endId, const BioPolymerModel &bpModel );
    // Derive wormMap geometry from bioPolymerMap.
    void createWorms();
    void drawWorms();
    void drawSecondaryStructure();
    const float* getAtomColor(const SharedAtom& atom);

protected:
    WormsGLRenderer(const BioStruct3D& struc, const BioStruct3DColorScheme* s, const QList<int> &shownModels, const BioStruct3DRendererSettings *settings);

public:
    virtual ~WormsGLRenderer();

    virtual void create();
    virtual void drawBioStruct3D();
    virtual void updateColorScheme();
    virtual void updateShownModels();
    virtual void updateSettings();

public:
    // True when the structure has the backbone atoms this renderer needs.
    static bool isAvailableFor(const BioStruct3D &);

    RENDERER_FACTORY(WormsGLRenderer)
};

} //namespace

#endif // _U2_BIOSTRUCT3D_WORMS_RENDERER_H_
ybx945ybx/yhy
app/src/main/java/com/quanyan/yhy/ui/views/CustomerRatingBarGo.java
<reponame>ybx945ybx/yhy package com.quanyan.yhy.ui.views; import android.content.Context; import android.graphics.drawable.Drawable; import android.util.AttributeSet; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.widget.ImageView; import android.widget.LinearLayout; import com.quanyan.yhy.R; public class CustomerRatingBarGo extends LinearLayout implements View.OnTouchListener { private ImageView image1; private ImageView image2; private ImageView image3; private ImageView image4; private ImageView image5; private int progress = 4; private static Drawable defDraw; private static Drawable highDraw; private OnProgressChangeListener listener = null; private View ratingView; private boolean touch; public interface OnProgressChangeListener { public void onProgressChange(View view, int progress); } public void setOnProgressChangeListener(View view, OnProgressChangeListener listener, boolean touch) { ratingView = view; this.listener = listener; this.touch = touch; } public CustomerRatingBarGo(Context context, AttributeSet attrs) { super(context, attrs); LayoutInflater inflater = (LayoutInflater) context .getSystemService(Context.LAYOUT_INFLATER_SERVICE); inflater.inflate(R.layout.style_rating_bar, this); image1 = (ImageView) findViewById(R.id.rating_one); image2 = (ImageView) findViewById(R.id.rating_two); image3 = (ImageView) findViewById(R.id.rating_three); image4 = (ImageView) findViewById(R.id.rating_four); image5 = (ImageView) findViewById(R.id.rating_five); initDrawable(); image1.setOnTouchListener(this); image2.setOnTouchListener(this); image3.setOnTouchListener(this); image4.setOnTouchListener(this); image5.setOnTouchListener(this); } public void setSize(ImageView view, int x, int y) { view.setLayoutParams(new LayoutParams(28, 26)); } public void destroy() { if (defDraw != null) { defDraw = null; } if (highDraw != null) { highDraw = null; } } private void initDrawable() { if (defDraw == null) { defDraw 
= getResources().getDrawable(R.mipmap.icons_star_hollow); } if (highDraw == null) { highDraw = getResources().getDrawable(R.mipmap.icon_star); } } public int getProgress() { return progress; } @SuppressWarnings("deprecation") public void setProgress(int progress) { switch (progress) { case 1: image1.setBackgroundDrawable(highDraw); // image2.setVisibility(View.GONE); // image3.setVisibility(View.GONE); // image4.setVisibility(View.GONE); // image5.setVisibility(View.GONE); image2.setBackgroundDrawable(defDraw); image3.setBackgroundDrawable(defDraw); image4.setBackgroundDrawable(defDraw); image5.setBackgroundDrawable(defDraw); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 1); } break; case 2: image1.setBackgroundDrawable(highDraw); image2.setBackgroundDrawable(highDraw); // image3.setVisibility(View.GONE); // image4.setVisibility(View.GONE); // image5.setVisibility(View.GONE); image3.setBackgroundDrawable(defDraw); image4.setBackgroundDrawable(defDraw); image5.setBackgroundDrawable(defDraw); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 2); } break; case 3: image1.setBackgroundDrawable(highDraw); image2.setBackgroundDrawable(highDraw); image3.setBackgroundDrawable(highDraw); // image4.setVisibility(View.GONE); // image5.setVisibility(View.GONE); image4.setBackgroundDrawable(defDraw); image5.setBackgroundDrawable(defDraw); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 3); } break; case 4: image1.setBackgroundDrawable(highDraw); image2.setBackgroundDrawable(highDraw); image3.setBackgroundDrawable(highDraw); image4.setBackgroundDrawable(highDraw); // image5.setVisibility(View.GONE); image5.setBackgroundDrawable(defDraw); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 4); } break; case 5: image1.setBackgroundDrawable(highDraw); image2.setBackgroundDrawable(highDraw); image3.setBackgroundDrawable(highDraw); 
image4.setBackgroundDrawable(highDraw); image5.setBackgroundDrawable(highDraw); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 5); } break; default: break; } } @Override public boolean onTouch(View v, MotionEvent event) { if (!touch) { return false; } switch (v.getId()) { case R.id.rating_one: progress = 1; setProgress(1); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 1); } break; case R.id.rating_two: progress = 2; setProgress(2); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 2); } break; case R.id.rating_three: progress = 3; setProgress(3); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 3); } break; case R.id.rating_four: progress = 4; setProgress(4); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 4); } break; case R.id.rating_five: progress = 5; setProgress(5); if (listener != null && ratingView != null) { listener.onProgressChange(ratingView, 5); } break; default: break; } return false; } }
srikanthprathi/flatpack
flatpack-samples/src/main/java/net/sf/flatpack/examples/createsamplecsv/CSVTestFileCreator.java
<gh_stars>10-100 package net.sf.flatpack.examples.createsamplecsv; import java.io.FileWriter; import java.io.PrintWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /* * Created on Nov 30, 2005 * */ /** * @author zepernick * * Creates a sample csv file with the specified number of columns and rows */ public class CSVTestFileCreator { private static final Logger LOG = LoggerFactory.getLogger(CSVTestFileCreator.class); public static void main(final String[] args) { int cols = 0; int rows = 0; if (args.length != 2) { printUsage(); return; } try { cols = Integer.parseInt(args[0]); rows = Integer.parseInt(args[1]); } catch (final Exception ex) { printUsage(); return; } createFile(cols, rows); } public static void createFile(final int cols, final int rows) { createFile(cols, rows, "SampleCSV.csv"); } public static void createFile(final int cols, final int rows, final String filename) { try (FileWriter fw = new FileWriter(filename); PrintWriter out = new PrintWriter(fw)) { // write the column names across the top of the file for (int i = 1; i <= cols; i++) { if (i > 1) { out.write(","); } out.write("\"column " + i + "\""); } out.write("\r\n"); out.flush(); // write the rows for (int i = 1; i <= rows; i++) { for (int j = 1; j <= cols; j++) { if (j > 1) { out.write(","); } out.write("\"data " + j + "\""); } out.write("\r\n"); out.flush(); if (i % 100000 == 0) { System.out.print("."); } } } catch (final Exception ex) { LOG.error("Issue", ex); } } private static void printUsage() { System.out.println("INVALID USAGE..."); System.out.println("PARAMETER 1 = # OF COLUMNS"); System.out.println("PARAMETER 2 = # OF ROWS"); System.out.println("Example - java CSVTestFileCreator 10 100"); } }
skrauchenia/akka
akka-docs/rst/java/code/docs/jrouting/CustomRouterDocTest.java
<filename>akka-docs/rst/java/code/docs/jrouting/CustomRouterDocTest.java /** * Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com> */ package docs.jrouting; import akka.routing.FromConfig; import akka.routing.RoundRobinRoutingLogic; import akka.routing.Routee; import akka.routing.RoutingLogic; import akka.routing.SeveralRoutees; import akka.testkit.AkkaJUnitActorSystemResource; import org.junit.ClassRule; import org.junit.Test; import static org.junit.Assert.*; import com.typesafe.config.ConfigFactory; import scala.collection.immutable.IndexedSeq; import static akka.japi.Util.immutableIndexedSeq; import docs.jrouting.RouterDocTest.Parent; import docs.jrouting.RouterDocTest.Workers; import docs.routing.CustomRouterDocSpec; import akka.testkit.JavaTestKit; import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.actor.Props; //#imports1 import akka.actor.UntypedActor; import java.io.Serializable; import java.util.ArrayList; import java.util.List; //#imports1 public class CustomRouterDocTest { @ClassRule public static AkkaJUnitActorSystemResource actorSystemResource = new AkkaJUnitActorSystemResource("CustomRouterDocTest", ConfigFactory.parseString(CustomRouterDocSpec.jconfig())); private final ActorSystem system = actorSystemResource.getSystem(); static //#routing-logic public class RedundancyRoutingLogic implements RoutingLogic { private final int nbrCopies; public RedundancyRoutingLogic(int nbrCopies) { this.nbrCopies = nbrCopies; } RoundRobinRoutingLogic roundRobin = new RoundRobinRoutingLogic(); @Override public Routee select(Object message, IndexedSeq<Routee> routees) { List<Routee> targets = new ArrayList<Routee>(); for (int i = 0; i < nbrCopies; i++) { targets.add(roundRobin.select(message, routees)); } return new SeveralRoutees(targets); } } //#routing-logic static //#unit-test-logic public final class TestRoutee implements Routee { public final int n; public TestRoutee(int n) { this.n = n; } @Override public void send(Object 
message, ActorRef sender) { } @Override public int hashCode() { return n; } @Override public boolean equals(Object obj) { return (obj instanceof TestRoutee) && n == ((TestRoutee) obj).n; } } //#unit-test-logic static public class Storage extends UntypedActor { public void onReceive(Object msg) { getSender().tell(msg, getSelf()); } } @Test public void unitTestRoutingLogic() { //#unit-test-logic RedundancyRoutingLogic logic = new RedundancyRoutingLogic(3); List<Routee> routeeList = new ArrayList<Routee>(); for (int n = 1; n <= 7; n++) { routeeList.add(new TestRoutee(n)); } IndexedSeq<Routee> routees = immutableIndexedSeq(routeeList); SeveralRoutees r1 = (SeveralRoutees) logic.select("msg", routees); assertEquals(r1.getRoutees().get(0), routeeList.get(0)); assertEquals(r1.getRoutees().get(1), routeeList.get(1)); assertEquals(r1.getRoutees().get(2), routeeList.get(2)); SeveralRoutees r2 = (SeveralRoutees) logic.select("msg", routees); assertEquals(r2.getRoutees().get(0), routeeList.get(3)); assertEquals(r2.getRoutees().get(1), routeeList.get(4)); assertEquals(r2.getRoutees().get(2), routeeList.get(5)); SeveralRoutees r3 = (SeveralRoutees) logic.select("msg", routees); assertEquals(r3.getRoutees().get(0), routeeList.get(6)); assertEquals(r3.getRoutees().get(1), routeeList.get(0)); assertEquals(r3.getRoutees().get(2), routeeList.get(1)); //#unit-test-logic } @Test public void demonstrateUsageOfCustomRouter() { new JavaTestKit(system) {{ //#usage-1 for (int n = 1; n <= 10; n++) { system.actorOf(Props.create(Storage.class), "s" + n); } List<String> paths = new ArrayList<String>(); for (int n = 1; n <= 10; n++) { paths.add("/user/s" + n); } ActorRef redundancy1 = system.actorOf(new RedundancyGroup(paths, 3).props(), "redundancy1"); redundancy1.tell("important", getTestActor()); //#usage-1 for (int i = 0; i < 3; i++) { expectMsgEquals("important"); } //#usage-2 ActorRef redundancy2 = system.actorOf(FromConfig.getInstance().props(), "redundancy2"); redundancy2.tell("very 
important", getTestActor()); //#usage-2 for (int i = 0; i < 5; i++) { expectMsgEquals("very important"); } }}; } }
power-electro/phantomjs-Gohstdriver-DIY-openshift
src/qt/qtbase/src/corelib/tools/qline.cpp
/**************************************************************************** ** ** Copyright (C) 2015 The Qt Company Ltd. ** Contact: http://www.qt.io/licensing/ ** ** This file is part of the QtCore module of the Qt Toolkit. ** ** $QT_BEGIN_LICENSE:LGPL21$ ** Commercial License Usage ** Licensees holding valid commercial Qt licenses may use this file in ** accordance with the commercial license agreement provided with the ** Software or, alternatively, in accordance with the terms contained in ** a written agreement between you and The Qt Company. For licensing terms ** and conditions see http://www.qt.io/terms-conditions. For further ** information use the contact form at http://www.qt.io/contact-us. ** ** GNU Lesser General Public License Usage ** Alternatively, this file may be used under the terms of the GNU Lesser ** General Public License version 2.1 or version 3 as published by the Free ** Software Foundation and appearing in the file LICENSE.LGPLv21 and ** LICENSE.LGPLv3 included in the packaging of this file. Please review the ** following information to ensure the GNU Lesser General Public License ** requirements will be met: https://www.gnu.org/licenses/lgpl.html and ** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html. ** ** As a special exception, The Qt Company gives you certain additional ** rights. These rights are described in The Qt Company LGPL Exception ** version 1.1, included in the file LGPL_EXCEPTION.txt in this package. ** ** $QT_END_LICENSE$ ** ****************************************************************************/ #include "qline.h" #include "qdebug.h" #include "qdatastream.h" #include "qmath.h" #include <private/qnumeric_p.h> QT_BEGIN_NAMESPACE /*! \class QLine \inmodule QtCore \ingroup painting \brief The QLine class provides a two-dimensional vector using integer precision. A QLine describes a finite length line (or a line segment) on a two-dimensional surface. 
The start and end points of the line are specified using integer point accuracy for coordinates. Use the QLineF constructor to retrieve a floating point copy. \table \row \li \inlineimage qline-point.png \li \inlineimage qline-coordinates.png \endtable The positions of the line's start and end points can be retrieved using the p1(), x1(), y1(), p2(), x2(), and y2() functions. The dx() and dy() functions return the horizontal and vertical components of the line. Use isNull() to determine whether the QLine represents a valid line or a null line. Finally, the line can be translated a given offset using the translate() function. \sa QLineF, QPolygon, QRect */ /*! \fn QLine::QLine() Constructs a null line. */ /*! \fn QLine::QLine(const QPoint &p1, const QPoint &p2) Constructs a line object that represents the line between \a p1 and \a p2. */ /*! \fn QLine::QLine(int x1, int y1, int x2, int y2) Constructs a line object that represents the line between (\a x1, \a y1) and (\a x2, \a y2). */ /*! \fn bool QLine::isNull() const Returns \c true if the line is not set up with valid start and end point; otherwise returns \c false. */ /*! \fn QPoint QLine::p1() const Returns the line's start point. \sa x1(), y1(), p2() */ /*! \fn QPoint QLine::p2() const Returns the line's end point. \sa x2(), y2(), p1() */ /*! \fn int QLine::x1() const Returns the x-coordinate of the line's start point. \sa p1() */ /*! \fn int QLine::y1() const Returns the y-coordinate of the line's start point. \sa p1() */ /*! \fn int QLine::x2() const Returns the x-coordinate of the line's end point. \sa p2() */ /*! \fn int QLine::y2() const Returns the y-coordinate of the line's end point. \sa p2() */ /*! \fn int QLine::dx() const Returns the horizontal component of the line's vector. \sa dy() */ /*! \fn int QLine::dy() const Returns the vertical component of the line's vector. \sa dx() */ /*! 
\fn bool QLine::operator!=(const QLine &line) const Returns \c true if the given \a line is not the same as \e this line. A line is different from another line if any of their start or end points differ, or the internal order of the points is different. */ /*! \fn bool QLine::operator==(const QLine &line) const Returns \c true if the given \a line is the same as \e this line. A line is identical to another line if the start and end points are identical, and the internal order of the points is the same. */ /*! \fn void QLine::translate(const QPoint &offset) Translates this line by the given \a offset. */ /*! \fn void QLine::translate(int dx, int dy) \overload Translates this line the distance specified by \a dx and \a dy. */ /*! \fn QLine QLine::translated(const QPoint &offset) const \since 4.4 Returns this line translated by the given \a offset. */ /*! \fn QLine QLine::translated(int dx, int dy) const \overload \since 4.4 Returns this line translated the distance specified by \a dx and \a dy. */ /*! \fn void QLine::setP1(const QPoint &p1) \since 4.4 Sets the starting point of this line to \a p1. \sa setP2(), p1() */ /*! \fn void QLine::setP2(const QPoint &p2) \since 4.4 Sets the end point of this line to \a p2. \sa setP1(), p2() */ /*! \fn void QLine::setPoints(const QPoint &p1, const QPoint &p2) \since 4.4 Sets the start point of this line to \a p1 and the end point of this line to \a p2. \sa setP1(), setP2(), p1(), p2() */ /*! \fn void QLine::setLine(int x1, int y1, int x2, int y2) \since 4.4 Sets this line to the start in \a x1, \a y1 and end in \a x2, \a y2. \sa setP1(), setP2(), p1(), p2() */ #ifndef QT_NO_DEBUG_STREAM QDebug operator<<(QDebug dbg, const QLine &p) { QDebugStateSaver saver(dbg); dbg.nospace() << "QLine(" << p.p1() << ',' << p.p2() << ')'; return dbg; } #endif #ifndef QT_NO_DATASTREAM /*! \relates QLine Writes the given \a line to the given \a stream and returns a reference to the stream. 
\sa {Serializing Qt Data Types} */ QDataStream &operator<<(QDataStream &stream, const QLine &line) { stream << line.p1() << line.p2(); return stream; } /*! \relates QLine Reads a line from the given \a stream into the given \a line and returns a reference to the stream. \sa {Serializing Qt Data Types} */ QDataStream &operator>>(QDataStream &stream, QLine &line) { QPoint p1, p2; stream >> p1; stream >> p2; line = QLine(p1, p2); return stream; } #endif // QT_NO_DATASTREAM #ifndef M_2PI #define M_2PI 6.28318530717958647692528676655900576 #endif /*! \class QLineF \inmodule QtCore \ingroup painting \brief The QLineF class provides a two-dimensional vector using floating point precision. A QLineF describes a finite length line (or line segment) on a two-dimensional surface. QLineF defines the start and end points of the line using floating point accuracy for coordinates. Use the toLine() function to retrieve an integer based copy of this line. \table \row \li \inlineimage qline-point.png \li \inlineimage qline-coordinates.png \endtable The positions of the line's start and end points can be retrieved using the p1(), x1(), y1(), p2(), x2(), and y2() functions. The dx() and dy() functions return the horizontal and vertical components of the line, respectively. The line's length can be retrieved using the length() function, and altered using the setLength() function. Similarly, angle() and setAngle() are respectively used for retrieving and altering the angle of the line. Use the isNull() function to determine whether the QLineF represents a valid line or a null line. The intersect() function determines the IntersectType for this line and a given line, while the angleTo() function returns the angle between the lines. In addition, the unitVector() function returns a line that has the same starting point as this line, but with a length of only 1, while the normalVector() function returns a line that is perpendicular to this line with the same starting point and length. 
Finally, the line can be translated a given offset using the translate() function, and can be traversed using the pointAt() function. \sa QLine, QPolygonF, QRectF */ /*! \enum QLineF::IntersectType Describes the intersection between two lines. \table \row \li \inlineimage qlinef-unbounded.png \li \inlineimage qlinef-bounded.png \row \li QLineF::UnboundedIntersection \li QLineF::BoundedIntersection \endtable \value NoIntersection Indicates that the lines do not intersect; i.e. they are parallel. \value UnboundedIntersection The two lines intersect, but not within the range defined by their lengths. This will be the case if the lines are not parallel. intersect() will also return this value if the intersect point is within the start and end point of only one of the lines. \value BoundedIntersection The two lines intersect with each other within the start and end points of each line. \sa intersect() */ /*! \fn QLineF::QLineF() Constructs a null line. */ /*! \fn QLineF::QLineF(const QPointF &p1, const QPointF &p2) Constructs a line object that represents the line between \a p1 and \a p2. */ /*! \fn QLineF::QLineF(qreal x1, qreal y1, qreal x2, qreal y2) Constructs a line object that represents the line between (\a x1, \a y1) and (\a x2, \a y2). */ /*! \fn QLineF::QLineF(const QLine &line) Construct a QLineF object from the given integer-based \a line. \sa toLine() */ /*! \fn bool QLineF::isNull() const Returns \c true if the line is not set up with valid start and end point; otherwise returns \c false. */ /*! \fn QPointF QLineF::p1() const Returns the line's start point. \sa x1(), y1(), p2() */ /*! \fn QPointF QLineF::p2() const Returns the line's end point. \sa x2(), y2(), p1() */ /*! \fn QLine QLineF::toLine() const Returns an integer based copy of this line. Note that the returned line's start and end points are rounded to the nearest integer. \sa QLineF() */ /*! \fn qreal QLineF::x1() const Returns the x-coordinate of the line's start point. \sa p1() */ /*! 
\fn qreal QLineF::y1() const Returns the y-coordinate of the line's start point. \sa p1() */ /*! \fn qreal QLineF::x2() const Returns the x-coordinate of the line's end point. \sa p2() */ /*! \fn qreal QLineF::y2() const Returns the y-coordinate of the line's end point. \sa p2() */ /*! \fn qreal QLineF::dx() const Returns the horizontal component of the line's vector. \sa dy(), pointAt() */ /*! \fn qreal QLineF::dy() const Returns the vertical component of the line's vector. \sa dx(), pointAt() */ /*! \fn QLineF::setLength(qreal length) Sets the length of the line to the given \a length. QLineF will move the end point - p2() - of the line to give the line its new length. If the line is a null line, the length will remain zero regardless of the length specified. \sa length(), isNull() */ /*! \fn QLineF QLineF::normalVector() const Returns a line that is perpendicular to this line with the same starting point and length. \image qlinef-normalvector.png \sa unitVector() */ /*! \fn bool QLineF::operator!=(const QLineF &line) const Returns \c true if the given \a line is not the same as \e this line. A line is different from another line if their start or end points differ, or the internal order of the points is different. */ /*! \fn bool QLineF::operator==(const QLineF &line) const Returns \c true if the given \a line is the same as this line. A line is identical to another line if the start and end points are identical, and the internal order of the points is the same. */ /*! \fn qreal QLineF::pointAt(qreal t) const Returns the point at the parameterized position specified by \a t. The function returns the line's start point if t = 0, and its end point if t = 1. \sa dx(), dy() */ /*! Returns the length of the line. \sa setLength() */ qreal QLineF::length() const { qreal x = pt2.x() - pt1.x(); qreal y = pt2.y() - pt1.y(); return qSqrt(x*x + y*y); } /*! \since 4.4 Returns the angle of the line in degrees. 
The return value will be in the range of values from 0.0 up to but not including 360.0. The angles are measured counter-clockwise from a point on the x-axis to the right of the origin (x > 0). \sa setAngle() */ qreal QLineF::angle() const { const qreal dx = pt2.x() - pt1.x(); const qreal dy = pt2.y() - pt1.y(); const qreal theta = qAtan2(-dy, dx) * 360.0 / M_2PI; const qreal theta_normalized = theta < 0 ? theta + 360 : theta; if (qFuzzyCompare(theta_normalized, qreal(360))) return qreal(0); else return theta_normalized; } /*! \since 4.4 Sets the angle of the line to the given \a angle (in degrees). This will change the position of the second point of the line such that the line has the given angle. Positive values for the angles mean counter-clockwise while negative values mean the clockwise direction. Zero degrees is at the 3 o'clock position. \sa angle() */ void QLineF::setAngle(qreal angle) { const qreal angleR = angle * M_2PI / 360.0; const qreal l = length(); const qreal dx = qCos(angleR) * l; const qreal dy = -qSin(angleR) * l; pt2.rx() = pt1.x() + dx; pt2.ry() = pt1.y() + dy; } /*! \since 4.4 Returns a QLineF with the given \a length and \a angle. The first point of the line will be on the origin. Positive values for the angles mean counter-clockwise while negative values mean the clockwise direction. Zero degrees is at the 3 o'clock position. */ QLineF QLineF::fromPolar(qreal length, qreal angle) { const qreal angleR = angle * M_2PI / 360.0; return QLineF(0, 0, qCos(angleR) * length, -qSin(angleR) * length); } /*! Returns the unit vector for this line, i.e a line starting at the same point as \e this line with a length of 1.0. 
\sa normalVector() */ QLineF QLineF::unitVector() const { qreal x = pt2.x() - pt1.x(); qreal y = pt2.y() - pt1.y(); qreal len = qSqrt(x*x + y*y); QLineF f(p1(), QPointF(pt1.x() + x/len, pt1.y() + y/len)); #ifndef QT_NO_DEBUG if (qAbs(f.length() - 1) >= 0.001) qWarning("QLine::unitVector: New line does not have unit length"); #endif return f; } /*! \fn QLineF::IntersectType QLineF::intersect(const QLineF &line, QPointF *intersectionPoint) const Returns a value indicating whether or not \e this line intersects with the given \a line. The actual intersection point is extracted to \a intersectionPoint (if the pointer is valid). If the lines are parallel, the intersection point is undefined. */ QLineF::IntersectType QLineF::intersect(const QLineF &l, QPointF *intersectionPoint) const { // ipmlementation is based on Graphics Gems III's "Faster Line Segment Intersection" const QPointF a = pt2 - pt1; const QPointF b = l.pt1 - l.pt2; const QPointF c = pt1 - l.pt1; const qreal denominator = a.y() * b.x() - a.x() * b.y(); if (denominator == 0 || !qt_is_finite(denominator)) return NoIntersection; const qreal reciprocal = 1 / denominator; const qreal na = (b.y() * c.x() - b.x() * c.y()) * reciprocal; if (intersectionPoint) *intersectionPoint = pt1 + a * na; if (na < 0 || na > 1) return UnboundedIntersection; const qreal nb = (a.x() * c.y() - a.y() * c.x()) * reciprocal; if (nb < 0 || nb > 1) return UnboundedIntersection; return BoundedIntersection; } /*! \fn void QLineF::translate(const QPointF &offset) Translates this line by the given \a offset. */ /*! \fn void QLineF::translate(qreal dx, qreal dy) \overload Translates this line the distance specified by \a dx and \a dy. */ /*! \fn QLineF QLineF::translated(const QPointF &offset) const \since 4.4 Returns this line translated by the given \a offset. */ /*! \fn QLineF QLineF::translated(qreal dx, qreal dy) const \overload \since 4.4 Returns this line translated the distance specified by \a dx and \a dy. */ /*! 
\fn void QLineF::setP1(const QPointF &p1) \since 4.4 Sets the starting point of this line to \a p1. \sa setP2(), p1() */ /*! \fn void QLineF::setP2(const QPointF &p2) \since 4.4 Sets the end point of this line to \a p2. \sa setP1(), p2() */ /*! \fn void QLineF::setPoints(const QPointF &p1, const QPointF &p2) \since 4.4 Sets the start point of this line to \a p1 and the end point of this line to \a p2. \sa setP1(), setP2(), p1(), p2() */ /*! \fn void QLineF::setLine(qreal x1, qreal y1, qreal x2, qreal y2) \since 4.4 Sets this line to the start in \a x1, \a y1 and end in \a x2, \a y2. \sa setP1(), setP2(), p1(), p2() */ /*! \fn qreal QLineF::angleTo(const QLineF &line) const \since 4.4 Returns the angle (in degrees) from this line to the given \a line, taking the direction of the lines into account. If the lines do not intersect within their range, it is the intersection point of the extended lines that serves as origin (see QLineF::UnboundedIntersection). The returned value represents the number of degrees you need to add to this line to make it have the same angle as the given \a line, going counter-clockwise. \sa intersect() */ qreal QLineF::angleTo(const QLineF &l) const { if (isNull() || l.isNull()) return 0; const qreal a1 = angle(); const qreal a2 = l.angle(); const qreal delta = a2 - a1; const qreal delta_normalized = delta < 0 ? delta + 360 : delta; if (qFuzzyCompare(delta, qreal(360))) return 0; else return delta_normalized; } /*! \fn qreal QLineF::angle(const QLineF &line) const \obsolete Returns the angle (in degrees) between this line and the given \a line, taking the direction of the lines into account. If the lines do not intersect within their range, it is the intersection point of the extended lines that serves as origin (see QLineF::UnboundedIntersection). 
\table \row \li \inlineimage qlinef-angle-identicaldirection.png \li \inlineimage qlinef-angle-oppositedirection.png \endtable When the lines are parallel, this function returns 0 if they have the same direction; otherwise it returns 180. \sa intersect() */ qreal QLineF::angle(const QLineF &l) const { if (isNull() || l.isNull()) return 0; qreal cos_line = (dx()*l.dx() + dy()*l.dy()) / (length()*l.length()); qreal rad = 0; // only accept cos_line in the range [-1,1], if it is outside, use 0 (we return 0 rather than PI for those cases) if (cos_line >= -1.0 && cos_line <= 1.0) rad = qAcos( cos_line ); return rad * 360 / M_2PI; } #ifndef QT_NO_DEBUG_STREAM QDebug operator<<(QDebug dbg, const QLineF &p) { QDebugStateSaver saver(dbg); dbg.nospace() << "QLineF(" << p.p1() << ',' << p.p2() << ')'; return dbg; } #endif #ifndef QT_NO_DATASTREAM /*! \relates QLineF Writes the given \a line to the given \a stream and returns a reference to the stream. \sa {Serializing Qt Data Types} */ QDataStream &operator<<(QDataStream &stream, const QLineF &line) { stream << line.p1() << line.p2(); return stream; } /*! \relates QLineF Reads a line from the given \a stream into the given \a line and returns a reference to the stream. \sa {Serializing Qt Data Types} */ QDataStream &operator>>(QDataStream &stream, QLineF &line) { QPointF start, end; stream >> start; stream >> end; line = QLineF(start, end); return stream; } #endif // QT_NO_DATASTREAM QT_END_NAMESPACE
umjammer/tritonus
tritonus-alsa/src/main/java/org/tritonus/midi/device/alsa/AlsaMidiDeviceProvider.java
<filename>tritonus-alsa/src/main/java/org/tritonus/midi/device/alsa/AlsaMidiDeviceProvider.java /* * AlsaMidiDeviceProvider.java * * This file is part of Tritonus: http://www.tritonus.org/ */ /* * Copyright (c) 1999 - 2001 by <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* |<--- this code is formatted to fit into 80 columns --->| */ package org.tritonus.midi.device.alsa; import java.util.ArrayList; import java.util.List; import java.util.Iterator; import javax.sound.midi.MidiDevice; import javax.sound.midi.spi.MidiDeviceProvider; import org.tritonus.lowlevel.alsa.AlsaSeq; import org.tritonus.lowlevel.alsa.AlsaSeqClientInfo; import org.tritonus.lowlevel.alsa.AlsaSeqPortInfo; import org.tritonus.share.TDebug; public class AlsaMidiDeviceProvider extends MidiDeviceProvider { // perhaps move to superclass private static final MidiDevice.Info[] EMPTY_INFO_ARRAY = new MidiDevice.Info[0]; private static final int READ_CAPABILITY = AlsaSeq.SND_SEQ_PORT_CAP_READ | AlsaSeq.SND_SEQ_PORT_CAP_SUBS_READ; private static final int WRITE_CAPABILITY = AlsaSeq.SND_SEQ_PORT_CAP_WRITE | AlsaSeq.SND_SEQ_PORT_CAP_SUBS_WRITE; private static List<MidiDevice> m_devices; private static AlsaSeq m_alsaSeq; public AlsaMidiDeviceProvider() { if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.<init>(): begin"); } synchronized (AlsaMidiDeviceProvider.class) { if (m_devices == null) { m_devices = new ArrayList<MidiDevice>(); if (TDebug.TraceMidiDeviceProvider) { 
TDebug.out("AlsaMidiDeviceProvider.<init>(): creating AlsaSeq..."); } // try{ m_alsaSeq = new AlsaSeq("Tritonus ALSA device manager"); // }catch (Throwable t) { TDebug.out(t); } if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.<init>(): ...done"); } scanPorts(); } } if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.<init>(): end"); } } public MidiDevice.Info[] getDeviceInfo() { if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.getDeviceInfo(): begin"); } List<MidiDevice.Info> infoList = new ArrayList<MidiDevice.Info>(); Iterator<MidiDevice> iterator = m_devices.iterator(); while (iterator.hasNext()) { MidiDevice device = iterator.next(); MidiDevice.Info info = device.getDeviceInfo(); infoList.add(info); } MidiDevice.Info[] infos = infoList.toArray(EMPTY_INFO_ARRAY); if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.getDeviceInfo(): end"); } return infos; } public MidiDevice getDevice(MidiDevice.Info info) { if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.getDevice(): begin"); } MidiDevice returnedDevice = null; Iterator<MidiDevice> iterator = m_devices.iterator(); while (iterator.hasNext()) { MidiDevice device = iterator.next(); MidiDevice.Info info2 = device.getDeviceInfo(); if (info != null && info.equals(info2)) { returnedDevice = device; break; } } if (returnedDevice == null) { throw new IllegalArgumentException("no device for " + info); } if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.getDevice(): end"); } return returnedDevice; } private void scanPorts() { if (TDebug.TraceMidiDeviceProvider || TDebug.TracePortScan) { TDebug.out("AlsaMidiDeviceProvider.scanPorts(): begin"); } Iterator clients = m_alsaSeq.getClientInfos(); while (clients.hasNext()) { AlsaSeqClientInfo clientInfo = (AlsaSeqClientInfo) clients.next(); int nClient = clientInfo.getClient(); if (TDebug.TracePortScan) { 
TDebug.out("AlsaMidiDeviceProvider.scanPorts(): client: " + nClient); } Iterator ports = m_alsaSeq.getPortInfos(nClient); while (ports.hasNext()) { AlsaSeqPortInfo portInfo = (AlsaSeqPortInfo) ports.next(); handlePort(clientInfo, portInfo); } } if (TDebug.TraceMidiDeviceProvider || TDebug.TracePortScan) { TDebug.out("AlsaMidiDeviceProvider.scanPorts(): end"); } } private void handlePort(AlsaSeqClientInfo clientInfo, AlsaSeqPortInfo portInfo) { int nClient = clientInfo.getClient(); int nPort = portInfo.getPort(); int nType = portInfo.getType(); int nCapability = portInfo.getCapability(); int nSynthVoices = portInfo.getSynthVoices(); if (TDebug.TracePortScan) { TDebug.out("AlsaMidiDeviceProvider.scanPorts(): port: " + nPort); TDebug.out("AlsaMidiDeviceProvider.scanPorts(): type: " + nType); TDebug.out("AlsaMidiDeviceProvider.scanPorts(): cap: " + nCapability); TDebug.out("AlsaMidiDeviceProvider.scanPorts(): midi channels: " + portInfo.getMidiChannels()); TDebug.out("AlsaMidiDeviceProvider.scanPorts(): midi voices: " + portInfo.getMidiVoices()); TDebug.out("AlsaMidiDeviceProvider.scanPorts(): synth voices: " + portInfo.getSynthVoices()); } if ((nType & AlsaSeq.SND_SEQ_PORT_TYPE_MIDI_GENERIC) != 0) { // TDebug.out("generic midi"); MidiDevice device = null; if ((nType & (AlsaSeq.SND_SEQ_PORT_TYPE_SYNTH | AlsaSeq.SND_SEQ_PORT_TYPE_DIRECT_SAMPLE | AlsaSeq.SND_SEQ_PORT_TYPE_SAMPLE)) != 0) { boolean bWriteSubscriptionAllowed = (nCapability & WRITE_CAPABILITY) == WRITE_CAPABILITY; if (bWriteSubscriptionAllowed) { device = new AlsaSynthesizer(nClient, nPort, nSynthVoices); } else { if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.getDevice(): port does not allows write subscription, not used"); } } } else // ordinary midi port { boolean bReadSubscriptionAllowed = (nCapability & READ_CAPABILITY) == READ_CAPABILITY; boolean bWriteSubscriptionAllowed = (nCapability & WRITE_CAPABILITY) == WRITE_CAPABILITY; if (bReadSubscriptionAllowed || 
bWriteSubscriptionAllowed) { device = new AlsaMidiDevice(nClient, nPort, bReadSubscriptionAllowed, bWriteSubscriptionAllowed); } else { if (TDebug.TraceMidiDeviceProvider) { TDebug.out("AlsaMidiDeviceProvider.getDevice(): port allows neither read nor write subscription, not used"); } } } if (device != null) { m_devices.add(device); } } } } /*** AlsaMidiDeviceProvider.java ***/
codebyravi/otter
autoscale_cloudroast/test_repo/autoscale/functional/scheduler/test_list_scheduler_policies.py
<reponame>codebyravi/otter """ Test list scheduler policies (at and cron style). """ from test_repo.autoscale.fixtures import ScalingGroupPolicyFixture class ListSchedulerScalingPolicy(ScalingGroupPolicyFixture): """ Verify list scheduler policies. """ def setUp(self): """ Create 2 scheduler policies, one at-style and another cron-style on a scaling group with an existing webhook type policy """ super(ListSchedulerScalingPolicy, self).setUp() self.at_value = self.autoscale_behaviors.get_time_in_utc(600) self.cron_value = '0 */10 * * *' self.at_style_policy = (self.autoscale_behaviors .create_schedule_policy_given( group_id=self.group.id, sp_change=self.sp_change, schedule_at=self.at_value)) self.assertEquals( self.at_style_policy['status_code'], 201, msg='Create schedule policy (at style) failed with {0} for ' 'group {1}' .format(self.at_style_policy['status_code'], self.group.id)) self.cron_style_policy = (self.autoscale_behaviors .create_schedule_policy_given( group_id=self.group.id, sp_change=self.sp_change, schedule_cron=self.cron_value)) self.assertEquals( self.cron_style_policy['status_code'], 201, msg='Create schedule policy (cron style) failed with {0} for ' 'group {1}' .format(self.cron_style_policy['status_code'], self.group.id)) def test_list_scheduler_policies(self): """ Verify list scheduler policies' response code 200, headers and data. """ list_policies_resp = self.autoscale_client.list_policies(self.group.id) self.assertEquals( list_policies_resp.status_code, 200, msg='List for a policy failed with {0} for group {1}' .format(list_policies_resp.status_code, self.group.id)) self.validate_headers(list_policies_resp.headers) policy_id_list = [each_policy.id for each_policy in list_policies_resp.entity.policies] self.assertTrue(self.at_style_policy['id'] in policy_id_list) self.assertTrue(self.cron_style_policy['id'] in policy_id_list) self.assertTrue(self.policy['id'] in policy_id_list)
wilsonpage/gaia
apps/system/test/unit/value_picker_test.js
<filename>apps/system/test/unit/value_picker_test.js 'use strict'; requireApp('system/js/value_selector/value_picker.js'); suite('value selector/value picker', function() { var subject, stubByQuery, monthUnitStyle; teardown(function() { stubByQuery.restore(); }); setup(function() { stubByQuery = this.sinon.stub(document, 'querySelector') .returns(document.createElement('div'). appendChild(document.createElement('div'))); // month value picker monthUnitStyle = { valueDisplayedText: ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'], className: 'value-picker-month' }; var monthPickerContainer = document.querySelector('.value-picker-month'); subject = new ValuePicker(monthPickerContainer, monthUnitStyle); }); test('called querySelector', function() { assert.isTrue(stubByQuery.withArgs('.value-picker-month').calledOnce); assert.isFalse(stubByQuery.withArgs('.value-picker-year').calledOnce); }); test('getSelectedIndex', function() { subject._currentIndex = 2; assert.equal(subject.getSelectedIndex(), 2); }); test('setSelectedIndex', function() { var currentlySelected = subject.element.querySelector('.selected'); subject.setSelectedIndex(3); assert.isFalse(currentlySelected.classList.contains('selected')); assert.equal(subject.element.querySelector('.selected').textContent, monthUnitStyle.valueDisplayedText[3]); }); test('getSelectedDisplayedText', function() { assert.equal(subject.getSelectedDisplayedText(), monthUnitStyle.valueDisplayedText[0]); }); test('setSelectedDisplayedText', function() { subject._currentIndex = 3; assert.equal(subject.getSelectedDisplayedText(), monthUnitStyle.valueDisplayedText[3]); }); test('setRange', function() { subject.setRange(0, 11); assert.equal(subject._lower, 0); assert.equal(subject._upper, 11); }); });
egpbos/egp
egp/__version__.py
<gh_stars>0 __version__ = '0.1.0' __author__ = "<NAME>. <NAME>" __email__ = '<EMAIL>'
benhunter/ctf
htb/fatty-10.10.10.174/fatty-client/org/springframework/beans/factory/parsing/PropertyEntry.java
package org.springframework.beans.factory.parsing;

import org.springframework.util.StringUtils;

/**
 * {@link ParseState} entry representing a JavaBean property.
 *
 * (Re-formatted from a decompiled class: the original line-number comment
 * scaffolding has been removed; logic is unchanged.)
 */
public class PropertyEntry implements ParseState.Entry {

   private final String name;

   /**
    * Create a new entry for the property with the supplied name.
    *
    * @param name the property name; must contain text
    * @throws IllegalArgumentException if the name is empty or whitespace-only
    */
   public PropertyEntry(String name) {
      if (!StringUtils.hasText(name)) {
         throw new IllegalArgumentException("Invalid property name '" + name + "'.");
      }
      this.name = name;
   }

   @Override
   public String toString() {
      return "Property '" + this.name + "'";
   }

}
bonitobonita24/Mayan-EDMS
mayan/apps/appearance/tests/test_views.py
from unittest import skip

from selenium.common.exceptions import NoAlertPresentException

from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase

from mayan.apps.user_management.permissions import (
    permission_user_edit, permission_user_view
)
from mayan.apps.testing.tests.base import GenericViewTestCase
from mayan.apps.testing.tests.mixins import SeleniumTestMixin

from ..events import (
    event_theme_created, event_theme_edited, event_user_theme_settings_edited
)
from ..models import Theme
from ..permissions import (
    permission_theme_create, permission_theme_delete, permission_theme_edit,
    permission_theme_view
)

from .mixins import (
    ThemeTestMixin, ThemeViewTestMixin, UserThemeSettingsViewTestMixin
)


@skip('Skip until a synchronous live server class is added.')
class BasePlainViewTestCase(
    SeleniumTestMixin, StaticLiveServerTestCase, GenericViewTestCase
):
    """
    Browser-driven checks that the login view does not execute a
    javascript: URL fragment (XSS via redirect fragment).
    """
    auto_add_test_view = True
    test_view_url = r'^javascript:alert\("XSS"\)/$'
    test_view_is_public = True
    test_view_template = 'javascript_view'

    def test_login_view_url_fragment_xss(self):
        # Should redirect and not display an alert
        self._open_url(
            fragment='#javascript:alert("XSS")', viewname=settings.LOGIN_URL
        )
        with self.assertRaises(expected_exception=NoAlertPresentException):
            self.webdriver.switch_to_alert()

    def test_login_view_url_redirect(self):
        self._open_url(
            fragment='#javascript:alert("XSS")', viewname=settings.LOGIN_URL
        )
        self.assertTrue(self.test_view_template in self.webdriver.page_source)


class ThemeViewTestCase(
    ThemeTestMixin, ThemeViewTestMixin, GenericViewTestCase
):
    """
    Theme CRUD views: each operation is exercised both without the required
    permission/access (expecting 403/404 and no model or event changes) and
    with it (expecting a redirect and, where applicable, an emitted event).
    """
    def test_theme_create_view_no_permission(self):
        theme_count = Theme.objects.count()

        self._clear_events()

        response = self._request_test_theme_create_view()
        self.assertEqual(response.status_code, 403)

        self.assertEqual(Theme.objects.count(), theme_count)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_theme_create_view_with_permissions(self):
        self.grant_permission(permission=permission_theme_create)

        theme_count = Theme.objects.count()

        self._clear_events()

        response = self._request_test_theme_create_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(Theme.objects.count(), theme_count + 1)

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, None)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_theme)
        self.assertEqual(events[0].verb, event_theme_created.id)

    def test_theme_delete_view_no_permission(self):
        self._create_test_theme()

        theme_count = Theme.objects.count()

        self._clear_events()

        response = self._request_test_theme_delete_view()
        self.assertEqual(response.status_code, 404)

        self.assertEqual(Theme.objects.count(), theme_count)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_theme_delete_view_with_access(self):
        self._create_test_theme()

        self.grant_access(
            obj=self.test_theme, permission=permission_theme_delete
        )

        theme_count = Theme.objects.count()

        self._clear_events()

        response = self._request_test_theme_delete_view()
        self.assertEqual(response.status_code, 302)

        self.assertEqual(Theme.objects.count(), theme_count - 1)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_theme_edit_view_no_permission(self):
        self._create_test_theme()

        theme_label = self.test_theme.label

        self._clear_events()

        response = self._request_test_theme_edit_view()
        self.assertEqual(response.status_code, 404)

        self.test_theme.refresh_from_db()
        self.assertEqual(self.test_theme.label, theme_label)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_theme_edit_view_with_access(self):
        self._create_test_theme()

        self.grant_access(
            obj=self.test_theme, permission=permission_theme_edit
        )

        theme_label = self.test_theme.label

        self._clear_events()

        response = self._request_test_theme_edit_view()
        self.assertEqual(response.status_code, 302)

        self.test_theme.refresh_from_db()
        self.assertNotEqual(self.test_theme.label, theme_label)

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, None)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_theme)
        self.assertEqual(events[0].verb, event_theme_edited.id)

    def test_theme_list_view_with_no_permission(self):
        self._create_test_theme()

        self._clear_events()

        response = self._request_test_theme_list_view()
        self.assertNotContains(
            response=response, text=self.test_theme.label, status_code=200
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_theme_list_view_with_access(self):
        self._create_test_theme()

        self.grant_access(
            obj=self.test_theme, permission=permission_theme_view
        )

        self._clear_events()

        response = self._request_test_theme_list_view()
        self.assertContains(
            response=response, text=self.test_theme.label, status_code=200
        )

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)


class ThemeEffectViewsTestCase(ThemeTestMixin, GenericViewTestCase):
    """
    Regression check: deleting a theme that a user currently has selected
    must not break the rendering of unrelated views.
    """
    def test_normal_view_after_theme_delete(self):
        self._create_test_theme()
        self._test_case_user.theme_settings.theme = self.test_theme
        self._test_case_user.theme_settings.save()
        self.test_theme.delete()
        response = self.get(viewname='common:about_view')
        self.assertEqual(response.status_code, 200)


class CurrentUserViewTestCase(
    ThemeTestMixin, UserThemeSettingsViewTestMixin, GenericViewTestCase
):
    """
    A user may view and edit their own theme settings without any extra
    permission grant.
    """
    def setUp(self):
        super().setUp()
        self._create_test_theme()

    def test_current_user_theme_settings_detail_view_no_permission(self):
        self._clear_events()

        response = self._request_test_current_user_theme_settings_detail_view()
        self.assertEqual(response.status_code, 200)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_current_user_theme_settings_edit_view_no_permission(self):
        theme = self._test_case_user.theme_settings.theme

        self._clear_events()

        response = self._request_test_current_user_theme_settings_edit_view()
        self.assertEqual(response.status_code, 302)

        self._test_case_user.refresh_from_db()
        self.assertNotEqual(self._test_case_user.theme_settings.theme, theme)

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, None)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self._test_case_user)
        self.assertEqual(events[0].verb, event_user_theme_settings_edited.id)


class SuperUserThemeSettingsViewTestCase(
    ThemeTestMixin, UserThemeSettingsViewTestMixin, GenericViewTestCase
):
    """
    Superuser accounts are excluded from these views: every request returns
    404 even when the requester holds the user view/edit access.
    """
    def setUp(self):
        super().setUp()
        self._create_test_superuser()
        self._create_test_theme()

    def test_superuser_theme_settings_detail_view_no_permission(self):
        self._clear_events()

        response = self._request_test_superuser_theme_settings_detail_view()
        self.assertEqual(response.status_code, 404)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_superuser_theme_settings_detail_view_with_access(self):
        self.grant_access(
            obj=self.test_superuser, permission=permission_user_view
        )

        self._clear_events()

        response = self._request_test_superuser_theme_settings_detail_view()
        self.assertEqual(response.status_code, 404)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_superuser_theme_settings_edit_view_no_permission(self):
        theme = self.test_superuser.theme_settings.theme

        self._clear_events()

        response = self._request_test_superuser_theme_settings_edit_view()
        self.assertEqual(response.status_code, 404)

        self.test_superuser.refresh_from_db()
        self.assertEqual(self.test_superuser.theme_settings.theme, theme)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_superuser_theme_settings_edit_view_with_access(self):
        theme = self.test_superuser.theme_settings.theme

        self.grant_access(
            obj=self.test_superuser, permission=permission_user_edit
        )

        self._clear_events()

        response = self._request_test_superuser_theme_settings_edit_view()
        self.assertEqual(response.status_code, 404)

        self.test_superuser.refresh_from_db()
        self.assertEqual(self.test_superuser.theme_settings.theme, theme)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)


class UserThemeSettingsViewTestCase(
    ThemeTestMixin, UserThemeSettingsViewTestMixin, GenericViewTestCase
):
    """
    Theme settings of another (non-super) user require the user view/edit
    access on that user object.
    """
    auto_create_test_user = True

    def setUp(self):
        super().setUp()
        self._create_test_theme()

    def test_user_theme_settings_detail_view_no_permission(self):
        self._clear_events()

        response = self._request_test_user_theme_settings_detail_view()
        self.assertEqual(response.status_code, 404)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_user_theme_settings_detail_view_with_access(self):
        self.grant_access(
            obj=self.test_user, permission=permission_user_view
        )

        self._clear_events()

        response = self._request_test_user_theme_settings_detail_view()
        self.assertEqual(response.status_code, 200)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_user_theme_settings_edit_view_no_permission(self):
        theme = self.test_user.theme_settings.theme

        self._clear_events()

        response = self._request_test_user_theme_settings_edit_view()
        self.assertEqual(response.status_code, 404)

        self.test_user.refresh_from_db()
        self.assertEqual(self.test_user.theme_settings.theme, theme)

        events = self._get_test_events()
        self.assertEqual(events.count(), 0)

    def test_user_theme_settings_edit_view_with_access(self):
        theme = self.test_user.theme_settings.theme

        self.grant_access(
            obj=self.test_user, permission=permission_user_edit
        )

        self._clear_events()

        response = self._request_test_user_theme_settings_edit_view()
        self.assertEqual(response.status_code, 302)

        self.test_user.refresh_from_db()
        self.assertNotEqual(self.test_user.theme_settings.theme, theme)

        events = self._get_test_events()
        self.assertEqual(events.count(), 1)

        self.assertEqual(events[0].action_object, None)
        self.assertEqual(events[0].actor, self._test_case_user)
        self.assertEqual(events[0].target, self.test_user)
        self.assertEqual(events[0].verb, event_user_theme_settings_edited.id)
MM0x00/Stowaway
admin/process/children.go
<gh_stars>1-10 package process import ( "Stowaway/admin/manager" "Stowaway/admin/printer" "Stowaway/admin/topology" "Stowaway/protocol" ) func nodeOffline(mgr *manager.Manager, topo *topology.Topology, uuid string) { topoTask := &topology.TopoTask{ Mode: topology.DELNODE, UUID: uuid, } topo.TaskChan <- topoTask result := <-topo.ResultChan allNodes := result.AllNodes for _, nodeUUID := range allNodes { backwardTask := &manager.BackwardTask{ Mode: manager.B_FORCESHUTDOWN, UUID: nodeUUID, } mgr.BackwardManager.TaskChan <- backwardTask <-mgr.BackwardManager.ResultChan forwardTask := &manager.ForwardTask{ Mode: manager.F_FORCESHUTDOWN, UUID: nodeUUID, } mgr.ForwardManager.TaskChan <- forwardTask <-mgr.ForwardManager.ResultChan socksTask := &manager.SocksTask{ Mode: manager.S_FORCESHUTDOWN, UUID: nodeUUID, } mgr.SocksManager.TaskChan <- socksTask <-mgr.SocksManager.ResultChan } topoTask = &topology.TopoTask{ Mode: topology.CALCULATE, } topo.TaskChan <- topoTask <-topo.ResultChan } func nodeReonline(mgr *manager.Manager, topo *topology.Topology, mess *protocol.NodeReonline) { node := topology.NewNode(mess.UUID, mess.IP) topoTask := &topology.TopoTask{ Mode: topology.REONLINENODE, Target: node, ParentUUID: mess.ParentUUID, IsFirst: false, } topo.TaskChan <- topoTask <-topo.ResultChan topoTask = &topology.TopoTask{ Mode: topology.CALCULATE, } topo.TaskChan <- topoTask <-topo.ResultChan topoTask = &topology.TopoTask{ Mode: topology.GETUUIDNUM, UUID: mess.UUID, } topo.TaskChan <- topoTask result := <-topo.ResultChan printer.Success("\r\n[*] Node %d is reonline!", result.IDNum) } func DispatchChildrenMess(mgr *manager.Manager, topo *topology.Topology) { for { message := <-mgr.ChildrenManager.ChildrenMessChan switch message.(type) { case *protocol.NodeOffline: mess := message.(*protocol.NodeOffline) nodeOffline(mgr, topo, mess.UUID) case *protocol.NodeReonline: mess := message.(*protocol.NodeReonline) nodeReonline(mgr, topo, mess) } } }
jjhenkel/dockerizeme
hard-gists/1935808/snippet.py
"""Simple utility script for semi-gracefully downgrading v3 notebooks to v2""" import io import os import sys from IPython.nbformat import current def heading_to_md(cell): """turn heading cell into corresponding markdown""" cell.cell_type = "markdown" level = cell.pop('level', 1) cell.source = '#'*level + ' ' + cell.source def raw_to_md(cell): """let raw passthrough as markdown""" cell.cell_type = "markdown" def downgrade(nb): """downgrade a v3 notebook to v2""" if nb.nbformat != 3: return nb nb.nbformat = 2 for ws in nb.worksheets: for cell in ws.cells: if cell.cell_type == 'heading': heading_to_md(cell) elif cell.cell_type == 'raw': raw_to_md(cell) return nb def downgrade_ipynb(fname): base, ext = os.path.splitext(fname) newname = base+'.v2'+ext print "downgrading %s -> %s" % (fname, newname) with io.open(fname, 'r', encoding='utf8') as f: nb = current.read(f, 'json') nb = downgrade(nb) with open(newname, 'w') as f: current.write(nb, f, 'json') if __name__ == '__main__': map(downgrade_ipynb, sys.argv[1:])
spr-networks/super
frontend/src/api/API.js
// Resolve the API base URL: REACT_APP_API when set (falling back to '/' and
// the mock API when it is not a valid URL), otherwise the current origin.
export const apiURL = () => {
  const { REACT_APP_API } = process.env

  if (REACT_APP_API) {
    try {
      let url = new URL(REACT_APP_API)
      //console.log('[API] using base:' + url)
      return url.toString()
    } catch (e) {
      // REACT_APP_API=mock -- dont load in prod
      let MockAPI = import('../api/MockAPI').then((m) => m.default())
      return '/'
    }
  }

  return document.location.origin + '/'
}

//request helper
class API {
  baseURL = ''
  authHeaders = ''

  constructor(baseURL = '') {
    this.baseURL = apiURL() + baseURL.replace(/^\/+/, '')
    this.authHeaders = this.getAuthHeaders()
  }

  // reads from localStorage if no username provided
  // FIX: the published source had an anonymization placeholder here
  // (`password = <PASSWORD>`), which is a syntax error; default to null
  // to mirror the username parameter.
  getAuthHeaders(username = null, password = null) {
    if (username && password) {
      return 'Basic ' + btoa(username + ':' + password)
    }

    let user = JSON.parse(localStorage.getItem('user'))
    return user && user.authdata ? 'Basic ' + user.authdata : ''
  }

  setAuthHeaders(username = '', password = '') {
    this.authHeaders = 'Basic ' + btoa(username + ':' + password)
  }

  request(method = 'GET', url, body) {
    if (!this.authHeaders) {
      this.authHeaders = this.getAuthHeaders()
    }

    let headers = {
      Authorization: this.authHeaders,
      'X-Requested-With': 'react',
      'Content-Type': 'application/json'
    }

    let opts = {
      method,
      headers
    }

    if (body) {
      opts.body = JSON.stringify(body)
    }

    let baseURL = this.baseURL
    // get rid of //
    if (url[0] == '/' && baseURL.length && baseURL[baseURL.length - 1] == '/') {
      url = url.substr(1)
    }

    // if forced to not return data
    let skipReturnValue = method == 'DELETE'

    let _url = `${baseURL}${url}`
    //console.log('[API] fetch', _url, 'Authorization:', this.authHeaders)
    //console.log(`[API] fetch method=`, method, 'skip=', skipReturnValue)

    return fetch(_url, opts).then((response) => {
      if (!response.ok) {
        return Promise.reject({ message: response.status })
      }

      const contentType = response.headers.get('Content-Type')
      if (!contentType || skipReturnValue) {
        return Promise.resolve(true)
      }

      if (contentType.includes('application/json')) {
        return response.json()
      } else if (contentType.includes('text/plain')) {
        return response.text()
      }

      return Promise.reject({ message: 'unknown Content-Type' })
    })
  }

  get(url) {
    return this.request('GET', url)
  }

  put(url, data) {
    return this.request('PUT', url, data)
  }

  delete(url, data) {
    return this.request('DELETE', url, data)
  }
}

export default API
export const api = new API()

// Probe /status with the supplied credentials; callback(true) only when the
// backend answers exactly 'Online'.
export const testLogin = (username, password, callback) => {
  api.setAuthHeaders(username, password)
  api
    .get('/status')
    .then((data) => {
      return callback(data == 'Online')
    })
    .catch((error) => callback(false, error))
}

// Persist credentials for getAuthHeaders() to pick up on later requests.
export const saveLogin = (username, password) => {
  localStorage.setItem(
    'user',
    JSON.stringify({
      authdata: btoa(username + ':' + password),
      username: username,
      password: password
    })
  )
}
codingfriend1/meanbase
src/hooks/allow-upsert.js
import errors from 'feathers-errors'

// Feathers hook factory: forces Mongoose upsert mode on the request by
// setting `params.mongoose.upsert = true` (replacing any existing
// `mongoose` entry). Leaves the hook untouched when it has no params.
export default options => {
  return hook => {
    if (!hook.params) {
      return
    }
    const params = Object.assign({}, hook.params)
    params.mongoose = { upsert: true }
    hook.params = params
  }
}
SaeidSamadi/mc_rtc
src/mc_observers/BodySensorObserver.cpp
/*
 * Copyright 2015-2019 CNRS-UM LIRMM, CNRS-AIST JRL
 */

#include <mc_observers/BodySensorObserver.h>
#include <mc_observers/ObserverMacros.h>

#include <mc_control/MCController.h>

#include <mc_rtc/gui/Arrow.h>

namespace mc_observers
{

// Reads "UpdateFrom" ("estimator" or anything else -> control) and the
// floating-base sensor name from the configuration. With an empty/absent
// "UpdateFrom", updateFrom_ keeps its default.
BodySensorObserver::BodySensorObserver(const std::string & name, double dt, const mc_rtc::Configuration & config)
: Observer(name, dt, config)
{
  auto updateConfig = config("UpdateFrom", std::string{});
  if(!updateConfig.empty())
  {
    if(updateConfig == "estimator")
    {
      updateFrom_ = Update::Estimator;
    }
    else
    {
      updateFrom_ = Update::Control;
    }
  }

  fbSensorName_ = config("FloatingBaseSensor", std::string("FloatingBase"));

  desc_ = name_ + " (sensor=" + fbSensorName_ + ",update=" + updateConfig + ")";
}

// In estimator mode the configured body sensor must exist; then performs an
// initial run() so posW_/velW_ are valid immediately after reset.
void BodySensorObserver::reset(const mc_control::MCController & ctl)
{
  if(updateFrom_ == Update::Estimator)
  {
    if(!ctl.robot().hasBodySensor(fbSensorName_))
    {
      LOG_ERROR_AND_THROW(std::runtime_error,
                          "[BodySensorObserver] Bodysensor " << fbSensorName_
                                                             << " is requested but does not exist in robot "
                                                             << ctl.robot().name());
    }
  }
  run(ctl);
}

bool BodySensorObserver::run(const mc_control::MCController & ctl)
{
  const auto & robot = ctl.robot();
  const auto & realRobot = ctl.realRobot();
  if(updateFrom_ == Update::Estimator)
  {
    // Update free flyer from body sensor
    // Note that if the body to which the sensor is attached is not the
    // floating base, the kinematic transformation between that body and the
    // floating base is used to obtain the floating base pose.
    // It is assumed here that the floating base sensor and encoders are
    // synchronized.
    const auto & sensor = robot.bodySensor(fbSensorName_);
    const auto & fb = realRobot.mb().body(0).name();
    // World pose of the sensor frame.
    sva::PTransformd X_0_s(sensor.orientation(), sensor.position());
    // Transform from the sensor frame back to its parent body, then from
    // that body to the floating base, composed into sensor -> floating base.
    const auto X_s_b = sensor.X_b_s().inv();
    sva::PTransformd X_b_fb = realRobot.X_b1_b2(sensor.parentBody(), fb);
    sva::PTransformd X_s_fb = X_b_fb * X_s_b;
    posW_ = X_s_fb * X_0_s;
    // Sensor velocity expressed in the floating-base frame.
    sva::MotionVecd sensorVel(sensor.angularVelocity(), sensor.linearVelocity());
    velW_ = X_s_fb * sensorVel;
  }
  else /* if(updateFrom_ == Update::Control) */
  {
    // Control mode: simply mirror the control robot's floating-base state.
    posW_ = robot.posW();
    velW_ = robot.velW();
  }
  return true;
}

// Pushes the observed floating-base pose and velocity onto the real robot.
void BodySensorObserver::updateRobots(const mc_control::MCController & /* ctl */, mc_rbdyn::Robots & realRobots)
{
  realRobots.robot().posW(posW_);
  realRobots.robot().velW(velW_);
}

void BodySensorObserver::addToLogger(const mc_control::MCController & ctl, mc_rtc::Logger & logger)
{
  Observer::addToLogger(ctl, logger);
  logger.addLogEntry("observer_" + name() + "_posW", [this]() { return posW_; });
  logger.addLogEntry("observer_" + name() + "_velW", [this]() { return velW_; });
}

// Entries added in addToLogger must be removed symmetrically here.
void BodySensorObserver::removeFromLogger(mc_rtc::Logger & logger)
{
  Observer::removeFromLogger(logger);
  logger.removeLogEntry("observer_" + name() + "_posW");
  logger.removeLogEntry("observer_" + name() + "_velW");
}

// GUI arrow from the floating-base position along the observed linear
// velocity (tip at position + velocity).
void BodySensorObserver::addToGUI(const mc_control::MCController &, mc_rtc::gui::StateBuilder & gui)
{
  gui.addElement({"Observers", name()},
                 mc_rtc::gui::Arrow("Velocity", mc_rtc::gui::ArrowConfig(mc_rtc::gui::Color{1., 0., 0.}),
                                    [this]() -> const Eigen::Vector3d & { return posW_.translation(); },
                                    [this]() -> Eigen::Vector3d {
                                      Eigen::Vector3d end = posW_.translation() + velW_.linear();
                                      return end;
                                    }));
}

} // namespace mc_observers

EXPORT_OBSERVER_MODULE("BodySensor", mc_observers::BodySensorObserver)
cokeSchlumpf/ada
ada-vcs/src/main/java/ada/domain/dvc/values/repository/VersionState.java
package ada.domain.dvc.values.repository;

/**
 * State of a repository version.
 * <p>
 * NOTE(review): semantics inferred from the constant names only — the
 * transitions (initialized -> pushed -> analyzing -> analyzed) should be
 * confirmed against the code that assigns these states.
 */
public enum VersionState {

   INITIALIZED, PUSHED, ANALYZING, ANALYZED

}
jodi-one/jod
jodi_odi12/src/main/java/oracle/odi/domain/project/ProcedureOptionBuilder.java
package oracle.odi.domain.project;

import one.jodi.etl.internalmodel.procedure.OptionInternal;

/**
 * Builds an option on an ODI user procedure from its internal-model
 * description. Implementations live elsewhere; this interface only fixes
 * the contract.
 */
public interface ProcedureOptionBuilder {

    /**
     * Apply the given internal-model option to the target user procedure.
     *
     * @param userProcedure  the ODI procedure being populated
     * @param optionInternal the internal-model option to apply
     */
    void build(OdiUserProcedure userProcedure, OptionInternal optionInternal);
}
yupengKenny/baw-server
app/modules/set_current.rb
# frozen_string_literal: true

# Sets current values for the current model.
#
# Controller concern that copies the per-request user and ability into the
# thread-local Current attributes before every action.
module SetCurrent
  extend ActiveSupport::Concern

  included do
    before_action :set_current_user
    before_action :set_current_ability
  end

  private

  def set_current_user
    Current.user = current_user
  end

  def set_current_ability
    Current.ability = current_ability
  end
end
kepozu/Jockey
app/src/main/java/com/marverenic/music/player/extensions/scrobbler/ScrobblerExtension.java
<filename>app/src/main/java/com/marverenic/music/player/extensions/scrobbler/ScrobblerExtension.java package com.marverenic.music.player.extensions.scrobbler; import android.content.Context; import com.marverenic.music.data.store.ReadOnlyPreferenceStore; import com.marverenic.music.model.Song; import com.marverenic.music.player.MusicPlayer; import com.marverenic.music.player.extensions.MusicPlayerExtension; public class ScrobblerExtension extends MusicPlayerExtension { private boolean mEnabled; private SlsMessenger mMessenger; public ScrobblerExtension(Context context) { mMessenger = new SlsMessenger(context); } @Override public void onCreateMusicPlayer(MusicPlayer musicPlayer, ReadOnlyPreferenceStore preferences) { onSettingsChanged(preferences); } @Override public void onSettingsChanged(ReadOnlyPreferenceStore preferences) { mEnabled = preferences.getEqualizerEnabled(); } @Override public void onSongStarted(MusicPlayer musicPlayer) { if (mEnabled) { mMessenger.sendStart(musicPlayer.getNowPlaying()); } } @Override public void onSongCompleted(MusicPlayer musicPlayer, Song completed) { if (mEnabled) { mMessenger.sendComplete(completed); } } @Override public void onSongPaused(MusicPlayer musicPlayer) { if (mEnabled) { mMessenger.sendPause(musicPlayer.getNowPlaying()); } } @Override public void onSongResumed(MusicPlayer musicPlayer) { if (mEnabled) { mMessenger.sendResume(musicPlayer.getNowPlaying()); } } }
marvec/SilverWare
hystrix-microservice-provider/src/test/java/io/silverware/microservices/providers/hystrix/execution/MicroserviceHystrixCommandTest.java
<reponame>marvec/SilverWare<filename>hystrix-microservice-provider/src/test/java/io/silverware/microservices/providers/hystrix/execution/MicroserviceHystrixCommandTest.java /* * -----------------------------------------------------------------------\ * SilverWare *   * Copyright (C) 2016 the original author or authors. *   * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * -----------------------------------------------------------------------/ */ package io.silverware.microservices.providers.hystrix.execution; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.netflix.hystrix.HystrixCommand; import com.netflix.hystrix.HystrixCommand.Setter; import com.netflix.hystrix.HystrixCommandGroupKey; import com.netflix.hystrix.HystrixCommandProperties; import com.netflix.hystrix.exception.HystrixBadRequestException; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import org.testng.annotations.Test; import java.util.Collections; import java.util.Set; import java.util.concurrent.Callable; public class MicroserviceHystrixCommandTest extends HystrixTestBase { private static final Logger log = LogManager.getLogger(MicroserviceHystrixCommandTest.class); private static final HystrixCommandGroupKey GROUP_KEY = HystrixCommandGroupKey.Factory.asKey("TestingGroup"); private static final int ERROR_THRESHOLD_PERCENTAGE = 50; private static final int REQUEST_VOLUME_THRESHOLD = 5; private static final int 
SLEEP_WINDOW_IN_MILLISECONDS = 800; private boolean circuitClosed = false; @Test public void testPrimaryCall() { Boolean result = executeFailingCommand(false); assertThat(result).as("Callable has not been executed.").isNotNull().isTrue(); } @Test public void testFallback() { Boolean result = executeFailingCommand(true); assertThat(result).as("Fallback has not been executed.").isNotNull().isFalse(); } @Test public void testOpenAndCloseCircuit() throws InterruptedException { for (int i = 0; i < REQUEST_VOLUME_THRESHOLD; i++) { log.debug(i + ": circuit closed"); executeFailingCommand(true); assertCircuitClosed(); Thread.sleep(200); } for (int i = 0; i < REQUEST_VOLUME_THRESHOLD; i++) { log.debug(i + ": circuit open"); executeFailingCommand(false); assertCircuitOpen(); } Thread.sleep(SLEEP_WINDOW_IN_MILLISECONDS); executeFailingCommand(false); assertCircuitClosed(); } @Test public void testRequestCaching() { HystrixCommand<Integer> command; command = createCachedCommand(1); assertThat(command.execute()).isEqualTo(1); assertThat(command.isResponseFromCache()).isFalse(); command = createCachedCommand(2); assertThat(command.execute()).isEqualTo(2); assertThat(command.isResponseFromCache()).isFalse(); command = createCachedCommand(1); assertThat(command.execute()).isEqualTo(1); assertThat(command.isResponseFromCache()).isTrue(); } @Test public void testIgnoredExceptions() { assertThatThrownBy(() -> executeFailingCommand(true, Collections.singleton(FailingCallException.class))) .isInstanceOf(HystrixBadRequestException.class) .hasCauseInstanceOf(FailingCallException.class); } @Test public void testIgnoredExceptionsSuperType() { assertThatThrownBy(() -> executeFailingCommand(true, Collections.singleton(RuntimeException.class))) .isInstanceOf(HystrixBadRequestException.class) .hasCauseInstanceOf(FailingCallException.class); } @Test public void testIgnoredExceptionsOther() { boolean result = executeFailingCommand(true, Collections.singleton(IllegalArgumentException.class)); 
assertThat(result).as("Exception should have been thrown and caught").isFalse(); } private Boolean executeFailingCommand(boolean fail) { return executeFailingCommand(fail, Collections.emptySet()); } private Boolean executeFailingCommand(boolean fail, Set<Class<? extends Throwable>> ignoredExceptions) { circuitClosed = false; HystrixCommandProperties.Setter commandProperties = HystrixCommandProperties.Setter(); commandProperties.withCircuitBreakerEnabled(true) .withCircuitBreakerErrorThresholdPercentage(ERROR_THRESHOLD_PERCENTAGE) .withCircuitBreakerRequestVolumeThreshold(REQUEST_VOLUME_THRESHOLD) .withCircuitBreakerSleepWindowInMilliseconds(SLEEP_WINDOW_IN_MILLISECONDS); HystrixCommand.Setter setter = Setter.withGroupKey(GROUP_KEY) .andCommandPropertiesDefaults(commandProperties); Callable<Boolean> callable = () -> { circuitClosed = true; if (fail) { throw new FailingCallException(); } else { return true; } }; Callable<Boolean> fallback = () -> false; MicroserviceHystrixCommand<Boolean> command = new MicroserviceHystrixCommand.Builder<>(setter, callable) .fallback(fallback) .ignoredExceptions(ignoredExceptions) .build(); return command.execute(); } private HystrixCommand<Integer> createCachedCommand(int number) { HystrixCommandProperties.Setter commandProperties = HystrixCommandProperties.Setter(); commandProperties.withRequestCacheEnabled(true); HystrixCommand.Setter setter = Setter.withGroupKey(GROUP_KEY) .andCommandPropertiesDefaults(commandProperties); Callable<Integer> callable = () -> number; Callable<Integer> fallback = () -> 0; return new MicroserviceHystrixCommand.Builder<>(setter, callable) .fallback(fallback) .cacheKey(String.valueOf(number)) .build(); } private void assertCircuitClosed() { assertThat(circuitClosed).as("Circuit should have been closed").isTrue(); } private void assertCircuitOpen() { assertThat(circuitClosed).as("Circuit should have been open").isFalse(); } }
HelionDevPlatform/bloodhound
trac/trac/prefs/tests/functional.py
#!/usr/bin/python
from trac.tests.functional import *


#TODO: split this into multiple smaller testcases
class TestPreferences(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Set preferences for admin user"""
        prefs_url = self._tester.url + "/prefs"
        tc.follow('Preferences')
        tc.url(prefs_url)
        tc.notfind('Your preferences have been saved.')
        tc.formvalue('userprefs', 'name', ' System Administrator ')
        tc.formvalue('userprefs', 'email', ' <EMAIL> ')
        tc.submit()
        tc.find('Your preferences have been saved.')
        tc.follow('Date & Time')
        tc.url(prefs_url + '/datetime')
        tc.formvalue('userprefs', 'tz', 'GMT -10:00')
        tc.submit()
        tc.find('Your preferences have been saved.')
        tc.follow('General')
        tc.url(prefs_url)
        tc.notfind('Your preferences have been saved.')
        # Leading/trailing whitespace in the submitted values must have been
        # stripped before storage.
        tc.find('value="System Administrator"')
        tc.find(r'value="<EMAIL>"')
        tc.follow('Date & Time')
        tc.url(prefs_url + '/datetime')
        tc.find('GMT -10:00')


class RegressionTestRev5785(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of the fix in r5785"""
        prefs_url = self._tester.url + "/prefs"
        tc.follow('Preferences')
        tc.url(prefs_url)
        tc.follow('Logout')
        tc.notfind(internal_error)  # See [5785]
        tc.follow('Login')


class RegressionTestTicket5765(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5765
        Unable to turn off 'Enable access keys' in Preferences
        """
        self._tester.go_to_front()
        tc.follow('Preferences')
        tc.follow('Keyboard Shortcuts')
        tc.formvalue('userprefs', 'accesskeys', True)
        tc.submit()
        tc.find('name="accesskeys".*checked="checked"')
        tc.formvalue('userprefs', 'accesskeys', False)
        tc.submit()
        tc.notfind('name="accesskeys".*checked="checked"')


def functionalSuite(suite=None):
    # Append these cases to an existing suite, or build the default one.
    if not suite:
        import trac.tests.functional.testcases
        suite = trac.tests.functional.testcases.functionalSuite()
    suite.addTest(TestPreferences())
    suite.addTest(RegressionTestRev5785())
    suite.addTest(RegressionTestTicket5765())
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='functionalSuite')
chandaweia/onvm-mos-new
core/src/include/memory_mgt.h
#ifndef __MEMORY_MGT_H_
#define __MEMORY_MGT_H_

#include <stddef.h> /* size_t (was previously used without any include) */

/* Opaque pool handle; the definition lives in the implementation file. */
struct mem_pool;
typedef struct mem_pool* mem_pool_t;

/* create a memory pool with a chunk size and total size
   and return the pointer to the memory pool */
mem_pool_t
MPCreate(int chunk_size, size_t total_size, int is_hugepage);

/* allocate one chunk */
void *
MPAllocateChunk(mem_pool_t mp);

/* free one chunk */
void
MPFreeChunk(mem_pool_t mp, void *p);

/* destroy the memory pool */
void
MPDestroy(mem_pool_t mp);

/* return the number of free chunks */
int
MPGetFreeChunks(mem_pool_t mp);

#endif /* __MEMORY_MGT_H_ */
naz013/Reminder
app/src/main/java/com/elementary/tasks/core/utils/UriUtil.java
package com.elementary.tasks.core.utils; import android.content.Context; import android.net.Uri; import androidx.annotation.NonNull; import androidx.core.content.FileProvider; import com.elementary.tasks.BuildConfig; import java.io.File; import timber.log.Timber; /** * Copyright 2017 <NAME> * <p/> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ public final class UriUtil { @NonNull public static Uri getUri(Context context, @NonNull String filePath) { Timber.d("getUri: %s", BuildConfig.APPLICATION_ID); if (Module.isNougat()) { return FileProvider.getUriForFile(context, BuildConfig.APPLICATION_ID + ".provider", new File(filePath)); } else { return Uri.fromFile(new File(filePath)); } } @NonNull public static Uri getUri(Context context, @NonNull File file) { Timber.d("getUri: %s", BuildConfig.APPLICATION_ID); if (Module.isNougat()) { return FileProvider.getUriForFile(context, BuildConfig.APPLICATION_ID + ".provider", file); } else { return Uri.fromFile(file); } } }
melvinhgf/instantsearch.js
src/connectors/numeric-menu/connectNumericMenu.js
<gh_stars>0 import { checkRendering, createDocumentationMessageGenerator, isFiniteNumber, noop, } from '../../lib/utils'; const withUsage = createDocumentationMessageGenerator({ name: 'numeric-menu', connector: true, }); /** * @typedef {Object} NumericMenuOption * @property {string} name Name of the option. * @property {number} start Lower bound of the option (>=). * @property {number} end Higher bound of the option (<=). */ /** * @typedef {Object} NumericMenuItem * @property {string} label Name of the option. * @property {string} value URL encoded of the bounds object with the form `{start, end}`. This value can be used verbatim in the webpage and can be read by `refine` directly. If you want to inspect the value, you can do `JSON.parse(window.decodeURI(value))` to get the object. * @property {boolean} isRefined True if the value is selected. */ /** * @typedef {Object} CustomNumericMenuWidgetOptions * @property {string} attribute Name of the attribute for filtering. * @property {NumericMenuOption[]} items List of all the items. * @property {function(object[]):object[]} [transformItems] Function to transform the items passed to the templates. */ /** * @typedef {Object} NumericMenuRenderingOptions * @property {function(item.value): string} createURL Creates URLs for the next state, the string is the name of the selected option. * @property {NumericMenuItem[]} items The list of available choices. * @property {boolean} hasNoResults `true` if the last search contains no result. * @property {function(item.value)} refine Sets the selected value and trigger a new search. * @property {Object} widgetParams All original `CustomNumericMenuWidgetOptions` forwarded to the `renderFn`. */ /** * **NumericMenu** connector provides the logic to build a custom widget that will give the user the ability to choose a range on to refine the search results. * * It provides a `refine(item)` function to refine on the selected range. 
* * **Requirement:** the attribute passed as `attribute` must be present in "attributes for faceting" on the Algolia dashboard or configured as attributesForFaceting via a set settings call to the Algolia API. * @function connectNumericMenu * @type {Connector} * @param {function(NumericMenuRenderingOptions, boolean)} renderFn Rendering function for the custom **NumericMenu** widget. * @param {function} unmountFn Unmount function called when the widget is disposed. * @return {function(CustomNumericMenuWidgetOptions)} Re-usable widget factory for a custom **NumericMenu** widget. * @example * // custom `renderFn` to render the custom NumericMenu widget * function renderFn(NumericMenuRenderingOptions, isFirstRendering) { * if (isFirstRendering) { * NumericMenuRenderingOptions.widgetParams.containerNode.html('<ul></ul>'); * } * * NumericMenuRenderingOptions.widgetParams.containerNode * .find('li[data-refine-value]') * .each(function() { $(this).off('click'); }); * * var list = NumericMenuRenderingOptions.items.map(function(item) { * return '<li data-refine-value="' + item.value + '">' + * '<input type="radio"' + (item.isRefined ? 
' checked' : '') + '/> ' + * item.label + '</li>'; * }); * * NumericMenuRenderingOptions.widgetParams.containerNode.find('ul').html(list); * NumericMenuRenderingOptions.widgetParams.containerNode * .find('li[data-refine-value]') * .each(function() { * $(this).on('click', function(event) { * event.preventDefault(); * event.stopPropagation(); * NumericMenuRenderingOptions.refine($(this).data('refine-value')); * }); * }); * } * * // connect `renderFn` to NumericMenu logic * var customNumericMenu = instantsearch.connectors.connectNumericMenu(renderFn); * * // mount widget on the page * search.addWidget( * customNumericMenu({ * containerNode: $('#custom-numeric-menu-container'), * attribute: 'price', * items: [ * {name: 'All'}, * {end: 4, name: 'less than 4'}, * {start: 4, end: 4, name: '4'}, * {start: 5, end: 10, name: 'between 5 and 10'}, * {start: 10, name: 'more than 10'}, * ], * }) * ); */ export default function connectNumericMenu(renderFn, unmountFn = noop) { checkRendering(renderFn, withUsage()); return (widgetParams = {}) => { const { attribute, items, transformItems = x => x } = widgetParams; if (!attribute) { throw new Error(withUsage('The `attribute` option is required.')); } if (!items) { throw new Error( withUsage('The `items` option expects an array of objects.') ); } return { init({ helper, createURL, instantSearchInstance }) { this._refine = facetValue => { const refinedState = refine( helper.state, attribute, items, facetValue ); helper.setState(refinedState).search(); }; this._createURL = state => facetValue => createURL(refine(state, attribute, items, facetValue)); this._prepareItems = state => items.map(({ start, end, label }) => ({ label, value: window.encodeURI(JSON.stringify({ start, end })), isRefined: isRefined(state, attribute, { start, end }), })); renderFn( { createURL: this._createURL(helper.state), items: transformItems(this._prepareItems(helper.state)), hasNoResults: true, refine: this._refine, instantSearchInstance, widgetParams, }, true 
); }, render({ results, state, instantSearchInstance }) { renderFn( { createURL: this._createURL(state), items: transformItems(this._prepareItems(state)), hasNoResults: results.nbHits === 0, refine: this._refine, instantSearchInstance, widgetParams, }, false ); }, dispose({ state }) { unmountFn(); return state.clearRefinements(attribute); }, getWidgetState(uiState, { searchParameters }) { const currentRefinements = searchParameters.getNumericRefinements( attribute ); const equal = currentRefinements['='] && currentRefinements['='][0]; if (equal || equal === 0) { return { ...uiState, numericMenu: { ...uiState.numericMenu, [attribute]: `${currentRefinements['=']}`, }, }; } const lowerBound = (currentRefinements['>='] && currentRefinements['>='][0]) || ''; const upperBound = (currentRefinements['<='] && currentRefinements['<='][0]) || ''; if (lowerBound !== '' || upperBound !== '') { if ( uiState.numericMenu && uiState.numericMenu[attribute] === `${lowerBound}:${upperBound}` ) return uiState; return { ...uiState, numericMenu: { ...uiState.numericMenu, [attribute]: `${lowerBound}:${upperBound}`, }, }; } return uiState; }, getWidgetSearchParameters(searchParameters, { uiState }) { let clearedParams = searchParameters.clearRefinements(attribute); const value = uiState.numericMenu && uiState.numericMenu[attribute]; if (!value) { return clearedParams; } const valueAsEqual = value.indexOf(':') === -1 && value; if (valueAsEqual) { return clearedParams.addNumericRefinement( attribute, '=', valueAsEqual ); } const [lowerBound, upperBound] = value.split(':').map(parseFloat); if (isFiniteNumber(lowerBound)) { clearedParams = clearedParams.addNumericRefinement( attribute, '>=', lowerBound ); } if (isFiniteNumber(upperBound)) { clearedParams = clearedParams.addNumericRefinement( attribute, '<=', upperBound ); } return clearedParams; }, }; }; } function isRefined(state, attribute, option) { const currentRefinements = state.getNumericRefinements(attribute); if (option.start !== 
undefined && option.end !== undefined) { if (option.start === option.end) { return hasNumericRefinement(currentRefinements, '=', option.start); } } if (option.start !== undefined) { return hasNumericRefinement(currentRefinements, '>=', option.start); } if (option.end !== undefined) { return hasNumericRefinement(currentRefinements, '<=', option.end); } if (option.start === undefined && option.end === undefined) { return Object.keys(currentRefinements).length === 0; } return undefined; } function refine(state, attribute, items, facetValue) { let resolvedState = state; const refinedOption = JSON.parse(window.decodeURI(facetValue)); const currentRefinements = resolvedState.getNumericRefinements(attribute); if (refinedOption.start === undefined && refinedOption.end === undefined) { return resolvedState.clearRefinements(attribute); } if (!isRefined(resolvedState, attribute, refinedOption)) { resolvedState = resolvedState.clearRefinements(attribute); } if (refinedOption.start !== undefined && refinedOption.end !== undefined) { if (refinedOption.start > refinedOption.end) { throw new Error('option.start should be > to option.end'); } if (refinedOption.start === refinedOption.end) { if (hasNumericRefinement(currentRefinements, '=', refinedOption.start)) { resolvedState = resolvedState.removeNumericRefinement( attribute, '=', refinedOption.start ); } else { resolvedState = resolvedState.addNumericRefinement( attribute, '=', refinedOption.start ); } return resolvedState; } } if (refinedOption.start !== undefined) { if (hasNumericRefinement(currentRefinements, '>=', refinedOption.start)) { resolvedState = resolvedState.removeNumericRefinement( attribute, '>=', refinedOption.start ); } else { resolvedState = resolvedState.addNumericRefinement( attribute, '>=', refinedOption.start ); } } if (refinedOption.end !== undefined) { if (hasNumericRefinement(currentRefinements, '<=', refinedOption.end)) { resolvedState = resolvedState.removeNumericRefinement( attribute, '<=', 
refinedOption.end ); } else { resolvedState = resolvedState.addNumericRefinement( attribute, '<=', refinedOption.end ); } } resolvedState.page = 0; return resolvedState; } function hasNumericRefinement(currentRefinements, operator, value) { const hasOperatorRefinements = currentRefinements[operator] !== undefined; return hasOperatorRefinements && currentRefinements[operator].includes(value); }
dariusbakunas/rawdisk
rawdisk/session.py
# -*- coding: utf-8 -*-
import rawdisk.scheme
import logging
from rawdisk.filesystems.detector import FilesystemDetector
from rawdisk.filesystems.unknown_volume import UnknownVolume
from rawdisk.plugins.plugin_manager import PluginManager
from rawdisk.scheme.mbr import SECTOR_SIZE
from rawdisk.scheme.common import PartitionScheme


class Session(object):
    """Main class used to start filesystem analysis.

    Typical use: create a :class:`Session` (plugins are loaded by default),
    call :meth:`load` with a disk image path, then inspect :attr:`volumes`.

    Attributes:
        volumes (list): detected volumes — initialized \
            :class:`Volume <rawdisk.filesystems.volume.Volume>` objects, or \
            :class:`UnknownVolume \
            <rawdisk.filesystems.unknown_volume.UnknownVolume>` placeholders \
            for partitions whose filesystem could not be identified.
        partition_scheme (enum): One of \
            :attr:`SCHEME_MBR <rawdisk.scheme.common.SCHEME_MBR>` \
            or :attr:`SCHEME_GPT <rawdisk.scheme.common.SCHEME_GPT>`, \
            or None before :meth:`load` is called.
    """
    def __init__(self, load_plugins=True):
        self.logger = logging.getLogger(__name__)
        # List of detected volumes, populated by load().
        self.__volumes = []
        # PartitionScheme enum value, set by load().
        self.__partition_scheme = None
        # Path of the last image/device loaded.
        self.__filename = None
        # Filesystem-detection plugin wrappers from PluginManager.
        self.__fs_plugins = []

        if load_plugins:
            self.load_plugins()

    @property
    def filesystem_plugins(self):
        # Loaded filesystem-detection plugin wrappers.
        return self.__fs_plugins

    def load_plugins(self):
        """Load filesystem detection plugins"""
        plugin_manager = PluginManager()
        self.__fs_plugins = plugin_manager.load_filesystem_plugins()

    @property
    def volumes(self):
        """Return a list of volumes"""
        return self.__volumes

    @property
    def partition_scheme(self):
        # Partitioning scheme detected by the last load(); None before that.
        return self.__partition_scheme

    @property
    def filename(self):
        # Path passed to the last load(); None before that.
        return self.__filename

    def __analyze_disk_image(self, filename, bs=512):
        # Not implemented yet — placeholder for future analysis entry point.
        pass

    def reload(self):
        # Not implemented yet — intended to re-run load() on the same file.
        pass

    def load(self, filename, bs=512):
        """Starts filesystem analysis. Detects supported filesystems and \
        loads :attr:`volumes` array.

        Args:
            filename - Path to file or device for reading.

        Raises:
            IOError - File/device does not exist or is not readable.
        """
        self.__filename = filename
        self.__volumes = []

        # Detect partitioning scheme
        self.__partition_scheme = rawdisk.scheme.common.detect_scheme(filename)

        plugin_objects = [plugin.plugin_object for plugin in self.__fs_plugins]
        fs_detector = FilesystemDetector(fs_plugins=plugin_objects)

        if self.__partition_scheme == PartitionScheme.SCHEME_MBR:
            self.__load_mbr_volumes(filename, fs_detector, bs)
        elif self.__partition_scheme == PartitionScheme.SCHEME_GPT:
            self.__load_gpt_volumes(filename, fs_detector, bs)
        else:
            self.logger.warning('Partitioning scheme could not be determined.')
            # No partition table found: try treating the whole file as a
            # single standalone volume starting at offset 0.
            volume = fs_detector.detect_standalone(filename, offset=0)
            if volume is not None:
                volume.load(filename, offset=0)
                self.__volumes.append(volume)
            else:
                self.logger.warning(
                    'Were not able to detect standalone volume type')

    def __load_gpt_volumes(self, filename, fs_detector, bs=512):
        # Walk every GPT partition entry; byte offset = first_lba * bs.
        gpt = rawdisk.scheme.gpt.Gpt()
        gpt.load(filename)

        for entry in gpt.partition_entries:
            volume = fs_detector.detect_gpt(
                filename,
                entry.first_lba * bs,
                entry.type_guid
            )

            if volume is not None:
                volume.load(filename, entry.first_lba * bs)
                self.__volumes.append(volume)
            else:
                # Unrecognized filesystem: keep a placeholder so callers
                # still see the partition's offset, GUID and size.
                self.logger.warning(
                    'Were not able to detect GPT volume type')
                self.__volumes.append(
                    UnknownVolume(
                        entry.first_lba * bs, entry.type_guid,
                        (entry.last_lba - entry.first_lba) * bs
                    )
                )

    def __load_mbr_volumes(self, filename, fs_detector, bs=512):
        mbr = rawdisk.scheme.mbr.Mbr(filename)

        # Go through table entries and analyse ones that are supported
        for entry in mbr.partition_table.partitions:
            volume = fs_detector.detect_mbr(
                filename,
                entry.part_offset,
                entry.part_type
            )

            if volume is not None:
                volume.load(filename, entry.part_offset)
                self.__volumes.append(volume)
            else:
                # Unrecognized filesystem: keep a placeholder with offset,
                # MBR type byte and size in bytes.
                self.logger.warning(
                    'Were not able to detect MBR volume type')
                self.__volumes.append(
                    UnknownVolume(
                        entry.part_offset, entry.part_type,
                        entry.total_sectors * SECTOR_SIZE
                    )
                )
ExeArco/clutch
clutch/schema/request/torrent/mutator.py
from typing import Sequence, Optional

from pydantic import BaseModel, Field

from clutch.schema.user.method.shared import IdsArg


class TrackerReplaceRequest(BaseModel):
    """One (tracker id, announce URL) pair for the `trackerReplace` argument."""

    tracker_id: int = Field(None, alias="trackerId")
    announce_url: str = Field(None, alias="announceUrl")

    class Config:
        allow_population_by_field_name = True


class TorrentMutatorArgumentsRequest(BaseModel):
    """Arguments for the Transmission RPC `torrent-set` method.

    Field aliases are the wire names from the Transmission RPC
    specification; `allow_population_by_field_name` lets callers use the
    snake_case Python names instead.
    """

    bandwidth_priority: Optional[int] = Field(None, alias="bandwidthPriority")
    download_limit: Optional[int] = Field(None, alias="downloadLimit")
    download_limited: Optional[bool] = Field(None, alias="downloadLimited")
    edit_date: Optional[int] = Field(None, alias="editDate")
    files_wanted: Optional[Sequence[str]] = Field(None, alias="files-wanted")
    # FIX: the RPC argument is "files-unwanted"; the previous alias
    # "file-unwanted" would be silently ignored by Transmission.
    files_unwanted: Optional[Sequence[str]] = Field(None, alias="files-unwanted")
    honors_session_limits: Optional[bool] = Field(None, alias="honorsSessionLimits")
    ids: Optional[IdsArg]
    labels: Optional[Sequence[str]]
    location: Optional[str]
    peer_limit: Optional[int] = Field(None, alias="peer-limit")
    priority_high: Optional[Sequence[str]] = Field(None, alias="priority-high")
    priority_low: Optional[Sequence[str]] = Field(None, alias="priority-low")
    priority_normal: Optional[Sequence[str]] = Field(None, alias="priority-normal")
    queue_position: Optional[int] = Field(None, alias="queuePosition")
    seed_idle_limit: Optional[int] = Field(None, alias="seedIdleLimit")
    seed_idle_mode: Optional[int] = Field(None, alias="seedIdleMode")
    seed_ratio_limit: Optional[float] = Field(None, alias="seedRatioLimit")
    seed_ratio_mode: Optional[int] = Field(None, alias="seedRatioMode")
    tracker_add: Optional[Sequence[str]] = Field(None, alias="trackerAdd")
    tracker_remove: Optional[Sequence[int]] = Field(None, alias="trackerRemove")
    tracker_replace: Optional[Sequence[TrackerReplaceRequest]] = Field(
        None, alias="trackerReplace"
    )
    upload_limit: Optional[int] = Field(None, alias="uploadLimit")
    upload_limited: Optional[bool] = Field(None, alias="uploadLimited")

    class Config:
        allow_population_by_field_name = True
emmanuelJet/aepp-sophia-examples
examples/SmartShop/test/contractsTest.js
<filename>examples/SmartShop/test/contractsTest.js
/*
 * ISC License (ISC)
 * Copyright (c) 2018 aeternity developers
 *
 *  Permission to use, copy, modify, and/or distribute this software for any
 *  purpose with or without fee is hereby granted, provided that the above
 *  copyright notice and this permission notice appear in all copies.
 *
 *  THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
 *  REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
 *  AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
 *  INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
 *  LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
 *  OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
 *  PERFORMANCE OF THIS SOFTWARE.
 */
const Deployer = require('aeproject-lib').Deployer;
const BUYER_CONTRACT_PATH = "./contracts/BuyerContract.aes";
const SELLER_CONTRACT_PATH = "./contracts/SellerContract.aes";
const TRANSPORT_CONTRACT_PATH = "./contracts/TransportContract.aes";

// End-to-end tests of the SmartShop Sophia contracts: a Seller, a Transport
// courier, and a Buyer contract that wires the other two together.
// `wallets` and `assert` are globals provided by the aeproject test runner.
describe('SmartShop Contract', () => {

  let deployer, buyerDeployer;
  // Owner deploys Seller/Transport; buyer deploys the Buyer contract.
  let ownerKeyPair = wallets[0], buyerKeyPair = wallets[1];

  before(async () => {
    deployer = new Deployer('local', ownerKeyPair.secretKey)
    buyerDeployer = new Deployer('local', buyerKeyPair.secretKey)
  })

  describe('Deploy Contracts', () => {
    let addressSeller, addressTransport;
    it('Deploying SellerContract', async () => {
      // Seller is constructed with the buyer's address and an item price of 100.
      const sellerDeployedPromise = deployer.deploy(SELLER_CONTRACT_PATH, [buyerKeyPair.publicKey, 100]) // Deploy it
      addressSeller = (await Promise.resolve(sellerDeployedPromise)).address // Get contract Address
      await assert.isFulfilled(sellerDeployedPromise, 'Could not deploy the SellerContract'); // Check whether it's deployed
    })

    it('Deploying TransportContract', async () => {
      // Transport is constructed with the courier's starting location.
      const transportDeployedPromise = deployer.deploy(TRANSPORT_CONTRACT_PATH, ["Lagos"]) // Deploy it
      addressTransport = (await Promise.resolve(transportDeployedPromise)).address // Get contract Address
      await assert.isFulfilled(transportDeployedPromise, 'Could not deploy the TransportContract'); // Check whether it's deployed
    })

    it('Deploying BuyerContract', async () => {
      // Buyer references both previously deployed contract addresses.
      const buyerDeployedPromise = buyerDeployer.deploy(BUYER_CONTRACT_PATH, [addressSeller, addressTransport]) // Deploy it
      await assert.isFulfilled(buyerDeployedPromise, 'Could not deploy the BuyerContract'); // Check whether it's deployed
    })
  })

  describe('Interact with contracts', () => {
    let SellerContract, addressSeller, TransportContract, addressTransport, BuyerContract;
    // Fresh deployment of all three contracts for the interaction scenario;
    // the it-blocks below form one ordered purchase/delivery flow.
    before(async () => {
      const deployedSeller = deployer.deploy(SELLER_CONTRACT_PATH, [buyerKeyPair.publicKey, 100])
      SellerContract = await Promise.resolve(deployedSeller)
      addressSeller = SellerContract.address

      const deployedTransport = deployer.deploy(TRANSPORT_CONTRACT_PATH, ["Lagos"])
      TransportContract = await Promise.resolve(deployedTransport)
      addressTransport = TransportContract.address

      const deployedBuyer = buyerDeployer.deploy(BUYER_CONTRACT_PATH, [addressSeller, addressTransport])
      BuyerContract = await Promise.resolve(deployedBuyer)
    });

    it("Should deposit item price to SellerContract from the BuyerContract", async () => {
      let result = await BuyerContract.deposit_to_seller_contract({amount: 100})
      assert.isOk(result)
    })

    it("Should check SellerContract balance from the SellerContract", async () => {
      // Balance equals the deposited item price.
      let result = (await SellerContract.seller_contract_balance()).decodedResult
      assert.equal(result, 100)
    })

    it("Should send item from the SellerContract", async () => {
      let result = await SellerContract.send_item()
      assert.isOk(result)
    })

    it("Should check item status from the SellerContract", async () => {
      let result = (await SellerContract.check_item_status()).decodedResult
      assert.equal(result, 'sent_to_transport_courier')
    })

    it("Should change courier location from the TransportContract", async () => {
      let result = await TransportContract.change_location("Abuja")
      assert.isOk(result)
    })

    it("Should check courier status from the TransportContract", async () => {
      let result = (await TransportContract.check_courier_status()).decodedResult
      assert.equal(result, 'on_way')
    })

    it("Should check courier location from the TransportContract", async () => {
      let result = (await TransportContract.check_courier_location()).decodedResult
      assert.equal(result, 'Abuja')
    })

    it("Should deliver item from the TransportContract", async () => {
      // Marks delivery and records the final location.
      let result = await TransportContract.delivered_item("Jos")
      assert.isOk(result)
    })

    it("Should check courier location from BuyerContract", async () => {
      let result = (await BuyerContract.check_courier_location()).decodedResult
      assert.equal(result, 'Jos')
    })

    it("Should check courier status from BuyerContract", async () => {
      let result = (await BuyerContract.check_courier_status()).decodedResult
      assert.equal(result, 'delivered')
    })

    it("Should recieve item from BuyerContract", async () => {
      let result = await BuyerContract.received_item()
      assert.isOk(result)
    })

    it("Should check SellerContract balance from BuyerContract", async () => {
      // Receiving the item releases the escrowed funds; balance drops to 0.
      let result = (await BuyerContract.seller_contract_balance()).decodedResult
      assert.equal(result, 0)
    })
  })
})
carvelle/payment
public_html/webmail/modules/MailWebclient/js/views/CHtmlEditorView.js
'use strict'; var _ = require('underscore'), $ = require('jquery'), ko = require('knockout'), AddressUtils = require('%PathToCoreWebclientModule%/js/utils/Address.js'), TextUtils = require('%PathToCoreWebclientModule%/js/utils/Text.js'), Types = require('%PathToCoreWebclientModule%/js/utils/Types.js'), App = require('%PathToCoreWebclientModule%/js/App.js'), Browser = require('%PathToCoreWebclientModule%/js/Browser.js'), CJua = require('%PathToCoreWebclientModule%/js/CJua.js'), UserSettings = require('%PathToCoreWebclientModule%/js/Settings.js'), Popups = require('%PathToCoreWebclientModule%/js/Popups.js'), AlertPopup = require('%PathToCoreWebclientModule%/js/popups/AlertPopup.js'), CAttachmentModel = require('modules/%ModuleName%/js/models/CAttachmentModel.js'), CCrea = require('modules/%ModuleName%/js/CCrea.js'), MailCache = require('modules/%ModuleName%/js/Cache.js'), Settings = require('modules/%ModuleName%/js/Settings.js'), CColorPickerView = require('modules/%ModuleName%/js/views/CColorPickerView.js') ; /** * @constructor * @param {boolean} bInsertImageAsBase64 * @param {Object=} oParent */ function CHtmlEditorView(bInsertImageAsBase64, oParent) { this.oParent = oParent; this.creaId = 'creaId' + Math.random().toString().replace('.', ''); this.textFocused = ko.observable(false); this.workareaDom = ko.observable(); this.uploaderAreaDom = ko.observable(); this.editorUploaderBodyDragOver = ko.observable(false); this.htmlEditorDom = ko.observable(); this.toolbarDom = ko.observable(); this.colorPickerDropdownDom = ko.observable(); this.insertLinkDropdownDom = ko.observable(); this.insertImageDropdownDom = ko.observable(); this.isFWBold = ko.observable(false); this.isFSItalic = ko.observable(false); this.isTDUnderline = ko.observable(false); this.isTDStrikeThrough = ko.observable(false); this.isEnumeration = ko.observable(false); this.isBullets = ko.observable(false); this.isEnable = ko.observable(true); this.isEnable.subscribe(function () { if (this.oCrea) { 
this.oCrea.setEditable(this.isEnable()); } }, this); this.bInsertImageAsBase64 = bInsertImageAsBase64; this.bAllowFileUpload = !(bInsertImageAsBase64 && window.File === undefined); this.bAllowInsertImage = Settings.AllowInsertImage; this.lockFontSubscribing = ko.observable(false); this.bAllowImageDragAndDrop = !Browser.ie10AndAbove; this.aFonts = ['Arial', 'Arial Black', 'Courier New', 'Tahoma', 'Times New Roman', 'Verdana']; this.sDefaultFont = Settings.DefaultFontName; this.correctFontFromSettings(); this.selectedFont = ko.observable(''); this.selectedFont.subscribe(function () { if (this.oCrea && !this.lockFontSubscribing() && !this.inactive()) { this.oCrea.fontName(this.selectedFont()); } }, this); this.iDefaultSize = Settings.DefaultFontSize; this.selectedSize = ko.observable(0); this.selectedSize.subscribe(function () { if (this.oCrea && !this.lockFontSubscribing() && !this.inactive()) { this.oCrea.fontSize(this.selectedSize()); } }, this); this.visibleInsertLinkPopup = ko.observable(false); this.linkForInsert = ko.observable(''); this.linkFocused = ko.observable(false); this.visibleLinkPopup = ko.observable(false); this.linkPopupDom = ko.observable(null); this.linkHrefDom = ko.observable(null); this.linkHref = ko.observable(''); this.visibleLinkHref = ko.observable(false); this.visibleImagePopup = ko.observable(false); this.visibleImagePopup.subscribe(function () { this.onImageOut(); }, this); this.imagePopupTop = ko.observable(0); this.imagePopupLeft = ko.observable(0); this.imageSelected = ko.observable(false); this.tooltipText = ko.observable(''); this.tooltipPopupTop = ko.observable(0); this.tooltipPopupLeft = ko.observable(0); this.visibleInsertImagePopup = ko.observable(false); this.imageUploaderButton = ko.observable(null); this.aUploadedImagesData = []; this.imagePathFromWeb = ko.observable(''); this.visibleFontColorPopup = ko.observable(false); this.oFontColorPickerView = new CColorPickerView(TextUtils.i18n('%MODULENAME%/LABEL_TEXT_COLOR'), 
this.setTextColorFromPopup, this); this.oBackColorPickerView = new CColorPickerView(TextUtils.i18n('%MODULENAME%/LABEL_BACKGROUND_COLOR'), this.setBackColorFromPopup, this); this.inactive = ko.observable(false); this.sPlaceholderText = ''; this.bAllowChangeInputDirection = UserSettings.IsRTL || Settings.AllowChangeInputDirection; this.disableEdit = ko.observable(false); this.textChanged = ko.observable(false); } CHtmlEditorView.prototype.ViewTemplate = '%ModuleName%_HtmlEditorView'; CHtmlEditorView.prototype.setInactive = function (bInactive) { this.inactive(bInactive); if (this.inactive()) { this.setPlaceholder(); } else { this.removePlaceholder(); } }; CHtmlEditorView.prototype.setPlaceholder = function () { var sText = this.removeAllTags(this.getText()); if (sText === '' || sText === '&nbsp;') { this.setText('<span>' + this.sPlaceholderText + '</span>'); if (this.oCrea) { this.oCrea.setBlur(); } } }; CHtmlEditorView.prototype.removePlaceholder = function () { var sText = this.oCrea ? 
this.removeAllTags(this.oCrea.getText(false)) : ''; if (sText === this.sPlaceholderText) { this.setText(''); if (this.oCrea) { this.oCrea.setFocus(true); } } }; CHtmlEditorView.prototype.hasOpenedPopup = function () { return this.visibleInsertLinkPopup() || this.visibleLinkPopup() || this.visibleImagePopup() || this.visibleInsertImagePopup() || this.visibleFontColorPopup(); }; CHtmlEditorView.prototype.setDisableEdit = function (bDisableEdit) { this.disableEdit(!!bDisableEdit); }; CHtmlEditorView.prototype.correctFontFromSettings = function () { var sDefaultFont = this.sDefaultFont, bFinded = false ; _.each(this.aFonts, function (sFont) { if (sFont.toLowerCase() === sDefaultFont.toLowerCase()) { sDefaultFont = sFont; bFinded = true; } }); if (bFinded) { this.sDefaultFont = sDefaultFont; } else { this.aFonts.push(sDefaultFont); } }; /** * @param {Object} $link */ CHtmlEditorView.prototype.showLinkPopup = function ($link) { var $workarea = $(this.workareaDom()), $composePopup = $workarea.closest('.panel.compose'), oWorkareaPos = $workarea.position(), oPos = $link.position(), iHeight = $link.height(), iLeft = Math.round(oPos.left + oWorkareaPos.left), iTop = Math.round(oPos.top + iHeight + oWorkareaPos.top) ; this.linkHref($link.attr('href') || $link.text()); $(this.linkPopupDom()).css({ 'left': iLeft, 'top': iTop }); $(this.linkHrefDom()).css({ 'left': iLeft, 'top': iTop }); if (!Browser.firefox && $composePopup.length === 1) { $(this.linkPopupDom()).css({ 'max-width': ($composePopup.width() - iLeft - 40) + 'px', 'white-space': 'pre-line', 'word-wrap': 'break-word' }); } this.visibleLinkPopup(true); }; CHtmlEditorView.prototype.hideLinkPopup = function () { this.visibleLinkPopup(false); }; CHtmlEditorView.prototype.showChangeLink = function () { this.visibleLinkHref(true); this.hideLinkPopup(); }; CHtmlEditorView.prototype.changeLink = function () { this.oCrea.changeLink(this.linkHref()); this.hideChangeLink(); }; CHtmlEditorView.prototype.hideChangeLink = function 
() { this.visibleLinkHref(false); }; /** * @param {jQuery} $image * @param {Object} oEvent */ CHtmlEditorView.prototype.showImagePopup = function ($image, oEvent) { var $workarea = $(this.workareaDom()), oWorkareaPos = $workarea.position(), oWorkareaOffset = $workarea.offset() ; this.imagePopupLeft(Math.round(oEvent.pageX + oWorkareaPos.left - oWorkareaOffset.left)); this.imagePopupTop(Math.round(oEvent.pageY + oWorkareaPos.top - oWorkareaOffset.top)); this.visibleImagePopup(true); }; CHtmlEditorView.prototype.hideImagePopup = function () { this.visibleImagePopup(false); }; CHtmlEditorView.prototype.resizeImage = function (sSize) { var oParams = { 'width': 'auto', 'height': 'auto' }; switch (sSize) { case Enums.HtmlEditorImageSizes.Small: oParams.width = '300px'; break; case Enums.HtmlEditorImageSizes.Medium: oParams.width = '600px'; break; case Enums.HtmlEditorImageSizes.Large: oParams.width = '1200px'; break; case Enums.HtmlEditorImageSizes.Original: oParams.width = 'auto'; break; } this.oCrea.changeCurrentImage(oParams); this.visibleImagePopup(false); }; CHtmlEditorView.prototype.onImageOver = function (oEvent) { if (oEvent.target.nodeName === 'IMG' && !this.visibleImagePopup()) { this.imageSelected(true); this.tooltipText(TextUtils.i18n('%MODULENAME%/ACTION_CLICK_TO_EDIT_IMAGE')); var self = this, $workarea = $(this.workareaDom()) ; $workarea.bind('mousemove.image', function (oEvent) { var oWorkareaPos = $workarea.position(), oWorkareaOffset = $workarea.offset() ; self.tooltipPopupTop(Math.round(oEvent.pageY + oWorkareaPos.top - oWorkareaOffset.top)); self.tooltipPopupLeft(Math.round(oEvent.pageX + oWorkareaPos.left - oWorkareaOffset.left)); }); } return true; }; CHtmlEditorView.prototype.onImageOut = function (oEvent) { if (this.imageSelected()) { this.imageSelected(false); var $workarea = $(this.workareaDom()); $workarea.unbind('mousemove.image'); } return true; }; CHtmlEditorView.prototype.commit = function () { this.textChanged(false); }; /** * @param 
{string} sText * @param {boolean} bPlain * @param {string} sTabIndex * @param {string} sPlaceholderText */ CHtmlEditorView.prototype.init = function (sText, bPlain, sTabIndex, sPlaceholderText) { this.sPlaceholderText = sPlaceholderText || ''; if (this.oCrea) { this.oCrea.$container = $('#' + this.oCrea.oOptions.creaId); // in case if knockoutjs destroyed dom element with html editor if (this.oCrea.$container.children().length === 0) { this.oCrea.start(this.isEnable()); } } else { $(document.body).on('click', _.bind(function (oEvent) { var oParent = $(oEvent.target).parents('span.dropdown_helper'); if (oParent.length === 0) { this.closeAllPopups(true); } }, this)); this.initEditorUploader(); this.oCrea = new CCrea({ 'creaId': this.creaId, 'fontNameArray': this.aFonts, 'defaultFontName': this.sDefaultFont, 'defaultFontSize': this.iDefaultSize, 'isRtl': UserSettings.IsRTL, 'enableDrop': false, 'onChange': _.bind(this.textChanged, this, true), 'onCursorMove': _.bind(this.setFontValuesFromText, this), 'onFocus': _.bind(this.onCreaFocus, this), 'onBlur': _.bind(this.onCreaBlur, this), 'onUrlIn': _.bind(this.showLinkPopup, this), 'onUrlOut': _.bind(this.hideLinkPopup, this), 'onImageSelect': _.bind(this.showImagePopup, this), 'onImageBlur': _.bind(this.hideImagePopup, this), 'onItemOver': (Browser.mobileDevice || App.isMobile()) ? null : _.bind(this.onImageOver, this), 'onItemOut': (Browser.mobileDevice || App.isMobile()) ? 
null : _.bind(this.onImageOut, this), 'openInsertLinkDialog': _.bind(this.insertLink, this), 'onUrlClicked': true }); this.oCrea.start(this.isEnable()); } this.oCrea.setTabIndex(sTabIndex); this.clearUndoRedo(); this.setText(sText, bPlain); this.setFontValuesFromText(); this.aUploadedImagesData = []; this.selectedFont(this.sDefaultFont); this.selectedSize(this.iDefaultSize); }; CHtmlEditorView.prototype.isInitialized = function () { return !!this.oCrea; }; CHtmlEditorView.prototype.setFocus = function () { if (this.oCrea) { this.oCrea.setFocus(false); } }; /** * @param {string} sNewSignatureContent * @param {string} sOldSignatureContent */ CHtmlEditorView.prototype.changeSignatureContent = function (sNewSignatureContent, sOldSignatureContent) { if (this.oCrea) { this.oCrea.changeSignatureContent(sNewSignatureContent, sOldSignatureContent); } }; CHtmlEditorView.prototype.setFontValuesFromText = function () { this.lockFontSubscribing(true); this.isFWBold(this.oCrea.getIsBold()); this.isFSItalic(this.oCrea.getIsItalic()); this.isTDUnderline(this.oCrea.getIsUnderline()); this.isTDStrikeThrough(this.oCrea.getIsStrikeThrough()); this.isEnumeration(this.oCrea.getIsEnumeration()); this.isBullets(this.oCrea.getIsBullets()); this.selectedFont(this.oCrea.getFontName()); this.selectedSize(this.oCrea.getFontSizeInNumber().toString()); this.lockFontSubscribing(false); }; CHtmlEditorView.prototype.isUndoAvailable = function () { if (this.oCrea) { return this.oCrea.isUndoAvailable(); } return false; }; CHtmlEditorView.prototype.getPlainText = function () { if (this.oCrea) { return this.oCrea.getPlainText(); } return ''; }; /** * @param {boolean=} bRemoveSignatureAnchor = false */ CHtmlEditorView.prototype.getText = function (bRemoveSignatureAnchor) { var sText = this.oCrea ? this.oCrea.getText(bRemoveSignatureAnchor) : '' ; return (this.sPlaceholderText !== '' && this.removeAllTags(sText) === this.sPlaceholderText) ? 
'' : sText; }; /** * @param {string} sText * @param {boolean} bPlain */ CHtmlEditorView.prototype.setText = function (sText, bPlain) { if (this.oCrea) { if (bPlain) { this.oCrea.setPlainText(sText); } else { this.oCrea.setText(sText); } if (this.inactive() && sText === '') { this.setPlaceholder(); } } }; CHtmlEditorView.prototype.undoAndClearRedo = function () { if (this.oCrea) { this.oCrea.undo(); this.oCrea.clearRedo(); } }; CHtmlEditorView.prototype.clearUndoRedo = function () { if (this.oCrea) { this.oCrea.clearUndoRedo(); } }; CHtmlEditorView.prototype.isEditing = function () { return this.oCrea ? this.oCrea.bEditing : false; }; /** * @param {string} sText */ CHtmlEditorView.prototype.removeAllTags = function (sText) { return sText.replace(/<style>.*<\/style>/g, '').replace(/<[^>]*>/g, ''); }; CHtmlEditorView.prototype.onCreaFocus = function () { if (this.oCrea) { this.closeAllPopups(); this.textFocused(true); } }; CHtmlEditorView.prototype.onCreaBlur = function () { if (this.oCrea) { this.textFocused(false); } }; CHtmlEditorView.prototype.onEscHandler = function () { if (!Popups.hasOpenedMaximizedPopups()) { this.closeAllPopups(); } }; /** * @param {boolean} bWithoutLinkPopup */ CHtmlEditorView.prototype.closeAllPopups = function (bWithoutLinkPopup) { bWithoutLinkPopup = !!bWithoutLinkPopup; if (!bWithoutLinkPopup) { this.visibleLinkPopup(false); } this.visibleInsertLinkPopup(false); this.visibleImagePopup(false); this.visibleInsertImagePopup(false); this.visibleFontColorPopup(false); }; /** * @param {string} sHtml */ CHtmlEditorView.prototype.insertHtml = function (sHtml) { if (this.oCrea) { if (!this.oCrea.isFocused()) { this.oCrea.setFocus(true); } this.oCrea.insertHtml(sHtml, false); } }; /** * @param {Object} oViewModel * @param {Object} oEvent */ CHtmlEditorView.prototype.insertLink = function (oViewModel, oEvent) { if (!this.inactive() && !this.visibleInsertLinkPopup()) { oEvent.stopPropagation(); this.linkForInsert(this.oCrea.getSelectedText()); 
this.closeAllPopups(); this.visibleInsertLinkPopup(true); this.linkFocused(true); } }; /** * @param {Object} oCurrentViewModel * @param {Object} event */ CHtmlEditorView.prototype.insertLinkFromPopup = function (oCurrentViewModel, event) { if (this.linkForInsert().length > 0) { if (AddressUtils.isCorrectEmail(this.linkForInsert())) { this.oCrea.insertEmailLink(this.linkForInsert()); } else { this.oCrea.insertLink(this.linkForInsert()); } } this.closeInsertLinkPopup(oCurrentViewModel, event); }; /** * @param {Object} oCurrentViewModel * @param {Object} event */ CHtmlEditorView.prototype.closeInsertLinkPopup = function (oCurrentViewModel, event) { this.visibleInsertLinkPopup(false); if (event) { event.stopPropagation(); } }; CHtmlEditorView.prototype.textColor = function (oViewModel, oEvent) { if (!this.inactive()) { this.closeAllPopups(); if (!this.visibleFontColorPopup()) { oEvent.stopPropagation(); this.visibleFontColorPopup(true); this.oFontColorPickerView.onShow(); this.oBackColorPickerView.onShow(); } } }; /** * @param {string} sColor * @return string */ CHtmlEditorView.prototype.colorToHex = function (sColor) { if (sColor.substr(0, 1) === '#') { return sColor; } /*jslint bitwise: true*/ var aDigits = /(.*?)rgb\((\d+), (\d+), (\d+)\)/.exec(sColor), iRed = Types.pInt(aDigits[2]), iGreen = Types.pInt(aDigits[3]), iBlue = Types.pInt(aDigits[4]), iRgb = iBlue | (iGreen << 8) | (iRed << 16), sRgb = iRgb.toString(16) ; /*jslint bitwise: false*/ while (sRgb.length < 6) { sRgb = '0' + sRgb; } return aDigits[1] + '#' + sRgb; }; /** * @param {string} sColor */ CHtmlEditorView.prototype.setTextColorFromPopup = function (sColor) { this.oCrea.textColor(this.colorToHex(sColor)); this.closeAllPopups(); }; /** * @param {string} sColor */ CHtmlEditorView.prototype.setBackColorFromPopup = function (sColor) { this.oCrea.backgroundColor(this.colorToHex(sColor)); this.closeAllPopups(); }; CHtmlEditorView.prototype.insertImage = function (oViewModel, oEvent) { if (!this.inactive() 
&& Settings.AllowInsertImage && !this.visibleInsertImagePopup()) { oEvent.stopPropagation(); this.imagePathFromWeb(''); this.closeAllPopups(); this.visibleInsertImagePopup(true); this.initUploader(); } return true; }; /** * @param {Object} oCurrentViewModel * @param {Object} event */ CHtmlEditorView.prototype.insertWebImageFromPopup = function (oCurrentViewModel, event) { if (Settings.AllowInsertImage && this.imagePathFromWeb().length > 0) { this.oCrea.insertImage(this.imagePathFromWeb()); } this.closeInsertImagePopup(oCurrentViewModel, event); }; /** * @param {string} sUid * @param oAttachmentData */ CHtmlEditorView.prototype.insertComputerImageFromPopup = function (sUid, oAttachmentData) { var oAttachment = new CAttachmentModel(), sViewLink = '', bResult = false ; oAttachment.parse(oAttachmentData); sViewLink = oAttachment.getActionUrl('view'); if (Settings.AllowInsertImage && sViewLink.length > 0) { bResult = this.oCrea.insertImage(sViewLink); if (bResult) { $(this.oCrea.$editableArea) .find('img[src="' + sViewLink + '"]') .attr('data-x-src-cid', sUid) ; oAttachmentData.CID = sUid; this.aUploadedImagesData.push(oAttachmentData); } } this.closeInsertImagePopup(); }; CHtmlEditorView.prototype.getUploadedImagesData = function () { return this.aUploadedImagesData; }; /** * @param {?=} oCurrentViewModel * @param {?=} event */ CHtmlEditorView.prototype.closeInsertImagePopup = function (oCurrentViewModel, event) { this.visibleInsertImagePopup(false); if (event) { event.stopPropagation(); } }; /** * Initializes file uploader. */ CHtmlEditorView.prototype.initUploader = function () { if (this.imageUploaderButton() && !this.oJua) { this.oJua = new CJua({ 'action': '?/Api/', 'name': 'jua-uploader', 'queueSize': 2, 'clickElement': this.imageUploaderButton(), 'hiddenElementsPosition': UserSettings.IsRTL ? 
'right' : 'left', 'disableMultiple': true, 'disableAjaxUpload': false, 'disableDragAndDrop': true, 'hidden': _.extendOwn({ 'Module': Settings.ServerModuleName, 'Method': 'UploadAttachment', 'Parameters': function () { return JSON.stringify({ 'AccountID': MailCache.currentAccountId() }); } }, App.getCommonRequestParameters()) }); if (this.bInsertImageAsBase64) { this.oJua .on('onSelect', _.bind(this.onEditorDrop, this)) ; } else { this.oJua .on('onSelect', _.bind(this.onFileUploadSelect, this)) .on('onComplete', _.bind(this.onFileUploadComplete, this)) ; } } }; /** * Initializes file uploader for editor. */ CHtmlEditorView.prototype.initEditorUploader = function () { if (Settings.AllowInsertImage && this.uploaderAreaDom() && !this.editorUploader) { var fBodyDragEnter = null, fBodyDragOver = null ; if (this.oParent && this.oParent.composeUploaderDragOver && this.oParent.onFileUploadProgress && this.oParent.onFileUploadStart && this.oParent.onFileUploadComplete) { fBodyDragEnter = _.bind(function () { this.editorUploaderBodyDragOver(true); this.oParent.composeUploaderDragOver(true); }, this); fBodyDragOver = _.bind(function () { this.editorUploaderBodyDragOver(false); this.oParent.composeUploaderDragOver(false); }, this); this.editorUploader = new CJua({ 'action': '?/Api/', 'name': 'jua-uploader', 'queueSize': 1, 'dragAndDropElement': this.bAllowImageDragAndDrop ? 
this.uploaderAreaDom() : null, 'disableMultiple': true, 'disableAjaxUpload': false, 'disableDragAndDrop': !this.bAllowImageDragAndDrop, 'hidden': _.extendOwn({ 'Module': Settings.ServerModuleName, 'Method': 'UploadAttachment', 'Parameters': function () { return JSON.stringify({ 'AccountID': MailCache.currentAccountId() }); } }, App.getCommonRequestParameters()) }); this.editorUploader .on('onDragEnter', _.bind(this.oParent.composeUploaderDragOver, this.oParent, true)) .on('onDragLeave', _.bind(this.oParent.composeUploaderDragOver, this.oParent, false)) .on('onBodyDragEnter', fBodyDragEnter) .on('onBodyDragLeave', fBodyDragOver) .on('onProgress', _.bind(this.oParent.onFileUploadProgress, this.oParent)) .on('onSelect', _.bind(this.onEditorDrop, this)) .on('onStart', _.bind(this.oParent.onFileUploadStart, this.oParent)) .on('onComplete', _.bind(this.oParent.onFileUploadComplete, this.oParent)) ; } else { fBodyDragEnter = _.bind(this.editorUploaderBodyDragOver, this, true); fBodyDragOver = _.bind(this.editorUploaderBodyDragOver, this, false); this.editorUploader = new CJua({ 'queueSize': 1, 'dragAndDropElement': this.bAllowImageDragAndDrop ? this.uploaderAreaDom() : null, 'disableMultiple': true, 'disableAjaxUpload': false, 'disableDragAndDrop': !this.bAllowImageDragAndDrop }); this.editorUploader .on('onBodyDragEnter', fBodyDragEnter) .on('onBodyDragLeave', fBodyDragOver) .on('onSelect', _.bind(this.onEditorDrop, this)) ; } } }; CHtmlEditorView.prototype.isDragAndDropSupported = function () { return this.editorUploader ? 
this.editorUploader.isDragAndDropSupported() : false; }; CHtmlEditorView.prototype.onEditorDrop = function (sUid, oData) { var oReader = null, oFile = null, self = this, bCreaFocused = false, hash = Math.random().toString(), sId = '' ; if (oData && oData.File && (typeof oData.File.type === 'string')) { if (Settings.AllowInsertImage && 0 === oData.File.type.indexOf('image/')) { oFile = oData.File; if (Settings.ImageUploadSizeLimit > 0 && oFile.size > Settings.ImageUploadSizeLimit) { Popups.showPopup(AlertPopup, [TextUtils.i18n('COREWEBCLIENT/ERROR_UPLOAD_SIZE')]); } else { oReader = new window.FileReader(); bCreaFocused = this.oCrea.isFocused(); if (!bCreaFocused) { this.oCrea.setFocus(true); } sId = oFile.name + '_' + hash; this.oCrea.insertHtml('<img id="' + sId + '" src="./static/styles/images/wait.gif" />', true); if (!bCreaFocused) { this.oCrea.fixFirefoxCursorBug(); } oReader.onload = function (oEvent) { self.oCrea.changeImageSource(sId, oEvent.target.result); }; oReader.readAsDataURL(oFile); } } else { if (this.oParent && this.oParent.onFileUploadSelect) { this.oParent.onFileUploadSelect(sUid, oData); return true; } else if (!Browser.ie10AndAbove) { Popups.showPopup(AlertPopup, [TextUtils.i18n('%MODULENAME%/ERROR_NOT_IMAGE_CHOOSEN')]); } } } return false; }; /** * @param {Object} oFile */ CHtmlEditorView.prototype.isFileImage = function (oFile) { if (typeof oFile.Type === 'string') { return (-1 !== oFile.Type.indexOf('image')); } else { var iDotPos = oFile.FileName.lastIndexOf('.'), sExt = oFile.FileName.substr(iDotPos + 1), aImageExt = ['jpg', 'jpeg', 'gif', 'tif', 'tiff', 'png'] ; return (-1 !== $.inArray(sExt, aImageExt)); } }; /** * @param {string} sUid * @param {Object} oFile */ CHtmlEditorView.prototype.onFileUploadSelect = function (sUid, oFile) { if (!this.isFileImage(oFile)) { Popups.showPopup(AlertPopup, [TextUtils.i18n('%MODULENAME%/ERROR_NOT_IMAGE_CHOOSEN')]); return false; } this.closeInsertImagePopup(); return true; }; /** * @param {string} sUid 
* @param {boolean} bResponseReceived * @param {Object} oData */ CHtmlEditorView.prototype.onFileUploadComplete = function (sUid, bResponseReceived, oData) { var sError = ''; if (oData && oData.Result) { if (oData.Result.Error) { sError = oData.Result.Error === 'size' ? TextUtils.i18n('COREWEBCLIENT/ERROR_UPLOAD_SIZE') : TextUtils.i18n('COREWEBCLIENT/ERROR_UPLOAD_UNKNOWN'); Popups.showPopup(AlertPopup, [sError]); } else { this.oCrea.setFocus(true); this.insertComputerImageFromPopup(sUid, oData.Result.Attachment); } } else { Popups.showPopup(AlertPopup, [TextUtils.i18n('COREWEBCLIENT/ERROR_UPLOAD_UNKNOWN')]); } }; CHtmlEditorView.prototype.undo = function () { if (!this.inactive()) { this.oCrea.undo(); } return false; }; CHtmlEditorView.prototype.redo = function () { if (!this.inactive()) { this.oCrea.redo(); } return false; }; CHtmlEditorView.prototype.bold = function () { if (!this.inactive()) { this.oCrea.bold(); this.isFWBold(!this.isFWBold()); } return false; }; CHtmlEditorView.prototype.italic = function () { if (!this.inactive()) { this.oCrea.italic(); this.isFSItalic(!this.isFSItalic()); } return false; }; CHtmlEditorView.prototype.underline = function () { if (!this.inactive()) { this.oCrea.underline(); this.isTDUnderline(!this.isTDUnderline()); } return false; }; CHtmlEditorView.prototype.strikeThrough = function () { if (!this.inactive()) { this.oCrea.strikeThrough(); this.isTDStrikeThrough(!this.isTDStrikeThrough()); } return false; }; CHtmlEditorView.prototype.numbering = function () { if (!this.inactive()) { this.oCrea.numbering(); this.isBullets(false); this.isEnumeration(!this.isEnumeration()); } return false; }; CHtmlEditorView.prototype.bullets = function () { if (!this.inactive()) { this.oCrea.bullets(); this.isEnumeration(false); this.isBullets(!this.isBullets()); } return false; }; CHtmlEditorView.prototype.removeFormat = function () { if (!this.inactive()) { this.oCrea.removeFormat(); } return false; }; CHtmlEditorView.prototype.setRtlDirection 
= function () { if (!this.inactive()) { this.oCrea.setRtlDirection(); } return false; }; CHtmlEditorView.prototype.setLtrDirection = function () { if (!this.inactive()) { this.oCrea.setLtrDirection(); } return false; }; module.exports = CHtmlEditorView;
crazyblitz/springboot-learn
first-spring-boot-starter/src/main/java/com/ley/first/spring/boot/starter/compose/annotation/AnnotationAttributeApplication.java
<reponame>crazyblitz/springboot-learn
package com.ley.first.spring.boot.starter.compose.annotation;

import org.junit.Test;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.lang.reflect.AnnotatedElement;

/**
 * Class description: demonstrates Spring's annotation-attribute abstraction,
 * {@code AnnotationAttributes}.
 *
 * @author liuenyuan
 * @date 2019/4/27 10:39
 * @describe
 * @see org.springframework.core.annotation.AnnotationAttributes
 */
public class AnnotationAttributeApplication {


    /**
     * Spring annotation-attribute overriding.
     * <br/>
     * Multi-level meta-annotation scenarios are limited; the root cause is the
     * static nature of Java annotations.
     * <br/>
     * Attributes declared on a lower-level annotation override those of higher
     * levels. <b>(AnnotationAttributes follows a "nearest annotation wins"
     * override design principle.)</b>
     * <br/>
     **/
    @Test
    public void annotationAttributesOverride() {
        // TransactionalService is a project-local class presumed to be
        // meta-annotated with @Service and @Transactional — the merged
        // attribute lookups below resolve against it.
        AnnotatedElement element = TransactionalService.class;

        // Merged (override-aware) attributes for each annotation type.
        AnnotationAttributes serviceAnnotationAttributes = AnnotatedElementUtils
                .getMergedAnnotationAttributes(element, Service.class);

        AnnotationAttributes transactionalAnnotationAttributes = AnnotatedElementUtils
                .getMergedAnnotationAttributes(element, Transactional.class);

        print0(serviceAnnotationAttributes);

        print0(transactionalAnnotationAttributes);
    }

    // Dumps every attribute name/value pair of the given annotation's merged
    // attributes to stdout. (printf format strings are intentionally left in
    // the original language — they are runtime output, not comments.)
    private static void print0(AnnotationAttributes annotationAttributes) {
        System.out.printf("注解 %s 属性集合 : \n", annotationAttributes.annotationType().getName());
        annotationAttributes.forEach((name, value) ->
                System.out.printf("\t 属性 %s : %s \n", name, value));
    }
}
Ybalrid/orbiter
Extern/mssdk_dx7/samples/Multimedia/D3DRM/src/FlyFS/fly.cpp
//----------------------------------------------------------------------------- // File: fly.cpp // // Desc: // // Copyright (C) 1998-1999 Microsoft Corporation. All Rights Reserved. //----------------------------------------------------------------------------- #include <d3drmwin.h> #include <math.h> #include <stdlib.h> #define SAFE_RELEASE(x) if (x != NULL) {x->Release(); x = NULL;} #define MSG(str) MessageBox( NULL, str, "Application Message", MB_OK ) struct PathInfo { FLOAT fTime; LPDIRECT3DRMFRAME3 pChaseFrame; LPDIRECT3DRMFRAME3 pPlaneFrame; LPDIRECT3DRMANIMATION2 pFlightPath; }; #define NUM_SMOKE_TRAILS 7 DWORD dwNumSmokeTrails = 0; DWORD dwNumTrailsDone = 0; LPDIRECT3DRMFRAME3 apSmokeFrame[NUM_SMOKE_TRAILS]; //----------------------------------------------------------------------------- // Name: // Desc: //----------------------------------------------------------------------------- void __cdecl CleanupObjectsCallback( LPDIRECT3DRMOBJECT pObj, VOID* pArg ) { PathInfo* pInfo = (PathInfo*)pArg; for( int i=0; i<NUM_SMOKE_TRAILS; i++ ) apSmokeFrame[i]->Release(); pInfo->pChaseFrame->Release(); pInfo->pPlaneFrame->Release(); pInfo->pFlightPath->Release(); } //----------------------------------------------------------------------------- // Name: // Desc: //----------------------------------------------------------------------------- void __cdecl MoveCameraCallback( LPDIRECT3DRMFRAME3 pCamera, VOID* pArg, D3DVALUE delta ) { PathInfo* pInfo = (PathInfo*)pArg; D3DVECTOR dir, up; D3DVECTOR dirCam, upCam; LPDIRECT3DRMFRAME3 pScene; D3DVALUE a_bit; pCamera->GetScene( &pScene ); pInfo->fTime += 0.04f; pInfo->pFlightPath->SetFrame( pCamera ); pInfo->pFlightPath->SetTime( pInfo->fTime ); pInfo->pFlightPath->SetFrame( pInfo->pPlaneFrame ); pInfo->pFlightPath->SetTime( pInfo->fTime + 0.5f ); pInfo->pFlightPath->SetFrame( pInfo->pChaseFrame ); pInfo->pFlightPath->SetTime( pInfo->fTime + 1.0f ); pCamera->LookAt( pInfo->pPlaneFrame, pScene, D3DRMCONSTRAIN_Z); 
pInfo->pPlaneFrame->LookAt( pInfo->pChaseFrame, pScene, D3DRMCONSTRAIN_Y); pCamera->GetOrientation( pScene, &dirCam, &upCam); pInfo->pPlaneFrame->GetOrientation( pScene, &dir, &up); up.x = dir.x - dirCam.x; up.y = dir.y - dirCam.y + 1.0f; up.z = dir.z - dirCam.z; pInfo->pPlaneFrame->SetOrientation( pScene, dir.x, dir.y, dir.z, up.x, up.y, up.z ); if( dwNumTrailsDone < NUM_SMOKE_TRAILS ) { pScene->AddVisual( (LPDIRECT3DRMVISUAL)apSmokeFrame[dwNumSmokeTrails]); dwNumTrailsDone++; } else { if( dwNumSmokeTrails == NUM_SMOKE_TRAILS ) dwNumSmokeTrails = 0; } a_bit = D3DDivide(D3DDivide(D3DVAL(dwNumSmokeTrails), D3DVAL(NUM_SMOKE_TRAILS)), 10.0f); pInfo->pFlightPath->SetFrame( apSmokeFrame[dwNumSmokeTrails]); pInfo->pFlightPath->SetTime( pInfo->fTime + 0.4f - a_bit ); apSmokeFrame[dwNumSmokeTrails]->SetOrientation( pScene, dir.x, dir.y, dir.z, up.x, up.y, up.z); dwNumSmokeTrails++; pScene->Release(); } //----------------------------------------------------------------------------- // Name: // Desc: //----------------------------------------------------------------------------- BOOL BuildScene( LPDIRECT3DRM3 pD3DRM, LPDIRECT3DRMDEVICE3 pDevice, LPDIRECT3DRMVIEWPORT2 pViewport, LPDIRECT3DRMFRAME3 pScene, LPDIRECT3DRMFRAME3 pCamera ) { LPDIRECT3DRMFRAME3 lights = NULL; D3DRMBOX box; LPDIRECT3DRMMESHBUILDER3 plane_builder = NULL; LPDIRECT3DRMMESHBUILDER3 mesh_builder = NULL; LPDIRECT3DRMMESHBUILDER3 smoke_builder = NULL; LPDIRECT3DRMMESH plane = NULL; LPDIRECT3DRMMESH mesh = NULL; LPDIRECT3DRMMESH smokemesh = NULL; LPDIRECT3DRMLIGHT ambient = NULL; LPDIRECT3DRMLIGHT parallel = NULL; D3DCOLOR smokec; LPDIRECT3DRMFRAME3 frame = NULL; LPDIRECT3DRMFRAME3 sl = NULL; LPDIRECT3DRMFRAME3 sr = NULL; int i; int numPts = 11; D3DVECTOR path[] = { -8.0f, 3.0f, -12.0f, -4.0f, 2.0f, -8.0f, -2.0f, 0.0f, -4.0f, 9.0f, -1.0f, 7.0f, 4.0f, 6.0f, 10.0f, -4.0f, 5.0f, 9.0f, 5.5f, 3.5f, -6.5f, 2.0f, 5.0f, -10.0f, 0.0f, 4.0f, -15.0f, -5.0f, 4.0f, -15.0f, -8.0f, 3.0f, -12.0f }; D3DVALUE path_t[] = { 
0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f }; static PathInfo info; if( FAILED( pViewport->SetField( 0.8f ) ) ) goto generic_error; if( FAILED( pDevice->SetQuality( D3DRMRENDER_GOURAUD ) ) ) goto generic_error; #ifdef FOG if( FAILED( pDevice->SetDither( TRUE ) ) ) goto generic_error; if( FAILED( pScene->SetFogEnable( TRUE ) ) ) goto generic_error; if( FAILED( pScene->SetFogParams( 1, 30, 1 ) ) ) goto generic_error; #endif // This Demo flies a plane through a small landscape, followed by a // camera. The paths are spline curves. // Initialize smoke trail smokec = D3DRMCreateColorRGBA(0.6f, 0.6f, 0.6f, 0.5f ); if( FAILED( pD3DRM->CreateMeshBuilder( &smoke_builder ) ) ) goto generic_error; if( FAILED( smoke_builder->Load( "sphere0.x", NULL, D3DRMLOAD_FROMFILE, NULL, NULL ) ) ) { MSG("Failed to load sphere0.x.\n" ); goto ret_with_error; } if( FAILED( smoke_builder->Scale( 0.015f, 0.015f, 0.015f ) ) ) goto generic_error; if( FAILED( smoke_builder->CreateMesh( &smokemesh ) ) ) goto generic_error; for( i=0; i<NUM_SMOKE_TRAILS; i++ ) { if( FAILED( pD3DRM->CreateFrame( pScene, &apSmokeFrame[i] ) ) ) goto generic_error; if( FAILED( pD3DRM->CreateFrame( apSmokeFrame[i], &sl ) ) ) goto generic_error; if( FAILED( pD3DRM->CreateFrame( apSmokeFrame[i], &sr ) ) ) goto generic_error; if( FAILED( sl->AddVisual( (LPDIRECT3DRMVISUAL)smokemesh ) ) ) goto generic_error; if( FAILED( sr->AddVisual( (LPDIRECT3DRMVISUAL)smokemesh ) ) ) goto generic_error; if( FAILED( sr->SetPosition( apSmokeFrame[i], -0.1f, 0.0f, 0.0f ) ) ) goto generic_error; if (FAILED( apSmokeFrame[i]->SetMaterialMode( D3DRMMATERIAL_FROMFRAME ) ) ) goto generic_error; if (FAILED( apSmokeFrame[i]->SetColor( smokec ) ) ) goto generic_error; if (FAILED( sl->SetMaterialMode( D3DRMMATERIAL_FROMPARENT ) ) ) goto generic_error; if (FAILED( sr->SetMaterialMode( D3DRMMATERIAL_FROMPARENT ) ) ) goto generic_error; SAFE_RELEASE(sl); SAFE_RELEASE(sr); } // initialize the lights in the scene if( FAILED( 
pD3DRM->CreateFrame( pScene, &lights ) ) ) goto generic_error; if( FAILED( lights->SetPosition( pScene, 5.0f, 5.0f, -5.0f ) ) ) goto generic_error; if( FAILED( pD3DRM->CreateLightRGB( D3DRMLIGHT_PARALLELPOINT, 0.8f, 0.6f, 0.7f, &parallel ) ) ) goto generic_error; if( FAILED( lights->AddLight( parallel ) ) ) goto generic_error; if( FAILED( pD3DRM->CreateLightRGB( D3DRMLIGHT_AMBIENT, 0.1f, 0.1f, 0.1f, &ambient ) ) ) goto generic_error; if( FAILED( pScene->AddLight( ambient ) ) ) goto generic_error; // load mesh file if( FAILED( pD3DRM->CreateFrame( pScene, &frame ) ) ) goto generic_error; if( FAILED( pD3DRM->CreateMeshBuilder( &mesh_builder ) ) ) goto generic_error; if( FAILED( mesh_builder->Load( "land4.x", NULL, D3DRMLOAD_FROMFILE, NULL, NULL ) ) ) goto generic_error; if( FAILED( mesh_builder->Scale( 10.0f, 8.0f, 10.0f ) ) ) goto generic_error; if( FAILED( mesh_builder->GetBox( &box ) ) ) goto generic_error; // Color the landscape's faces. if( mesh_builder ) { LPDIRECT3DRMFACEARRAY faces; LPDIRECT3DRMFACE this_face; int face_count, vertex_count; D3DVALUE range, height; D3DVECTOR *coords; if( FAILED(mesh_builder->GetFaces( &faces ) ) ) goto generic_error; face_count = faces->GetSize(); range = box.max.y - box.min.y; // color the faces according to the height for( int i=0; i<face_count; i++ ) { DWORD dwVertexCount; faces->GetElement( i, &this_face); dwVertexCount = this_face->GetVertexCount(); coords = (LPD3DVECTOR) malloc( dwVertexCount * sizeof(D3DVECTOR)); this_face->GetVertices( &dwVertexCount, coords, NULL); vertex_count=dwVertexCount; if( dwVertexCount ) { // find maximum height of the face height = coords[0].y; for( DWORD j=1; j<dwVertexCount; j++ ) { if( coords[j].y > height ) height = coords[j].y; } height = D3DDivide((height - box.min.y), range); if( height < 0.03f ) // water this_face->SetColorRGB( 0.20f, 0.20f, 0.50f ); else if( height < 0.3f ) // greenery this_face->SetColorRGB( 0.10f, 0.80f, 0.10f ); else if( height < 0.5f ) // rocks 
this_face->SetColorRGB( 0.60f, 0.30f, 0.30f ); else if( height < 0.7f ) // dirty snow this_face->SetColorRGB( 0.80f, 0.65f, 0.65f ); else // snow this_face->SetColorRGB( 1.00f, 1.00f, 1.00f ); } free(coords); SAFE_RELEASE(this_face); } SAFE_RELEASE(faces); } if( FAILED( mesh_builder->CreateMesh( &mesh ) ) ) goto generic_error; if( FAILED( frame->AddVisual( (LPDIRECT3DRMVISUAL)mesh ) ) ) goto generic_error; if( FAILED( pD3DRM->CreateMeshBuilder( &plane_builder ) ) ) goto generic_error; if( FAILED( plane_builder->Load( "dropship.x", NULL, D3DRMLOAD_FROMFILE, NULL, NULL ) ) ) { MSG("Failed to load dropship.x.\n" ); goto ret_with_error; } if( FAILED( plane_builder->Scale( 0.015f, 0.008f, 0.015f ) ) ) goto generic_error; if( FAILED( plane_builder->CreateMesh( &plane ) ) ) goto generic_error; if( FAILED( pD3DRM->CreateAnimation( &info.pFlightPath ) ) ) goto generic_error; info.pFlightPath->SetOptions( D3DRMANIMATION_CLOSED | D3DRMANIMATION_SPLINEPOSITION | D3DRMANIMATION_POSITION); for( i=0; i<numPts; i++ ) info.pFlightPath->AddPositionKey( path_t[i], path[i].x, path[i].y, path[i].z); info.fTime = 0.0f; if( FAILED( pD3DRM->CreateFrame( pScene, &info.pChaseFrame ) ) ) goto generic_error; if( FAILED( pD3DRM->CreateFrame( pScene, &info.pPlaneFrame ) ) ) goto generic_error; if( FAILED( info.pPlaneFrame->AddVisual( (LPDIRECT3DRMVISUAL)plane ) ) ) goto generic_error; if( FAILED( pCamera->AddMoveCallback( MoveCameraCallback, (VOID*)&info, D3DRMCALLBACK_PREORDER ) ) ) goto generic_error; if( FAILED( pCamera->AddDestroyCallback( CleanupObjectsCallback, &info ) ) ) goto generic_error; SAFE_RELEASE( lights ); SAFE_RELEASE( plane_builder ); SAFE_RELEASE( mesh_builder ); SAFE_RELEASE( smoke_builder ); SAFE_RELEASE( plane ); SAFE_RELEASE( mesh ); SAFE_RELEASE( smokemesh ); SAFE_RELEASE( ambient ); SAFE_RELEASE( parallel ); SAFE_RELEASE( frame ); return TRUE; generic_error: MSG("A failure has occurred while building the scene.\n"); ret_with_error: SAFE_RELEASE( lights ); SAFE_RELEASE( 
plane_builder ); SAFE_RELEASE( mesh_builder ); SAFE_RELEASE( smoke_builder ); SAFE_RELEASE( plane ); SAFE_RELEASE( mesh ); SAFE_RELEASE( smokemesh ); SAFE_RELEASE( ambient ); SAFE_RELEASE( parallel ); SAFE_RELEASE( frame ); SAFE_RELEASE( sl ); SAFE_RELEASE( sr ); return FALSE; } //----------------------------------------------------------------------------- // Name: // Desc: //----------------------------------------------------------------------------- VOID OverrideDefaults( BOOL* pbNoTextures, BOOL* pbResizingDisabled, BOOL* pbConstRenderQuality, CHAR** pstrName ) { (*pbNoTextures) = TRUE; (*pbConstRenderQuality) = TRUE; (*pstrName) = "Fly Full-Screen Direct3DRM Example"; }
jdart/fsImgur
src/client/content/matcher.js
import Imgur from './Imgur.react';
import FsImg from './FsImg.react';
import FsIframe from './FsIframe.react';
import Reddit from './reddit/Reddit.react';
import Gfycat from './Gfycat.react';
import Vidme from './Vidme.react';
import Vimeo from './Vimeo.react';
import Instagram from './Instagram.react';
import Youtube from './Youtube.react';
import Twitter from './Twitter.react';
import Streamable from './Streamable.react';
import Readability from './Readability.react';
import {hostMatch} from '../utils';
import ImgurNav from './ImgurNav.react';
import Video from './Video.react';
import Eshare from './Eshare.react';
import {domain as eshareDomain} from './Eshare.react';
import C from '../../common/reddit/content/consts';
import {extIn} from '../../common/utils';
import {domainDecoder} from '../utils';

// Predicates for recognising direct media links by file extension / MIME type.
const imageUrl = extIn('jpg', 'jpeg', 'png', 'gif');
const videoUrl = extIn('webm', 'mp4');
const imageMimeTypeRegex = /\/(jpg|jpeg|png|gif)$/i;

// Ordered list of content matchers. The FIRST entry whose `host` (if any)
// and `test` (if any) both accept the entry wins, so order matters.
// NOTE(review): the catch-all FsIframe entry (no host, no test) always
// matches, which makes the trailing Readability entry unreachable — confirm
// whether that is intentional.
const matchers = [{
  test: (_, entry) => entry.viewMode === C.REDDIT_CONTENT_VIEW_MODE_COMMENTS,
  component: Reddit,
}, {
  host: 'imgur.com',
  component: Imgur,
  navComponent: ImgurNav,
  preload: true,
}, {
  test: (entryUrl, entry) => imageUrl(entryUrl) || imageMimeTypeRegex.test(entry.mime_type),
  component: FsImg,
  preload: true,
}, {
  test: (entryUrl, _) => videoUrl(entryUrl),
  component: Video,
}, {
  host: 'reddit.com',
  component: Reddit,
}, {
  host: 'twitter.com',
  component: Twitter,
}, {
  host: 'streamable.com',
  component: Streamable,
}, {
  host: 'vid.me',
  component: Vidme,
}, {
  host: 'vimeo.com',
  component: Vimeo,
}, {
  host: domainDecoder(eshareDomain),
  component: Eshare,
}, {
  host: 'gfycat.com',
  component: Gfycat,
}, {
  host: 'instagram.com',
  component: Instagram,
}, {
  host: ['youtube.com', 'youtu.be'],
  component: Youtube,
}, {
  component: FsIframe,
}, {
  component: Readability,
}];

/**
 * Pick the matcher entry responsible for rendering `entry`.
 *
 * A matcher applies when its `host` (string or array of strings) matches the
 * entry URL — or it has no `host` — AND its `test` predicate accepts the
 * entry — or it has no `test`.
 *
 * Fixes over the previous version: the inner arrow parameter no longer
 * shadows `host`, and `find` stops at the first hit instead of filtering the
 * whole list twice.
 *
 * @param {Object} entry - content entry with at least a `url` property.
 * @returns {Object|false} the first matching matcher entry, or `false`.
 */
export function componentMatcher(entry) {
  const entryUrl = entry.url;
  const match = matchers.find(({host, test}) =>
    (!host || [].concat(host).some(candidate => hostMatch(candidate, entryUrl))) &&
    (!test || test(entryUrl, entry))
  );
  return match || false;
}
kongaraju/antkorp
3party/gtk+-3.12.1/gtk/gtksearchengine.c
/*
 * Copyright (C) 2005 Novell, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 * Author: <NAME> <<EMAIL>>
 *
 * Based on nautilus-search-engine.c
 */

/* GtkSearchEngine: abstract base class for the file-chooser search backends.
 * Concrete backends (tracker, quartz, simple) implement the set_query /
 * start / stop / is_indexed vfuncs; results and lifecycle are reported back
 * through the signals defined below. */

#include "config.h"
#include "gtksearchengine.h"
#include "gtksearchenginesimple.h"
#include "gtksearchenginetracker.h"
#include "gtksearchenginequartz.h"
#include <gdk/gdk.h> /* for GDK_WINDOWING_QUARTZ */

#ifndef G_OS_WIN32
/* No tracker on Windows */
#define HAVE_TRACKER 1
#endif

/* Signal ids emitted by engines as a search progresses. */
enum
{
  HITS_ADDED,      /* new result URIs available (GList payload) */
  HITS_SUBTRACTED, /* previously reported results became invalid */
  FINISHED,        /* the search completed */
  ERROR,           /* the backend failed; payload is a message string */
  LAST_SIGNAL
};

static guint signals[LAST_SIGNAL];

G_DEFINE_ABSTRACT_TYPE (GtkSearchEngine, _gtk_search_engine, G_TYPE_OBJECT);

static void
finalize (GObject *object)
{
  /* Nothing of our own to free; chain straight up. */
  G_OBJECT_CLASS (_gtk_search_engine_parent_class)->finalize (object);
}

static void
_gtk_search_engine_class_init (GtkSearchEngineClass *class)
{
  GObjectClass *gobject_class;

  gobject_class = G_OBJECT_CLASS (class);
  gobject_class->finalize = finalize;

  signals[HITS_ADDED] =
    g_signal_new ("hits-added",
		  G_TYPE_FROM_CLASS (class),
		  G_SIGNAL_RUN_LAST,
		  G_STRUCT_OFFSET (GtkSearchEngineClass, hits_added),
		  NULL, NULL,
		  g_cclosure_marshal_VOID__POINTER,
		  G_TYPE_NONE, 1,
		  G_TYPE_POINTER);

  signals[HITS_SUBTRACTED] =
    g_signal_new ("hits-subtracted",
		  G_TYPE_FROM_CLASS (class),
		  G_SIGNAL_RUN_LAST,
		  G_STRUCT_OFFSET (GtkSearchEngineClass, hits_subtracted),
		  NULL, NULL,
		  g_cclosure_marshal_VOID__POINTER,
		  G_TYPE_NONE, 1,
		  G_TYPE_POINTER);

  signals[FINISHED] =
    g_signal_new ("finished",
		  G_TYPE_FROM_CLASS (class),
		  G_SIGNAL_RUN_LAST,
		  G_STRUCT_OFFSET (GtkSearchEngineClass, finished),
		  NULL, NULL,
		  g_cclosure_marshal_VOID__VOID,
		  G_TYPE_NONE, 0);

  signals[ERROR] =
    g_signal_new ("error",
		  G_TYPE_FROM_CLASS (class),
		  G_SIGNAL_RUN_LAST,
		  G_STRUCT_OFFSET (GtkSearchEngineClass, error),
		  NULL, NULL,
		  g_cclosure_marshal_VOID__STRING,
		  G_TYPE_NONE, 1,
		  G_TYPE_STRING);
}

static void
_gtk_search_engine_init (GtkSearchEngine *engine)
{
}

/* Pick the best available backend: tracker, then quartz, then the
 * plain directory-walking "simple" engine as a last resort. */
GtkSearchEngine *
_gtk_search_engine_new (void)
{
  GtkSearchEngine *engine = NULL;

#ifdef HAVE_TRACKER
  engine = _gtk_search_engine_tracker_new ();
  if (engine)
    return engine;
#endif

#ifdef GDK_WINDOWING_QUARTZ
  engine = _gtk_search_engine_quartz_new ();
  if (engine)
    return engine;
#endif

  if (g_thread_supported ())
    engine = _gtk_search_engine_simple_new ();

  return engine;
}

/* Thin vfunc dispatchers; each asserts the subclass implemented the hook. */

void
_gtk_search_engine_set_query (GtkSearchEngine *engine,
			      GtkQuery        *query)
{
  g_return_if_fail (GTK_IS_SEARCH_ENGINE (engine));
  g_return_if_fail (GTK_SEARCH_ENGINE_GET_CLASS (engine)->set_query != NULL);

  GTK_SEARCH_ENGINE_GET_CLASS (engine)->set_query (engine, query);
}

void
_gtk_search_engine_start (GtkSearchEngine *engine)
{
  g_return_if_fail (GTK_IS_SEARCH_ENGINE (engine));
  g_return_if_fail (GTK_SEARCH_ENGINE_GET_CLASS (engine)->start != NULL);

  GTK_SEARCH_ENGINE_GET_CLASS (engine)->start (engine);
}

void
_gtk_search_engine_stop (GtkSearchEngine *engine)
{
  g_return_if_fail (GTK_IS_SEARCH_ENGINE (engine));
  g_return_if_fail (GTK_SEARCH_ENGINE_GET_CLASS (engine)->stop != NULL);

  GTK_SEARCH_ENGINE_GET_CLASS (engine)->stop (engine);
}

gboolean
_gtk_search_engine_is_indexed (GtkSearchEngine *engine)
{
  g_return_val_if_fail (GTK_IS_SEARCH_ENGINE (engine), FALSE);
  g_return_val_if_fail (GTK_SEARCH_ENGINE_GET_CLASS (engine)->is_indexed != NULL, FALSE);

  return GTK_SEARCH_ENGINE_GET_CLASS (engine)->is_indexed (engine);
}

/* Signal-emission helpers called by backend implementations. */

void
_gtk_search_engine_hits_added (GtkSearchEngine *engine,
			       GList           *hits)
{
  g_return_if_fail (GTK_IS_SEARCH_ENGINE (engine));

  g_signal_emit (engine, signals[HITS_ADDED], 0, hits);
}

void
_gtk_search_engine_hits_subtracted (GtkSearchEngine *engine,
				    GList           *hits)
{
  g_return_if_fail (GTK_IS_SEARCH_ENGINE (engine));

  g_signal_emit (engine, signals[HITS_SUBTRACTED], 0, hits);
}

void
_gtk_search_engine_finished (GtkSearchEngine *engine)
{
  g_return_if_fail (GTK_IS_SEARCH_ENGINE (engine));

  g_signal_emit (engine, signals[FINISHED], 0);
}

void
_gtk_search_engine_error (GtkSearchEngine *engine,
			  const gchar     *error_message)
{
  g_return_if_fail (GTK_IS_SEARCH_ENGINE (engine));

  g_signal_emit (engine, signals[ERROR], 0, error_message);
}
HaHaSDP-UCSC/haha
HaHa/doxygen/html/search/files_6.js
<gh_stars>1-10 var searchData= [ ['main_2ec',['main.c',['../main_8c.html',1,'']]], ['messagequeue_2ec',['messagequeue.c',['../messagequeue_8c.html',1,'']]], ['messagequeue_2eh',['messagequeue.h',['../messagequeue_8h.html',1,'']]] ];
gmerz/MatterApi
matterapi/endpoints/sync_api/open_graph.py
""" Module to access the OpenGraph endpoints """ # pylint: disable=too-many-lines,too-many-locals,too-many-public-methods,too-few-public-methods from typing import Dict, Union from pydantic import BaseModel from ...models import OpenGraph, OpenGraphJsonBody from ..base import ApiBaseClass class OpenGraphApi(ApiBaseClass): """Endpoint for getting Open Graph metadata.""" def open_graph( self, *, json_body: Union[OpenGraphJsonBody, Dict], ) -> OpenGraph: """Get open graph metadata for url Get Open Graph Metadata for a specif URL. Use the Open Graph protocol to get some generic metadata about a URL. Used for creating link previews. Permissions: No permission required but must be logged in. Minimum Server Version: 3.10 Api Reference: `OpenGraph <https://api.mattermost.com/#operation/OpenGraph>`_ """ url = "/opengraph" if isinstance(json_body, BaseModel): json_json_body = json_body.dict(exclude_unset=True) else: json_json_body = json_body request_kwargs = { "url": url, "json": json_json_body, } # pylint: disable-next=protected-access with self.client._get_httpx_client() as httpx_client: response = httpx_client.post( **request_kwargs, ) if self.skip_response_parsing: return response if response.status_code == 200: response200 = OpenGraph.parse_obj(response.json()) return response200 return response
MisterZhouZhou/pythonLearn
weixinrabit/wxtools/wxConfigSingleton.py
<gh_stars>1-10 ''' 配置单利 ''' def SingleTon(cls, *args, **kwargs): instances = {} def _singleton(): if cls not in instances: instances[cls] = cls(*args, **kwargs) return instances[cls] return _singleton @SingleTon class WXConfigSingleton(object): # 自动登录 auto_reply = False # 自己的id my_user_name = '' # 机器人类型, xb(小冰),tl(图灵) robot_type = ''
cantona/NT6
nitan/adm/daemons/story/beihai.c
<filename>nitan/adm/daemons/story/beihai.c // story:beihai 北海傳説 #include <ansi.h> int give_gift(string name); nosave mixed *story = ({ "華山。", "李鐵嘴打了個哈欠,百無聊賴。", "一人忽然闖了進來,看看李鐵嘴,忽然道:你的卦可準?", "李鐵嘴眼睛瞪得大大的,道:不準不要錢。", "來人哈哈大笑道:好!好!那麼你算算,明天下不下雨?", "李鐵嘴擺好卦籤,一頓亂算。", "來人冷冷的看着。", "李鐵嘴道:明天午時應該下雨,一寸三分!", "來人道:倘然不對,那又如何?", "李鐵嘴白眼一翻,道:哈!那你就來取我性命好了!", "來人冷笑一聲,轉身即走。", "李鐵嘴默默不作言語,收拾行囊,從後門溜走了。", "......", "北海龍宮。", "老龍王靜坐,心裏暗道:眼看午時就要過,這卦不就錯了?我不下雨,天上能出雲彩?", "忽然天宮旨意到:張天師求雨,玉帝已準。午時華山降雨一寸三分!", "啊?老龍王翻翻白眼,吐了口白沫,倒在地上。", "半天過後,龍王爬起,心中忿忿:豈有此理?慢來,我就降它一寸四分雨,不還是我贏?", "......", "龍王降過雨,趕到李鐵嘴處要人頭:人呢?人呢?跑了!!!", "......", "玉帝大怒:好麼,和我作對!", (: give_gift, "beihai" :), "聽説北海龍王(king of dragon)降雨失責,被貶謫至凡間。", }); string prompt() { return HIM "【神話】" NOR; } void create() { seteuid(getuid()); } mixed query_story_message(int step) { return step < sizeof(story) ? story[step] : 0; } int give_gift(string name) { object ob; ob = new(CLASS_D("misc/") + name); if (! objectp(ob)) return 0; NPC_D->place_npc(ob); if (! objectp(environment(ob))) { destruct(ob); return 0; } CHANNEL_D->do_channel(find_object(STORY_D), "sys", ob->name() + "出現在" + environment(ob)->short() + "(" + base_name(environment(ob)) + ")。"); return 1; }
lshaoge951/remote_aus
src/main/java/com/haitang/project/tool/audio/client/Client.java
package com.haitang.project.tool.audio.client;

// Client UI for the text/voice chat tool.
//
// Fixes over the previous version:
//  - removed a stray "<gh_stars>" dataset artifact before the package line;
//  - the constructor declared a LOCAL `Socket client` that shadowed the
//    `client` field, so the field was never assigned — it now assigns the
//    field;
//  - comments translated to English.
//
// NOTE(review): the constructor never returns — it enters a receive loop —
// and the empty catch around the connect leaves `in` null (NPE in the loop)
// when the server is unreachable; `show()`/`action()` are deprecated AWT
// APIs. Left as-is to preserve behavior; confirm before restructuring.
import java.io.*;
import java.awt.*;
import java.awt.event.*;
import java.net.*;
import com.haitang.project.tool.audio.Playback;

public class Client extends Frame {
    TextField tf = new TextField(20);
    TextArea ta = new TextArea();
    Button send = new Button("send");
    Button voiceChat = new Button("voiceChat");
    Socket client;          // connection to the chat server (port 5000)
    InputStream in;         // raw server input stream
    OutputStream out;       // raw server output stream
    BufferedReader br;
    BufferedWriter bw;

    public Client() {
        super("Client");
        add("North", tf);
        add("Center", ta);
        add("South", send);
        add("East", voiceChat);
        setSize(250, 250);
        show();
        addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                dispose();
                System.exit(0);
            }
        });
        try {
            //Socket client=new Socket("127.0.0.1",5000);
            // Assign the FIELD (previous code declared a shadowing local,
            // leaving the field permanently null).
            client = new Socket("127.0.0.1", 5000);
            ta.append("Connect to:" + client.getInetAddress().getHostName() + "\n\n");
            in = client.getInputStream();
            br = new BufferedReader(new InputStreamReader(in));
            out = client.getOutputStream();
            bw = new BufferedWriter(new OutputStreamWriter(out));
        } catch (IOException ioe) {}
        // Receive loop: blocks forever reading fixed 200-byte messages.
        while (true) {
            try {
                byte[] buf = new byte[200];
                in.read(buf);
                String str = new String(buf);
                ta.append("Server say:" + str);
                ta.append("\n");
            } catch (IOException e) {
                System.out.print(e.getMessage());
            }
        }
    }

    public boolean action(Event evt, Object arg) {
        // Send the typed message when the send button is clicked.
        if (evt.target.equals(send)) {
            try {
                String str = tf.getText();
                byte[] buf = str.getBytes();
                tf.setText(null);
                out.write(buf);
                ta.append("I say:" + str);
                ta.append("\n");
            } catch (IOException ioe) {
                System.out.print(ioe.getMessage());
            }
        }
        // Start a voice chat session when the voiceChat button is clicked.
        else if (evt.target.equals(voiceChat)) {
            try {
                //Socket cli=new Socket("127.0.0.1",6000);
                Socket cli = new Socket("127.0.0.1", 6000);
                Capture cap = new Capture(cli);
                cap.start();
                // Socket ser=new Socket("172.19.73.113",7000);
                // se=ser.accept();
                // Capture cap=new Capture(ser);
                // cap.start();
                // Playback player=new Playback(ser);
                // player.start();
            } catch (Exception e) {}
        }
        return true;
    }

    /*public static void main(String[] args) {
        Client client=new Client();
    } */
}
Infornia/BrewStoryBro
Library/Formula/texinfo.rb
class Texinfo < Formula desc "Official documentation format of the GNU project" homepage "http://www.gnu.org/software/texinfo/" url "http://ftpmirror.gnu.org/texinfo/texinfo-5.2.tar.gz" mirror "http://ftp.gnu.org/gnu/texinfo/texinfo-5.2.tar.gz" sha1 "dc54edfbb623d46fb400576b3da181f987e63516" bottle do sha1 "988fc8c195a43ad8b9dea1da2827fb24c794c200" => :yosemite sha1 "40453ac408ede2cb5470935a5c5d2360f64032b5" => :mavericks sha1 "1ac4d9ac120248a5b71cb45199c01bad850a7655" => :mountain_lion end keg_only :provided_by_osx, <<-EOS.undent Software that uses TeX, such as lilypond and octave, require a newer version of these files. EOS def install system "./configure", "--disable-dependency-tracking", "--disable-install-warnings", "--prefix=#{prefix}" system "make", "install" # The install warns about needing to install texinfo.tex and some other support files. # The texinfo.tex in tex-live 2008 is identical to texinfo's version, so we can ignore this. # However, it complains about installing epsf.tex in TEXMF/tex/generic/dvips, so let's do that... # This somewhat breaks the homebrew philosophy, I am sorry. # Also, we don't depend on tex-live, but this directory only exists if it is installed. if File.exist? "#{HOMEBREW_PREFIX}/share/texmf-dist/" then cp "doc/epsf.tex", "#{HOMEBREW_PREFIX}/share/texmf-dist/tex/generic/dvips/" end end test do (testpath/"test.texinfo").write <<-EOS.undent @ifnottex @node Top @top Hello World! @end ifnottex @bye EOS system "#{bin}/makeinfo", "test.texinfo" assert_match /Hello World!/, File.read("test.info") end end
linklab-uva/deepracing
DCNN-Pytorch/deepracing_models/data_loading/__init__.py
<reponame>linklab-uva/deepracing<filename>DCNN-Pytorch/deepracing_models/data_loading/__init__.py import bisect import numpy as np class TimeIndex: def __init__(self, time_array : np.ndarray, data_values : np.ndarray): if np.any((time_array[1:]-time_array[:-1])<0): raise ValueError("time_array must be everywhere non-decreasing") self.time_array = time_array self.data_values = data_values def sample(self, tmin : float, tmax : float): if tmin>=tmax: raise ValueError("tmin (%f) cannot be greater-equal to tmax (%f)" % (tmin, tmax)) if tmin < self.time_array[0]: raise ValueError("tmin (%f) cannot be outside range of time values [%f, %f]" %(tmin, self.time_array[0], self.time_array[-1])) if tmax > self.time_array[-1]: raise ValueError("tmax (%f) cannot be outside range of time values [%f, %f]" %(tmax, self.time_array[0], self.time_array[-1])) leftbisect = max(bisect.bisect_left(self.time_array, tmin) - 1, 0) rightbisect = min(bisect.bisect_right(self.time_array, tmax) + 1, self.time_array.shape[0]-1) return (leftbisect, rightbisect), self.data_values[leftbisect:rightbisect]
mennat1/simple-yield-farm-truffle-version
node_modules/@ethereum-waffle/compiler/dist/esm/config/loadConfig.js
import fs from 'fs'; import path from 'path'; export async function loadConfig(configPath) { if (configPath) { return require(path.join(process.cwd(), configPath)); } else if (fs.existsSync('./waffle.json')) { return require(path.join(process.cwd(), './waffle.json')); } else { return {}; } }
hcmaza/nortia
src/main/java/ar/edu/undec/nortia/model/Banco.java
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package ar.edu.undec.nortia.model;

// Fix: removed stray "<reponame>"/"<gh_stars>" dataset artifacts that
// preceded the file header and made the file uncompilable.
import java.io.Serializable;
import java.util.List;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.validation.constraints.Size;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

/**
 * JPA entity for the {@code ap.banco} table (a bank). Ids come from the
 * {@code ap.banco_id_seq} database sequence; each bank owns a list of
 * {@link Cuentabancaria} (bank accounts) mapped by their {@code bancoid}.
 *
 * @author gongono
 */
@Entity
@Table(name = "banco", schema = "ap")
@SequenceGenerator(name="banco_id_seq", sequenceName="ap.banco_id_seq", allocationSize=1)
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "Banco.findAll", query = "SELECT b FROM Banco b"),
    @NamedQuery(name = "Banco.findById", query = "SELECT b FROM Banco b WHERE b.id = :id"),
    @NamedQuery(name = "Banco.findByRazonsocial", query = "SELECT b FROM Banco b WHERE b.razonsocial = :razonsocial"),
    @NamedQuery(name = "Banco.findBySucursal", query = "SELECT b FROM Banco b WHERE b.sucursal = :sucursal")})
public class Banco implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator="banco_id_seq")
    @Basic(optional = false)
    @Column(name = "id")
    private Integer id;

    // Legal/business name of the bank.
    @Size(max = 150)
    @Column(name = "razonsocial")
    private String razonsocial;

    // Branch identifier.
    @Size(max = 150)
    @Column(name = "sucursal")
    private String sucursal;

    @OneToMany(mappedBy = "bancoid")
    private List<Cuentabancaria> cuentabancariaList;

    public Banco() {
    }

    public Banco(Integer id) {
        this.id = id;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getRazonsocial() {
        return razonsocial;
    }

    public void setRazonsocial(String razonsocial) {
        this.razonsocial = razonsocial;
    }

    public String getSucursal() {
        return sucursal;
    }

    public void setSucursal(String sucursal) {
        this.sucursal = sucursal;
    }

    @XmlTransient
    public List<Cuentabancaria> getCuentabancariaList() {
        return cuentabancariaList;
    }

    public void setCuentabancariaList(List<Cuentabancaria> cuentabancariaList) {
        this.cuentabancariaList = cuentabancariaList;
    }

    @Override
    public int hashCode() {
        int hash = 0;
        hash += (id != null ? id.hashCode() : 0);
        return hash;
    }

    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof Banco)) {
            return false;
        }
        Banco other = (Banco) object;
        if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "ar.edu.undec.nortia.model.Banco[ id=" + id + " ]";
    }

}
geoff604/taskunifier-geoff
TaskUnifier/TaskUnifierGui/src/main/java/com/leclercb/taskunifier/gui/components/tasks/table/menu/TaskTableMenu.java
/*
 * TaskUnifier
 * Copyright (c) 2013, <NAME>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   - Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *
 *   - Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 *
 *   - Neither the name of TaskUnifier or the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
 * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.leclercb.taskunifier.gui.components.tasks.table.menu;

// Fix: removed a stray "<gh_stars>" dataset artifact before the license
// header that made the file uncompilable; added documentation. Code is
// otherwise unchanged.
import com.leclercb.commons.api.event.listchange.ListChangeEvent;
import com.leclercb.commons.api.event.listchange.ListChangeListener;
import com.leclercb.commons.api.event.listchange.WeakListChangeListener;
import com.leclercb.commons.api.event.propertychange.WeakPropertyChangeListener;
import com.leclercb.taskunifier.api.models.BasicModel;
import com.leclercb.taskunifier.api.models.ModelStatus;
import com.leclercb.taskunifier.api.models.templates.TaskTemplateFactory;
import com.leclercb.taskunifier.gui.actions.*;
import com.leclercb.taskunifier.gui.translations.Translations;
import com.leclercb.taskunifier.gui.utils.ComponentFactory;
import com.leclercb.taskunifier.gui.utils.ImageUtils;
import com.leclercb.taskunifier.gui.utils.TemplateUtils;

import javax.swing.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;

/**
 * Context menu shown on the task table. Builds the task actions once and
 * keeps the "add from template" submenu in sync with the template factory
 * via weak listeners (so the menu does not leak when discarded).
 */
public class TaskTableMenu extends JPopupMenu implements ListChangeListener, PropertyChangeListener {

    // Submenu listing task templates; rebuilt whenever templates change.
    private JMenu templatesMenu;

    public TaskTableMenu() {
        super(Translations.getString("general.task"));
        this.initialize();
    }

    // Assemble the fixed menu structure (edit / add / view / export / delete).
    private void initialize() {
        this.add(new ActionEditTasks(16, 16));
        this.add(ComponentFactory.createPostponeMenu());
        this.addSeparator();
        this.add(new ActionAddTask(16, 16));
        this.initializeTemplateMenu();
        this.add(new ActionAddSubTask(16, 16));
        this.add(new ActionAddSubTaskAtSameLevel(16, 16));
        this.add(new ActionDuplicateTasks(16, 16));
        this.addSeparator();
        this.add(new ActionRefresh(16, 16));
        this.addSeparator();
        this.add(new JMenuItem(new ActionCollapseAll(16, 16)));
        this.add(new JMenuItem(new ActionExpandAll(16, 16)));
        this.add(new JMenuItem(new ActionSelectParentTasks(16, 16)));
        this.addSeparator();
        this.add(new ActionCreateTaskTemplateFromTask(16, 16));
        this.add(new ActionCreateNoteFromTask(16, 16));
        this.add(new ActionMailTo(16, 16));
        this.add(new ActionPrintSelectedModels(16, 16));
        this.addSeparator();
        this.add(new ActionDelete(16, 16));
    }

    // Build the templates submenu and subscribe (weakly) to template changes.
    private void initializeTemplateMenu() {
        this.templatesMenu = new JMenu(
                Translations.getString("action.add_template_task"));
        this.templatesMenu.setToolTipText(Translations.getString("action.add_template_task"));
        this.templatesMenu.setIcon(ImageUtils.getResourceImage(
                "template.png",
                16,
                16));
        this.add(this.templatesMenu);

        TemplateUtils.updateTemplateList(
                ActionAddTemplateTask.ADD_TASK_LISTENER,
                this.templatesMenu);

        TaskTemplateFactory.getInstance().addPropertyChangeListener(
                BasicModel.PROP_MODEL_STATUS,
                new WeakPropertyChangeListener(
                        TaskTemplateFactory.getInstance(),
                        this));

        TaskTemplateFactory.getInstance().addListChangeListener(
                new WeakListChangeListener(
                        TaskTemplateFactory.getInstance(),
                        this));
    }

    /** Templates were added/removed: rebuild the submenu. */
    @Override
    public void listChange(ListChangeEvent event) {
        TemplateUtils.updateTemplateList(
                ActionAddTemplateTask.ADD_TASK_LISTENER,
                this.templatesMenu);
    }

    /** A template's status changed; rebuild only when end-user visibility flipped. */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if (((ModelStatus) evt.getOldValue()).isEndUserStatus() != ((ModelStatus) evt.getNewValue()).isEndUserStatus()) {
            TemplateUtils.updateTemplateList(
                    ActionAddTemplateTask.ADD_TASK_LISTENER,
                    this.templatesMenu);
        }
    }
}
vivaxy/algorithms
python/problems/roman_to_integer.py
<gh_stars>1-10 """ https://leetcode.com/problems/roman-to-integer/ https://leetcode.com/submissions/detail/130844466/ """ class Solution: def romanToInt(self, s): """ :type s: str :rtype: int """ dic = { 'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000 } totalValue = 0 prevValue = 0 sArray = list(s) sArray.reverse() for char in sArray: value = dic[char] if value >= prevValue: totalValue += value else: totalValue -= value prevValue = value return totalValue import unittest class Test(unittest.TestCase): def test(self): solution = Solution() self.assertEqual(solution.romanToInt('I'), 1) self.assertEqual(solution.romanToInt('IV'), 4) if __name__ == '__main__': unittest.main()
ShotaOd/dabuntu
carbon-component/src/main/java/org/carbon/component/annotation/Component.java
<filename>carbon-component/src/main/java/org/carbon/component/annotation/Component.java package org.carbon.component.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * This annotation is for register component. * This annotation can be on Class or Method, and behave differently. * <h1>At Type</h1> * Produce Component Class itself that is injected to other Class(annotated by @Component) field annotated by {@link Inject} * <br /> * <h1>At Method</h1> * (using with {@link Configuration} annotation) * Produce Component by invoking annotated method * @author <NAME> 2016/10/02 * * @see Inject * @see Configuration */ @Target({ElementType.ANNOTATION_TYPE, ElementType.TYPE, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) public @interface Component { }
Histler/Infodota
app/src/main/java/com/badr/infodota/counter/task/TruepickerCounterLoadRequest.java
<filename>app/src/main/java/com/badr/infodota/counter/task/TruepickerCounterLoadRequest.java package com.badr.infodota.counter.task; import android.content.Context; import com.badr.infodota.BeanContainer; import com.badr.infodota.base.service.TaskRequest; import com.badr.infodota.counter.api.TruepickerHero; import com.badr.infodota.counter.service.CounterService; import java.util.List; /** * Created by ABadretdinov * 20.08.2015 * 16:53 */ public class TruepickerCounterLoadRequest extends TaskRequest<TruepickerHero.List> { private Context mContext; private List<Integer> mAllies; private List<Integer> mEnemies; public TruepickerCounterLoadRequest(Context context, List<Integer> allies, List<Integer> enemies) { super(TruepickerHero.List.class); mContext = context; mAllies = allies; mEnemies = enemies; } @Override public TruepickerHero.List loadData() throws Exception { BeanContainer beanContainer = BeanContainer.getInstance(); CounterService service = beanContainer.getCounterService(); return service.getCounters(mContext, mAllies, mEnemies, 1); } }
kieuloc29/BOTLO
nodemodules/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/attributes.js
<filename>nodemodules/node_modules/hyntax/lib-es5/tree-constructor-context-handlers/attributes.js<gh_stars>0 "use strict"; var _require = require('../constants/token-types'), TOKEN_ATTRIBUTE_KEY = _require.TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT = _require.TOKEN_ATTRIBUTE_ASSIGNMENT, TOKEN_OPEN_TAG_END = _require.TOKEN_OPEN_TAG_END, TOKEN_OPEN_TAG_END_SCRIPT = _require.TOKEN_OPEN_TAG_END_SCRIPT, TOKEN_OPEN_TAG_END_STYLE = _require.TOKEN_OPEN_TAG_END_STYLE; var _require2 = require('../constants/tree-constructor-contexts'), ATTRIBUTE_CONTEXT = _require2.ATTRIBUTE_CONTEXT; function handlerAttributeStart(state) { if (state.currentNode.content.attributes === undefined) { state.currentNode.content.attributes = []; } // new empty attribute state.currentNode.content.attributes.push({}); state.currentContext = { parentRef: state.currentContext, type: ATTRIBUTE_CONTEXT }; return state; } function handleOpenTagEnd(state) { state.currentContext = state.currentContext.parentRef; return state; } module.exports = function attributes(token, state) { var ATTRIBUTE_START_TOKENS = [TOKEN_ATTRIBUTE_KEY, TOKEN_ATTRIBUTE_ASSIGNMENT]; if (ATTRIBUTE_START_TOKENS.indexOf(token.type) !== -1) { return handlerAttributeStart(state); } var ATTRIBUTES_END_TOKENS = [TOKEN_OPEN_TAG_END, TOKEN_OPEN_TAG_END_SCRIPT, TOKEN_OPEN_TAG_END_STYLE]; if (ATTRIBUTES_END_TOKENS.indexOf(token.type) !== -1) { return handleOpenTagEnd(state); } state.caretPosition++; return state; };
carefree0910/carefree-learn
cflearn/models/cv/encoder/backbone/settings/vgg.py
<reponame>carefree0910/carefree-learn from typing import List from collections import OrderedDict from ..api import Preset remove_layers: List[str] = [] target_layers = OrderedDict( slice0="stage0", slice1="stage1", slice2="stage2", slice3="stage3", ) vgg19_large_target_layers = OrderedDict( slice0="stage0", slice1="stage1", slice2="stage2", slice3="stage3_first", slice4="stage3_second", slice5="stage4", ) rep_vgg_remove_layers: List[str] = [] rep_vgg_target_layers = OrderedDict( stage1="stage1", stage2="stage2", stage3="stage3", stage4_first="stage4_first", stage4_second="stage4_second", stage5="stage5", ) @Preset.register_settings() class VGGPreset(Preset): remove_layers = { "vgg16": remove_layers, "vgg19": remove_layers, "vgg19_lite": remove_layers, "vgg19_large": remove_layers, "vgg_style": remove_layers, "rep_vgg": rep_vgg_remove_layers, "rep_vgg_lite": rep_vgg_remove_layers, "rep_vgg_large": rep_vgg_remove_layers, } target_layers = { "vgg16": target_layers, "vgg19": target_layers, "vgg19_lite": target_layers, "vgg19_large": vgg19_large_target_layers, "vgg_style": target_layers, "rep_vgg": rep_vgg_target_layers, "rep_vgg_lite": rep_vgg_target_layers, "rep_vgg_large": rep_vgg_target_layers, } increment_configs = { "vgg16": {"out_channels": [64, 128, 256, 512]}, "vgg19": {"out_channels": [64, 128, 256, 512]}, "vgg19_lite": {"out_channels": [64, 128, 256, 512]}, "vgg19_large": {"out_channels": [64, 128, 256, 512, 512, 512]}, "vgg_style": {"out_channels": [64, 128, 256, 512]}, "rep_vgg": {"out_channels": [64, 128, 256, 512, 512, 2048]}, "rep_vgg_lite": {"out_channels": [48, 48, 96, 192, 192, 1280]}, "rep_vgg_large": {"out_channels": [64, 160, 320, 640, 640, 2560]}, } __all__ = ["VGGPreset"]
petlenz/tmech
include/tmech/tensor/abs_tensor_wrapper_bones.h
<reponame>petlenz/tmech<filename>include/tmech/tensor/abs_tensor_wrapper_bones.h /*************************************************************************** * Copyright (c) <NAME> * * * * Distributed under the terms of the BSD 3-Clause License. * * * * The full license is in the file LICENSE, distributed with this software. * ****************************************************************************/ #ifndef ABS_TENSOR_WRAPPER_BONES_H #define ABS_TENSOR_WRAPPER_BONES_H namespace detail { /** * @class abs_tensor_wrapper * @brief Element-wise absolute value. * * @tparam _Tensor Tensor expression from which the * element-wise absolute value is to be taken. */ template <typename _Tensor> class abs_tensor_wrapper : public tensor_base<abs_tensor_wrapper<_Tensor>> { using data_type_tensor = typename std::remove_const<typename std::remove_reference<_Tensor>::type>::type; public: using size_type = std::size_t; using value_type = typename data_type_tensor::value_type; constexpr abs_tensor_wrapper(data_type_tensor const& __data)noexcept; constexpr abs_tensor_wrapper(abs_tensor_wrapper const& __data)noexcept; template<typename ...Indicies> constexpr inline auto operator ()(Indicies ... __indicies)const noexcept; static constexpr inline auto dimension()noexcept; static constexpr inline auto rank()noexcept; constexpr inline auto evaluate()noexcept; private: _Tensor _data; }; } // NAMESPACE DETAIL #endif // ABS_TENSOR_WRAPPER_BONES_H
vimeda/goengine
extension/amqp/amqp.go
package amqp import ( "io" "github.com/hellofresh/goengine" "github.com/streadway/amqp" ) // NotificationChannel represents a channel for notifications type NotificationChannel interface { Publish(exchange, queue string, mandatory, immediate bool, msg amqp.Publishing) error Consume(queue, consumer string, autoAck, exclusive, noLocal, noWait bool, args amqp.Table) (<-chan amqp.Delivery, error) Qos(prefetchCount, prefetchSize int, global bool) error } // setup returns a connection and channel to be used for the Queue setup func setup(url, queue string) (io.Closer, NotificationChannel, error) { conn, err := amqp.Dial(url) if err != nil { return nil, nil, err } ch, err := conn.Channel() if err != nil { return nil, nil, err } if _, err := ch.QueueDeclare(queue, true, false, false, false, nil); err != nil { return nil, nil, err } return conn, ch, nil } // DirectQueueConsume returns a Consume func that will connect to the provided AMQP server and create a queue for direct message delivery func DirectQueueConsume(amqpDSN, queue string) (Consume, error) { if _, err := amqp.ParseURI(amqpDSN); err != nil { return nil, goengine.InvalidArgumentError("amqpDSN") } if len(queue) == 0 { return nil, goengine.InvalidArgumentError("queue") } return func() (io.Closer, <-chan amqp.Delivery, error) { conn, ch, err := setup(amqpDSN, queue) if err != nil { return nil, nil, err } // Indicate we only want 1 message to be acknowledge at a time. if err := ch.Qos(1, 0, false); err != nil { return nil, nil, err } // Since there can be multiple consumers, fair distribution of deliveries is required deliveries, err := ch.Consume(queue, "", false, false, false, false, nil) return conn, deliveries, err }, nil }
Miraculous-Bots/Ladybug
commands/economy/leaderboard.js
const Discord = require("discord.js");

/**
 * Leaderboard command: shows the top 10 users by credits.
 *
 * @param {string} prefix  Command prefix (unused here).
 * @param {string} cmd     Invoked command name (unused here).
 * @param {Object} client  Discord client; `client.con` is the SQL connection.
 * @param {Array}  args    Command arguments (unused here).
 * @param {Object} message Triggering message; the reply goes to its channel.
 * @param {Object} config  Bot configuration (unused here).
 */
module.exports.run = async (prefix, cmd, client, args, message, config) => {
  const db = client.con;

  db.query("SELECT * FROM credits ORDER BY credits DESC LIMIT 10", [], async (err, results) => {
    // FIX: the query error was silently ignored, which made the callback
    // crash on `results` being undefined.
    if (err) {
      console.error(err);
      return message.channel.send("Could not load the leaderboard.");
    }

    let text = "";
    for (const r of results) {
      // FIX: `user` was assigned without declaration (implicit global).
      const user = await client.users.get(r.id);
      if (user) {
        text += user.tag + ": " + r.credits + "\n";
      } else {
        // Fall back to a placeholder tag for users the client cannot resolve.
        text += "Unknown#0000" + ": " + r.credits + "\n";
      }
    }

    const embed = new Discord.RichEmbed()
      .setTitle("Leaderboard - Mitsuha")
      .setColor("#dd2b4e")
      .setDescription(text);

    return message.channel.send(embed);
  });
};
tanishiking/dotty
tests/untried/neg/unchecked3.scala
// Compiler-warning test fixture: each `/* warn */` marker flags a pattern
// expected to produce an "unchecked" (erasure) warning, and `/* nowarn */`
// one expected to compile silently. The code itself is deliberately odd —
// do not "clean it up", the shapes are the test.
sealed trait A2[T1]
final class B2[T1, T2] extends A2[T1]

sealed trait A[T]
final class B[T] extends A[T]

sealed trait A1[T]
trait B1[T] extends A1[T]
trait C1[T] extends A1[T]
trait D1[T] extends A1[Int]
trait E1[T] extends B1[Int]
trait F1[T] extends B1[T]

object MiscUnchecked {
  /* nowarn */ def knownType1(x: A[Int]) = x match { case _: B[Int] if true => 1 }
  /* nowarn */ def knownType2(x: B[Int]) = x match { case _: A[Int] if true => 1 }
  /* nowarn */ def tparamLeakage1(x: Any) = x match { case Array() => 1 }
  /* nowarn */ def tparamLeakage2(x: Any) = x match { case List() => 1 }

  // E1[Double] implies B1[Int], but B1[Int] does not imply E1[Double], even if .isInstanceOf[E1[_]]
  // F1[Int] implies B1[Int], and B1[Int] implies F1[Int]

  /* nowarn */ def peerTypes1(x: B1[Int]) = x match { case _: C1[Int] => true }
  /* warn */ def peerTypes2(x: B1[Int]) = x match { case _: E1[Double] => true }
  /* warn */ def peerTypes3(x: B1[_]) = x match { case _: F1[Double] => true }
  /* nowarn */ def peerTypes4(x: B1[Int]) = x match { case _: F1[Int] => true }

  /* warn */ def twotypes1[T](x: B2[T, Int]) = x match { case _: A2[Int] => true }
  /* nowarn */ def twotypes2[T](x: B2[Int, T]) = x match { case _: A2[Int] => true }
  /* nowarn */ def twotypes3(x: A2[Int]) = x match { case _: B2[Int, _] => true }
  /* nowarn */ def twotypes4[T](x: A2[T]) = x match { case _: B2[T, _] => true }
  /* warn */ def twotypes5[T](x: A2[T]) = x match { case _: B2[_, Int] => true }
}

object Arrays {
  def f1(x: Any) = x match {
    /* nowarn */ case _: Array[Int] => ()
    /* nowarn */ case _: Array[Boolean] => ()
    /* nowarn */ case _: Array[String] => ()
    /* warn */ case _: Array[List[String]] => ()
    /* nowarn */ case _: Array[Array[String]] => ()
    /* nowarn */ case _: Array[Array[Array[String]]] => ()
    /* warn */ case _: Array[Array[List[String]]] => ()
  }
  def f2(x: Array[_]) = x match {
    /* nowarn */ case _: Array[Int] => ()
    /* nowarn */ case _: Array[Boolean] => ()
    /* nowarn */ case _: Array[String] => ()
    /* warn */ case _: Array[List[String]] => ()
    /* nowarn */ case _: Array[Array[String]] => ()
    /* nowarn */ case _: Array[Array[Array[String]]] => ()
    /* warn */ case _: Array[Array[List[String]]] => ()
  }
  def f3[T](x: Array[T]) = x match {
    /* nowarn */ case _: Array[Int] => ()
    /* nowarn */ case _: Array[Boolean] => ()
    /* nowarn */ case _: Array[String] => ()
    /* warn */ case _: Array[List[String]] => ()
    /* nowarn */ case _: Array[Array[String]] => ()
    /* warn */ case _: Array[List[Array[String]]] => ()
    /* warn */ case _: Array[Array[List[String]]] => ()
  }
}

object Matching {
  class Q {
    type A
    type B <: A

    def f(xs: Traversable[B]) = xs match {
      /* nowarn */ case xs: List[A] => xs.head
      /* nowarn */ case xs: Seq[B] => xs.head
      /* warn */ case xs: Set[A] => xs.head
    }
    def f2[T <: B](xs: Traversable[T]) = xs match {
      /* nowarn */ case xs: List[B with T] => xs.head
      /* nowarn */ case xs: Seq[A] => xs.head
      /* nowarn */ case xs: Set[T] => xs.head
    }
  }
}
sgerrand/homebrew
Library/Formula/csshx.rb
# Homebrew formula for csshX, a cluster-ssh tool driving multiple
# Terminal.app windows from one master window.
class Csshx < Formula
  desc "Cluster ssh tool for Terminal.app"
  homepage "https://github.com/brockgr/csshx"
  # NOTE(review): Google Code hosting has been shut down, so the primary
  # url below is presumably dead and fetches fall through to the MacPorts
  # mirror — confirm and consider promoting the mirror (or a GitHub
  # release) to the primary url.
  url "https://csshx.googlecode.com/files/csshX-0.74.tgz"
  mirror "https://distfiles.macports.org/csshX/csshX-0.74.tgz"
  sha256 "eaa9e52727c8b28dedc87398ed33ffa2340d6d0f3ea9d261749c715cb7a0e9c8"

  head "https://github.com/brockgr/csshx.git"

  # Upstream ships a single self-contained Perl script; no build step.
  bottle :unneeded

  def install
    bin.install "csshX"
  end
end
brunomichalski/Treinamento-javascript-coamo
Javascript/Prof. Silvano/Listas/2-variaveis-tipos-operadores/exec07.js
"use strict" // CONSTANTES PARA ESTILOS DO CARACTERE (ST = STYLE) const ST_RESET = "\x1b[0m" const ST_BRIGHT = "\x1b[1m" const ST_DIM = "\x1b[2m" const ST_UNDERSCORE = "\x1b[4m" const ST_BLINK = "\x1b[5m" const ST_REVERSE = "\x1b[7m" const ST_HIDDEN = "\x1b[8m" // CONSTANTES PARA CORES DO CARACTERE (FG = FOREGROUND) const FG_BLACK = "\x1b[30m" const FG_RED = "\x1b[31m" const FG_GREEN = "\x1b[32m" const FG_YELLOW = "\x1b[33m" const FG_BLUE = "\x1b[34m" const FG_MAGENTA = "\x1b[35m" const FG_CYAN = "\x1b[36m" const FG_WHITE = "\x1b[37m" // CONSTANTES PARA CORES DE FUNDO (BG = BACKGROUND) const BG_BLACK = "\x1b[40m" const BG_RED = "\x1b[41m" const BG_GREEN = "\x1b[42m" const BG_YELLOW = "\x1b[43m" const BG_BLUE = "\x1b[44m" const BG_MAGENTA = "\x1b[45m" const BG_CYAN = "\x1b[46m" const BG_WHITE = "\x1b[47m" console.log(ST_UNDERSCORE+" "); console.log(ST_UNDERSCORE + "CORES & ESTILOS" + FG_WHITE, ST_RESET +"\n"); console.log(FG_WHITE,ST_UNDERSCORE +"BRANCO"+ ST_RESET); console.log(FG_BLACK,ST_UNDERSCORE +"PRETO"+ ST_RESET); console.log(FG_RED,ST_UNDERSCORE +"VERMELHO"+ ST_RESET); console.log(FG_GREEN,ST_UNDERSCORE +"VERDE"+ ST_RESET); console.log(FG_BLUE,ST_UNDERSCORE +"AZUL"+ ST_RESET); console.log(FG_CYAN,ST_UNDERSCORE +"CIANO"+ ST_RESET); console.log(FG_MAGENTA,ST_UNDERSCORE +"MAGENTA"+ ST_RESET); console.log(FG_YELLOW,ST_UNDERSCORE +"AMARELO"+ ST_RESET);
shin-eunsu/POCU_Cplusplus
Assignment2/DeusExMachina.cpp
#include "DeusExMachina.h"

namespace assignment2
{
	// Lazily-constructed singleton. The instance is intentionally never
	// freed; it lives for the duration of the program.
	DeusExMachina* DeusExMachina::GetInstance()
	{
		static DeusExMachina* deusExMachina = new DeusExMachina();
		return deusExMachina;
	}

	// Not implemented yet.
	void DeusExMachina::Travel() const
	{
	}

	// Registers a vehicle. Returns false for a null vehicle or when the
	// garage is full, true otherwise.
	//
	// FIX: the original compared against the magic number 10
	// (`mDeusCnt > 10`) alongside `mDeusCnt < MAXCOUNT`, which could
	// return true without storing anything once the count reached
	// MAXCOUNT. The capacity check now uses MAXCOUNT consistently.
	bool DeusExMachina::AddVehicle(Vehicle* vehicle)
	{
		if (vehicle == nullptr || mDeusCnt >= MAXCOUNT)
		{
			return false;
		}

		// NOTE(review): the actual insertion was commented out in the
		// original — presumably work in progress; confirm before enabling.
		//mVehicle[mDeusCnt++] = vehicle;
		return true;
	}

	// Removes the vehicle at index i, shifting later entries down to keep
	// the array contiguous. Returns false when i is out of range.
	bool DeusExMachina::RemoveVehicle(unsigned int i)
	{
		if (i >= mDeusCnt)
		{
			return false;
		}

		// Shift the tail left over the removed slot (no-op when removing
		// the last element).
		for (unsigned int cnt = i; cnt < mDeusCnt - 1; cnt++)
		{
			mVehicle[cnt] = mVehicle[cnt + 1];
		}

		mVehicle[--mDeusCnt] = nullptr;
		return true;
	}

	// Not implemented yet; returns nullptr (was NULL).
	const Vehicle* DeusExMachina::GetFurthestTravelled() const
	{
		return nullptr;
	}
}
SpectrumBroad/xible
app/Flow/index.js
'use strict'; const { EventEmitter } = require('events'); const debug = require('debug'); const fs = require('fs'); const path = require('path'); // lazy requires let sanitizePath; let express; const flowDebug = debug('xible:flow'); /** * Does a very simple html encode of the input string. * Only replaces double quote, greater than and less than. * @param {String} str The String to html encode. * @returns {String} */ function baseHtmlEncode(str) { return str.replace(/"/g, '&quot;') .replace(/</g, '&lt;') .replace(/>/g, '&gt;'); } module.exports = (XIBLE, EXPRESS_APP) => { // global output caching let globalOutputs = null; // caching // default init level for flows const initLevel = XIBLE.Config.getValue('flows.initlevel'); if (!XIBLE.child && !express) { express = require('express'); } /** * Flow class */ class Flow extends EventEmitter { constructor() { super(); this._id = null; this.name = null; this.json = null; this.nodes = []; this.connectors = []; this.initLevel = initLevel; this.instances = []; this.emptyInitInstance = null; this._deleted = false; } static get INITLEVEL_NONE() { return 0; } static get INITLEVEL_FLOW() { return 1; } static get INITLEVEL_NODES() { return 2; } /** * Init flows from a given path. * This will parse all json files except for _status.json into flows. * Note that a path cannot be initiated twice because it is used for saveStatuses() * @param {String} flowPath The path to the directory containing the flows. * @param {Boolean} cleanVault Indicates whether the json data from each flow * needs vault sanitizing. * @return {Promise.<Object.<String, Flow>>} List of flows by their _id. */ static async initFromPath(flowPath, cleanVault) { flowDebug(`init flows from "${flowPath}"`); if (this.flowPath) { throw new Error(`cannot init multiple flow paths. 
"${this.flowPath}" already init`); } this.flowPath = flowPath; // check that flowPath exists if (!fs.existsSync(flowPath)) { flowDebug(`creating "${flowPath}"`); fs.mkdirSync(flowPath); } // will hold the flows by their _id const flows = {}; // get the files in the flowPath let files; try { files = fs.readdirSync(flowPath); } catch (err) { flowDebug(`could not readdir "${flowPath}": ${err}`); files = []; } await Promise.all(files.map(async (file) => { try { const flow = await this.initOneFromPath(flowPath, file, cleanVault); if (flow) { flows[flow._id] = flow; } } catch (err) { flowDebug(`could not init "${file}": ${err.stack}`); } })); return flows; } /** * Init a single flow from a given path and filename. * This will parse the json file into a flows. * @param {String} flowPath The path to the directory containing the fileName. * @param {String} fileName The name of the file to to parse. * @param {Boolean} cleanVault Indicates whether the json data from each flow * needs vault sanitizing. * @returns {Promise.<Flow>} A single Flow object. * @since 0.16.0 */ static initOneFromPath(flowPath, fileName, cleanVault) { return new Promise((resolve, reject) => { if (flowPath !== this.flowPath) { reject(new Error(`flowPath "${this.flowPath}" already initialized and differs from "${flowPath}"`)); return; } const filePath = `${flowPath}/${fileName}`; if ( fileName.substring(0, 1) !== '_' && fileName.substring(0, 1) !== '.' && fs.statSync(filePath).isFile() && path.extname(filePath) === '.json' ) { fs.readFile(filePath, { encoding: 'utf8' }, (err, data) => { if (err) { reject(err); return; } try { const json = JSON.parse(data); if (json._id) { const flow = new Flow(); flow.initJson(json, cleanVault); resolve(flow); } } catch (flowParseErr) { reject(flowParseErr); } }); } else { resolve(); } }); } /** * Initializes all flows from a given path, by running them through initFromPath(). * Processes the related flow statuses and starts/inits where necessary. 
* @param {String} flowPath The path to the directory containing the flows. * @returns {Promise.<Object.<String, Flow>>} List of flows by their _id. * @since 0.5.0 */ static async init(flowPath) { const flows = await this.initFromPath(flowPath); // start all flows which had status running before // also do some cleaning while we're at it const statuses = this.getStatuses(); const preStatusesLength = Object.keys(statuses).length; for (const flowId in statuses) { // if a flow doesn't exist anymore, remove it from the statuses if ( !flows[flowId] || !Array.isArray(statuses[flowId]) ) { delete statuses[flowId]; continue; } statuses[flowId].forEach(async (instanceStatus) => { if (instanceStatus.state === XIBLE.FlowInstance.STATE_STARTED) { try { const instance = flows[flowId].createInstance({ params: instanceStatus.params }); instance._id = instanceStatus._id; await instance.forceStart(); } catch (err) { flowDebug(`failed to start "${flowId}": ${err}`); } } }); } flowDebug(`cleared ${preStatusesLength - Object.keys(statuses).length} statuses`); return flows; } /** * Validates if writing to the flow path is possible/allowed * @returns {Promise.<Boolean>} true or false */ static validatePermissions() { return new Promise((resolve) => { if (!this.flowPath) { resolve(false); } // check if we can write fs.access(this.flowPath, fs.W_OK, (err) => { if (err) { resolve(false); return; } resolve(true); }); }); } /** * Get all flow statuses. * @return {Object.<String, Boolean>} The statuses per flow name. 
*/ static getStatuses() { if (!this.flowPath) { throw new Error('Cannot get statuses; flows have not been loaded from path'); } if (this._statuses) { return this._statuses; } let statuses = {}; try { statuses = JSON.parse(fs.readFileSync(`${this.flowPath}/_status.json`)); flowDebug(`found ${Object.keys(statuses).length} statuses`); } catch (err) { flowDebug(`"${this.flowPath}/_status.json" cannot be opened: ${err}`); } this._statuses = statuses; return statuses; } /** * Save the given statuses to the filesystem. * @param {Object.<String, Boolean>} statuses */ static saveStatuses(statuses) { if (!this.flowPath) { return; } this._statuses = statuses; try { fs.writeFileSync(`${this.flowPath}/_status.json`, JSON.stringify(statuses)); } catch (err) { flowDebug(`error saving status to "${this.flowPath}/_status.json": ${err}`); } } /** * Validates that the _id/name of a flow does not contain illegal characters. * @param {String} _id The _id/name of the flow to be validated. * @returns {Boolean} Returns a boolean indicating whether the given _id is allowed (true), * or not (false). * @since 0.5.0 */ static validateId(_id) { if (!sanitizePath) { sanitizePath = require('sanitize-filename'); } return _id === sanitizePath(_id); } /** * Init a flow, including all its nodes and connectors, from an object. * @param {Object} json * @param {String} json._id * @param {Array} json.nodes * @param {Array} json.connectors * @param {Boolean} cleanVault Indicates whether the json data needs vault sanitizing. 
*/ initJson(json, cleanVault) { flowDebug(`initJson on "${json._id}"`); if (!json || !json._id) { throw new Error('object containing _id as argument is required'); } // only perform this filename check in master for performance reasons if (!XIBLE.child) { if (!Flow.validateId(json._id)) { const err = new Error('flow _id/name cannot contain reserved/unsave characters'); err.code = 'ERR_FLOW_NAMING'; throw err; } } this._id = json._id; this.name = this._id; json.name = this.name; this.json = json; this.nodes = []; this.connectors = []; this.runnable = true; // get the nodes for (let i = 0; i < json.nodes.length; i += 1) { const node = json.nodes[i]; const nodeConstr = XIBLE.getNodeByName(node.name); let xibleNode; // init a dummy node directly based on the json // and ensure the flow is set to not runnable if (!nodeConstr) { flowDebug(`Node "${node.name}" does not exist`); xibleNode = new XIBLE.Node(node); xibleNode.nodeExists = false; xibleNode.data = node.data; this.runnable = false; } else { // init a working node xibleNode = new XIBLE.Node({ ...nodeConstr, _id: node._id, data: { ...node.data }, left: node.left, top: node.top }); if (!xibleNode) { throw new Error(`Could not construct node '${node.name}'`); } // check for data keys that should be vaulted // remove those from the json (the json is used for saving) // save the vault data thereafter if (cleanVault) { const nodeVaultKeys = nodeConstr.vault; const nodeVaultData = {}; if (nodeVaultKeys && Array.isArray(nodeVaultKeys)) { for (const dataKey in node.data) { if (nodeVaultKeys.includes(dataKey)) { nodeVaultData[dataKey] = node.data[dataKey]; delete node.data[dataKey]; } } } if (Object.keys(nodeVaultData).length) { xibleNode.vault.set(Object.assign(xibleNode.vault.get() || {}, nodeVaultData)); } } else if (xibleNode.vault) { Object.assign(xibleNode.data, xibleNode.vault.get()); } // host routes if (!XIBLE.child && nodeConstr.routePaths.flow) { try { const router = express.Router(); 
EXPRESS_APP.use(`/api/nodes/${xibleNode.name}/routes/flow/${xibleNode._id}/`, router); require(nodeConstr.routePaths.flow)(xibleNode, router, express.static); } catch (err) { console.error(err); } } } this.addNode(xibleNode); for (const name in node.inputs) { const nodeInput = node.inputs[name]; let xibleNodeInput = xibleNode.inputs[name]; if (!xibleNodeInput) { flowDebug(`Node "${node.name}" does not have input "${name}"`); xibleNodeInput = xibleNode.addInput(name, nodeInput); xibleNode.nodeExists = false; this.runnable = false; } xibleNodeInput._id = nodeInput._id; xibleNodeInput.global = nodeInput.global; xibleNodeInput.type = nodeInput.type; } for (const name in node.outputs) { const nodeOutput = node.outputs[name]; let xibleNodeOutput = xibleNode.outputs[name]; if (!xibleNodeOutput) { flowDebug(`Node "${node.name}" does not have output "${name}"`); xibleNodeOutput = xibleNode.addOutput(name, nodeOutput); xibleNode.nodeExists = false; this.runnable = false; } xibleNodeOutput._id = nodeOutput._id; xibleNodeOutput.global = nodeOutput.global || false; xibleNodeOutput.type = nodeOutput.type; } // construct a dummy editorContents if (!xibleNode.nodeExists) { xibleNode.editorContent = ''; for (const key in xibleNode.data) { xibleNode.editorContent += `<input type="text" placeholder="${baseHtmlEncode(key)}" data-outputvalue="${baseHtmlEncode(key)}" required="required" />`; } } } // get the connectors for (let i = 0; i < json.connectors.length; i += 1) { const origin = this.getOutputById(json.connectors[i].origin); if (!origin) { flowDebug(`Cannot find output by id "${json.connectors[i].origin}"`); this.runnable = false; continue; } const destination = this.getInputById(json.connectors[i].destination); if (!destination) { flowDebug(`Cannot find input by id "${json.connectors[i].destination}"`); this.runnable = false; continue; } const xibleConnector = { origin, destination }; origin.connectors.push(xibleConnector); destination.connectors.push(xibleConnector); } 
XIBLE.addFlow(this); this.emit('initJson'); if (!XIBLE.child && this.initLevel === Flow.INITLEVEL_FLOW) { if (this.emptyInitInstance) { this.emptyInitInstance.delete(); } else { this.createEmptyInitInstance(); } } } /** * Saves a flow to the configured flows directory. * Rejects if this is not the master thread. * @return {Promise.<Flow>} */ save() { return new Promise((resolve, reject) => { if (XIBLE.child || !this._id || !Flow.flowPath) { reject('not master, no _id or no flowPath specified'); return; } flowDebug(`saving "${this._id}"`); fs.writeFile(`${Flow.flowPath}/${this._id}.json`, JSON.stringify(this.json, null, '\t'), () => { this.emit('save'); resolve(this); }); }); } /** * Deletes a flow from the configured flows directory. * Stops all instances first. * Rejects if this is not the master thread. * @return {Promise.<Flow>} */ delete() { return new Promise(async (resolve, reject) => { if (XIBLE.child || !this._id || !Flow.flowPath) { reject('not master, no _id or no flowPath specified'); return; } this._deleted = true; // stop all instances await this.deleteAllInstances(); this.instances = []; flowDebug(`deleting "${this._id}"`); fs.unlink(`${Flow.flowPath}/${this._id}.json`, () => { resolve(this); }); // update status this.saveStatus(); // remove from Xible instance if (XIBLE) { delete XIBLE.flows[this._id]; } this.emit('delete'); }); } /** * Adds a node to the flow. * @param {Node} node The node to add. * @return {Node} */ addNode(node) { node.flow = this; // track direct triggers of nodes node.prependListener('triggerStartTime', (type) => { const d = new Date(); // node._trackerTriggerTime = d.getTime(); node.setTracker({ message: `${type === 'output' ? 'hit' : 'start'} @ ${d.getHours()}:${d.getMinutes()}:${d.getSeconds()}:${d.getMilliseconds()}`, timeout: 3000 }); }); // add and return this.nodes.push(node); return node; } /** * Returns a node from a specific flow by the node._id. * @param {Number} id The _id of the node to be found. 
* @return {Node|undefined} The found node. */ getNodeById(id) { return this.nodes.find((node) => node._id === id); } /** * Returns an input for any node by the input._id. * @param {Number} id the _id of the nodeInput to be found. * @return {NodeInput|null} The found nodeInput, or null if none. */ getInputById(id) { for (let i = 0; i < this.nodes.length; i += 1) { const node = this.nodes[i]; for (const name in node.inputs) { if (node.inputs[name]._id === id) { return node.inputs[name]; } } } return null; } /** * Returns an output for any node by the output._id. * @param {Number} id the _id of the nodeOutput to be found. * @return {NodeOutput|null} The found nodeOutput, or null if none. */ getOutputById(id) { for (let i = 0; i < this.nodes.length; i += 1) { const node = this.nodes[i]; for (const name in node.outputs) { if (node.outputs[name]._id === id) { return node.outputs[name]; } } } return null; } /** * Returns all global outputs with a given type. * @param {String} type Type of global outputs to be fetched. * @return {NodeOutput[]} The global nodeOutputs. */ getGlobalOutputsByType(type) { if (globalOutputs) { return globalOutputs.filter((globalOutput) => globalOutput.type === type); } const outputs = []; globalOutputs = []; for (let i = 0; i < this.nodes.length; i += 1) { const node = this.nodes[i]; for (const name in node.outputs) { const output = node.outputs[name]; if (output.global) { globalOutputs.push(output); if (output.type === type) { outputs.push(output); } } } } return outputs; } /** * Saves the status for this flow by calling Flow.saveStatuses(). 
*/ saveStatus() { if (XIBLE.stopping) { return; } const statuses = Flow.getStatuses(); const startedInstances = this.instances .filter( (instance) => instance.state === XIBLE.FlowInstance.STATE_STARTED && !instance.directed ); if (!startedInstances.length) { delete statuses[this._id]; } else { statuses[this._id] = startedInstances.map((instance) => { const status = { _id: instance._id, state: instance.state }; if (instance.params && Object.keys(instance.params).length) { status.params = instance.params; } return status; }); } Flow.saveStatuses(statuses); } createEmptyInitInstance() { this.emptyInitInstance = this.createInstance(); const recreate = () => { if (this.emptyInitInstance) { this.emptyInitInstance.removeEmptyInitInstanceListeners(); this.emptyInitInstance = null; } if (this._deleted || XIBLE.stopping) { return; } this.createEmptyInitInstance(); }; this.emptyInitInstance.removeEmptyInitInstanceListeners = function removeEmptyInitInstanceListeners() { this.removeListener('stopping', recreate); this.removeListener('stopped', recreate); this.removeListener('delete', recreate); }; this.emptyInitInstance.on('stopping', recreate); this.emptyInitInstance.on('stopped', recreate); this.emptyInitInstance.on('delete', recreate); return this.emptyInitInstance.init(); } /** * Creates a new instance of this flow. * This instance can be started/stopped etc. 
* @param {Object} options * @param {Object} options.params * @param {Object} options.directNodes * @returns {FlowInstance} */ createInstance({ params, directNodes } = {}) { if (!this.runnable) { throw new Error(`Flow "${this._id}" is not runnable`); } if (this.initLevel === Flow.INITLEVEL_FLOW && this.emptyInitInstance) { const { emptyInitInstance } = this; emptyInitInstance.params = params; emptyInitInstance.directNodes = directNodes; this.emptyInitInstance.removeEmptyInitInstanceListeners(); this.emptyInitInstance = null; this.createEmptyInitInstance(); return emptyInitInstance; } const createStart = process.hrtime(); const flowInstance = new XIBLE.FlowInstance(this, params, directNodes); this.instances.push(flowInstance); flowInstance.timing.createDate = Date.now(); flowInstance.timing.createStart = createStart; flowInstance.timing.createEnd = process.hrtime(); this.emit('createInstance', { flowInstance }); return flowInstance; } /** * Delete the provided instance for this flow. * @param {FlowInstance} instance * @returns {Promise} */ async deleteInstance(instance) { if (!(instance instanceof XIBLE.FlowInstance)) { throw new TypeError('argument "instance" must be instanceof FlowInstance'); } const index = this.instances.indexOf(instance); if (index === -1) { throw new TypeError('argument "instance" is not instance of this flow'); } this.instances.splice(index, 1); if (instance.state !== XIBLE.FlowInstance.STATE_STOPPED) { await instance.forceStop(false); } this.emit('deleteInstance', { flowInstance: instance }); } /** * Returns the instance on this flow for the given id. * @param {String} id The _id of the FlowInstance to return. * @returns {FlowInstance|null} */ getInstanceById(id) { return this.instances.find((instance) => instance._id === id); } /** * Stops all instances for this flow. * @returns {Promise.<Flow>} Returns the current flow for daisy chaining. 
*/ async stopAllInstances() { await Promise.all( this.instances.map((instance) => { if (instance !== this.emptyInitInstance) { return instance.forceStop(); } return null; }) ); return this; } /** * Deletes all instances for this flow. * @returns {Promise.<Flow>} Returns the current flow for daisy chaining. */ async deleteAllInstances() { await Promise.all(this.instances.slice().map((instance) => instance.delete())); return this; } /** * Publishes the flow to the registry. */ async publish(altName) { if (!XIBLE.Config.getValue('registry.flows.allowpublish')) { const err = new Error('Your config does not allow to publish flows to the registry'); err.code = 'ERR_CONFIG'; throw err; } let flowJson = this.json; if (altName) { if (!Flow.validateId(altName)) { const err = new Error('flow _id/name cannot contain reserved/unsave characters'); err.code = 'ERR_FLOW_NAMING'; throw err; } flowJson = { ...this.json }; flowJson._id = altName; flowJson.name = altName; } // verify that we have a token const token = XIBLE.Registry.getUserToken(); if (!token) { const err = new Error('Not logged in.'); err.code = 'ERR_REGISTRY_NOT_LOGGED_IN'; throw err; } // verify that we're logged in return XIBLE.Registry.User .getByToken(token) .catch((getUserErr) => Promise.reject(new Error(`Failed to get user from token: ${getUserErr}`))) .then((user) => { if (!user) { const err = new Error('User could not be verified. 
Please login using "xiblepm user login".'); err.code = 'ERR_REGISTRY_NOT_LOGGED_IN'; return Promise.reject(err); } // publish return XIBLE.Registry.Flow .publish(flowJson); }); } emit(eventName, obj) { super.emit(eventName, obj); const broadcastWebSocketObj = { method: `xible.flow.${eventName}`, flow: this }; if (obj && typeof obj === 'object') { Object.assign(broadcastWebSocketObj, obj); } XIBLE.broadcastWebSocket(broadcastWebSocketObj); } toJSON() { return { _id: this._id, name: this._id, nodes: this.nodes, connectors: this.json.connectors, viewState: this.json.viewState, runnable: this.runnable }; } } if (EXPRESS_APP) { require('./routes.js')(Flow, XIBLE, EXPRESS_APP); } return { Flow }; };
holon-platform/holon-vaadin7
navigator/src/main/java/com/holonplatform/vaadin7/navigator/internal/DefaultViewContextField.java
/*
 * Copyright 2016-2017 Axioma srl.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.holonplatform.vaadin7.navigator.internal;

import java.lang.reflect.Field;

/**
 * Default {@link ViewContextField} implementation.
 * <p>
 * Immutable value holder: all state is set at construction time and only
 * exposed through the {@link ViewContextField} accessors.
 * </p>
 *
 * @since 5.0.0
 */
public class DefaultViewContextField implements ViewContextField {

	/**
	 * Optional context resource key to inject
	 */
	private final String contextResourceKey;

	/**
	 * Required injection
	 */
	private final boolean required;

	/**
	 * Field reference
	 */
	private final Field field;

	/**
	 * Constructor
	 * @param contextResourceKey Context resource key
	 * @param required Whether the injection is required
	 * @param field Field reference
	 */
	public DefaultViewContextField(String contextResourceKey, boolean required, Field field) {
		super();
		this.contextResourceKey = contextResourceKey;
		this.required = required;
		this.field = field;
	}

	/*
	 * (non-Javadoc)
	 * @see com.holonplatform.vaadin.navigator.internal.ViewContextField#getContextResourceKey()
	 */
	@Override
	public String getContextResourceKey() {
		return contextResourceKey;
	}

	/*
	 * (non-Javadoc)
	 * @see com.holonplatform.vaadin.navigator.internal.ViewContextField#isRequired()
	 */
	@Override
	public boolean isRequired() {
		return required;
	}

	/*
	 * (non-Javadoc)
	 * @see com.holonplatform.vaadin.navigator.internal.ViewContextField#getField()
	 */
	@Override
	public Field getField() {
		return field;
	}

}
onap/ncomp-docker
ncomp-docker-model/src/main/xcore-gen/org/openecomp/ncomp/docker/impl/ContainerMemoryUsageImpl.java
/*- * ============LICENSE_START========================================== * OPENECOMP - DCAE * =================================================================== * Copyright (c) 2017 AT&T Intellectual Property. All rights reserved. * =================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ============LICENSE_END============================================ */ /** */ package org.openecomp.ncomp.docker.impl; import org.openecomp.ncomp.docker.ContainerMemoryStats; import org.openecomp.ncomp.docker.ContainerMemoryUsage; import org.openecomp.ncomp.docker.DockerPackage; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.impl.MinimalEObjectImpl; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Container Memory Usage</b></em>'. 
* <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.openecomp.ncomp.docker.impl.ContainerMemoryUsageImpl#getMemstats <em>Memstats</em>}</li> * <li>{@link org.openecomp.ncomp.docker.impl.ContainerMemoryUsageImpl#getMax_usage <em>Max usage</em>}</li> * <li>{@link org.openecomp.ncomp.docker.impl.ContainerMemoryUsageImpl#getUsage <em>Usage</em>}</li> * <li>{@link org.openecomp.ncomp.docker.impl.ContainerMemoryUsageImpl#getFailcnt <em>Failcnt</em>}</li> * <li>{@link org.openecomp.ncomp.docker.impl.ContainerMemoryUsageImpl#getLimit <em>Limit</em>}</li> * </ul> * * @generated */ public class ContainerMemoryUsageImpl extends MinimalEObjectImpl.Container implements ContainerMemoryUsage { /** * The cached value of the '{@link #getMemstats() <em>Memstats</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMemstats() * @generated * @ordered */ protected ContainerMemoryStats memstats; /** * The default value of the '{@link #getMax_usage() <em>Max usage</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMax_usage() * @generated * @ordered */ protected static final int MAX_USAGE_EDEFAULT = 0; /** * The cached value of the '{@link #getMax_usage() <em>Max usage</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMax_usage() * @generated * @ordered */ protected int max_usage = MAX_USAGE_EDEFAULT; /** * The default value of the '{@link #getUsage() <em>Usage</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getUsage() * @generated * @ordered */ protected static final int USAGE_EDEFAULT = 0; /** * The cached value of the '{@link #getUsage() <em>Usage</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getUsage() * @generated * @ordered */ protected int usage = USAGE_EDEFAULT; /** * The default value of the '{@link #getFailcnt() <em>Failcnt</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getFailcnt() * @generated * @ordered */ protected static final int FAILCNT_EDEFAULT = 0; /** * The cached value of the '{@link #getFailcnt() <em>Failcnt</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getFailcnt() * @generated * @ordered */ protected int failcnt = FAILCNT_EDEFAULT; /** * The default value of the '{@link #getLimit() <em>Limit</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getLimit() * @generated * @ordered */ protected static final int LIMIT_EDEFAULT = 0; /** * The cached value of the '{@link #getLimit() <em>Limit</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getLimit() * @generated * @ordered */ protected int limit = LIMIT_EDEFAULT; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ContainerMemoryUsageImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return DockerPackage.Literals.CONTAINER_MEMORY_USAGE; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ContainerMemoryStats getMemstats() { return memstats; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetMemstats(ContainerMemoryStats newMemstats, NotificationChain msgs) { ContainerMemoryStats oldMemstats = memstats; memstats = newMemstats; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS, oldMemstats, newMemstats); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setMemstats(ContainerMemoryStats newMemstats) { if (newMemstats != memstats) { NotificationChain msgs = null; if (memstats != null) msgs = 
((InternalEObject)memstats).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS, null, msgs); if (newMemstats != null) msgs = ((InternalEObject)newMemstats).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS, null, msgs); msgs = basicSetMemstats(newMemstats, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS, newMemstats, newMemstats)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public int getMax_usage() { return max_usage; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setMax_usage(int newMax_usage) { int oldMax_usage = max_usage; max_usage = newMax_usage; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, DockerPackage.CONTAINER_MEMORY_USAGE__MAX_USAGE, oldMax_usage, max_usage)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public int getUsage() { return usage; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setUsage(int newUsage) { int oldUsage = usage; usage = newUsage; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, DockerPackage.CONTAINER_MEMORY_USAGE__USAGE, oldUsage, usage)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public int getFailcnt() { return failcnt; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setFailcnt(int newFailcnt) { int oldFailcnt = failcnt; failcnt = newFailcnt; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, DockerPackage.CONTAINER_MEMORY_USAGE__FAILCNT, oldFailcnt, failcnt)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public int getLimit() { return limit; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * 
@generated */ public void setLimit(int newLimit) { int oldLimit = limit; limit = newLimit; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, DockerPackage.CONTAINER_MEMORY_USAGE__LIMIT, oldLimit, limit)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS: return basicSetMemstats(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS: return getMemstats(); case DockerPackage.CONTAINER_MEMORY_USAGE__MAX_USAGE: return getMax_usage(); case DockerPackage.CONTAINER_MEMORY_USAGE__USAGE: return getUsage(); case DockerPackage.CONTAINER_MEMORY_USAGE__FAILCNT: return getFailcnt(); case DockerPackage.CONTAINER_MEMORY_USAGE__LIMIT: return getLimit(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS: setMemstats((ContainerMemoryStats)newValue); return; case DockerPackage.CONTAINER_MEMORY_USAGE__MAX_USAGE: setMax_usage((Integer)newValue); return; case DockerPackage.CONTAINER_MEMORY_USAGE__USAGE: setUsage((Integer)newValue); return; case DockerPackage.CONTAINER_MEMORY_USAGE__FAILCNT: setFailcnt((Integer)newValue); return; case DockerPackage.CONTAINER_MEMORY_USAGE__LIMIT: setLimit((Integer)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case 
DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS: setMemstats((ContainerMemoryStats)null); return; case DockerPackage.CONTAINER_MEMORY_USAGE__MAX_USAGE: setMax_usage(MAX_USAGE_EDEFAULT); return; case DockerPackage.CONTAINER_MEMORY_USAGE__USAGE: setUsage(USAGE_EDEFAULT); return; case DockerPackage.CONTAINER_MEMORY_USAGE__FAILCNT: setFailcnt(FAILCNT_EDEFAULT); return; case DockerPackage.CONTAINER_MEMORY_USAGE__LIMIT: setLimit(LIMIT_EDEFAULT); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case DockerPackage.CONTAINER_MEMORY_USAGE__MEMSTATS: return memstats != null; case DockerPackage.CONTAINER_MEMORY_USAGE__MAX_USAGE: return max_usage != MAX_USAGE_EDEFAULT; case DockerPackage.CONTAINER_MEMORY_USAGE__USAGE: return usage != USAGE_EDEFAULT; case DockerPackage.CONTAINER_MEMORY_USAGE__FAILCNT: return failcnt != FAILCNT_EDEFAULT; case DockerPackage.CONTAINER_MEMORY_USAGE__LIMIT: return limit != LIMIT_EDEFAULT; } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (max_usage: "); result.append(max_usage); result.append(", usage: "); result.append(usage); result.append(", failcnt: "); result.append(failcnt); result.append(", limit: "); result.append(limit); result.append(')'); return result.toString(); } } //ContainerMemoryUsageImpl
PhilippePerret/Montrello2
js/required-2/system/IO.js
'use strict'; /** Utilitaires d'entrée sortie **/ class IO { /** +path+ Le path complet ou relatif (si +folder+ est fourni) +folder+ Le dossier si +path+ est fourni en chemin relatif **/ static openInFinder(path, folder){ Ajax.send('system/open-in-finder.rb', {path:path, folder:folder}) .then(onAjaxSuccess).catch(onError) } }
Breakaway-Dezigns-Agency/bad-portfolio-21
src/components/layout/container-full.js
import * as React from "react" import PropTypes from "prop-types" const ContainerFull = ({ className, children }) => { return ( <section className={`max-w-full mx-auto ${className !== '' ? className : ''}`}> {children} </section> ) } ContainerFull.propTypes = { className: PropTypes.node.isRequired, children: PropTypes.node.isRequired, } export default ContainerFull
Apodead/BoxRouter
html/search/defines_1.js
var searchData= [ ['address',['ADDRESS',['../BoxRouter_8h.html#a280feb883e9d4a7edcc69c8bcb9f38f2',1,'BoxRouter.h']]] ];
PkayJava/fintech
src/main/java/com/angkorteam/fintech/pages/client/center/CenterClosePage.java
package com.angkorteam.fintech.pages.client.center; import org.apache.wicket.authroles.authorization.strategies.role.annotations.AuthorizeInstantiation; import com.angkorteam.fintech.Page; import com.angkorteam.fintech.dto.Function; @AuthorizeInstantiation(Function.ALL_FUNCTION) public class CenterClosePage extends Page { @Override protected void initData() { } @Override protected void initComponent() { } @Override protected void configureMetaData() { } }
mikenavarroro/ProgramacionII
Challenge/intercambio.c
#include <stdio.h> int main(){ int n, aux; scanf("%d", &n); int numeros[n]; for(int i = 0; i < n; i++){ scanf("%d", &numeros[i]); } for(int i = 0; i < (n/2); i++){ aux = numeros[i]; numeros[i] = numeros[n-1-i]; numeros[n-1-i] = aux; } for(int i = 0; i < n ; i++){ printf("%d ", numeros[i]); } printf("\n"); }
arshajii/rho
src/runtime/opcodes.h
<reponame>arshajii/rho<filename>src/runtime/opcodes.h<gh_stars>1-10 #ifndef RHO_OPCODE_H #define RHO_OPCODE_H typedef enum { RHO_INS_NOP = 0x30, RHO_INS_LOAD_CONST, RHO_INS_LOAD_NULL, RHO_INS_LOAD_ITER_STOP, RHO_INS_ADD, RHO_INS_SUB, RHO_INS_MUL, RHO_INS_DIV, RHO_INS_MOD, RHO_INS_POW, RHO_INS_BITAND, RHO_INS_BITOR, RHO_INS_XOR, RHO_INS_BITNOT, RHO_INS_SHIFTL, RHO_INS_SHIFTR, RHO_INS_AND, RHO_INS_OR, RHO_INS_NOT, RHO_INS_EQUAL, RHO_INS_NOTEQ, RHO_INS_LT, RHO_INS_GT, RHO_INS_LE, RHO_INS_GE, RHO_INS_UPLUS, RHO_INS_UMINUS, RHO_INS_IADD, RHO_INS_ISUB, RHO_INS_IMUL, RHO_INS_IDIV, RHO_INS_IMOD, RHO_INS_IPOW, RHO_INS_IBITAND, RHO_INS_IBITOR, RHO_INS_IXOR, RHO_INS_ISHIFTL, RHO_INS_ISHIFTR, RHO_INS_MAKE_RANGE, RHO_INS_IN, RHO_INS_STORE, RHO_INS_STORE_GLOBAL, RHO_INS_LOAD, RHO_INS_LOAD_GLOBAL, RHO_INS_LOAD_ATTR, RHO_INS_SET_ATTR, RHO_INS_LOAD_INDEX, RHO_INS_SET_INDEX, RHO_INS_APPLY, RHO_INS_IAPPLY, RHO_INS_LOAD_NAME, RHO_INS_PRINT, RHO_INS_JMP, RHO_INS_JMP_BACK, RHO_INS_JMP_IF_TRUE, RHO_INS_JMP_IF_FALSE, RHO_INS_JMP_BACK_IF_TRUE, RHO_INS_JMP_BACK_IF_FALSE, RHO_INS_JMP_IF_TRUE_ELSE_POP, RHO_INS_JMP_IF_FALSE_ELSE_POP, RHO_INS_CALL, RHO_INS_RETURN, RHO_INS_THROW, RHO_INS_PRODUCE, RHO_INS_TRY_BEGIN, RHO_INS_TRY_END, RHO_INS_JMP_IF_EXC_MISMATCH, RHO_INS_MAKE_LIST, RHO_INS_MAKE_TUPLE, RHO_INS_MAKE_SET, RHO_INS_MAKE_DICT, RHO_INS_IMPORT, RHO_INS_EXPORT, RHO_INS_EXPORT_GLOBAL, RHO_INS_EXPORT_NAME, RHO_INS_RECEIVE, RHO_INS_GET_ITER, RHO_INS_LOOP_ITER, RHO_INS_MAKE_FUNCOBJ, RHO_INS_MAKE_GENERATOR, RHO_INS_MAKE_ACTOR, RHO_INS_SEQ_EXPAND, RHO_INS_POP, RHO_INS_DUP, RHO_INS_DUP_TWO, RHO_INS_ROT, RHO_INS_ROT_THREE } RhoOpcode; typedef enum { RHO_ST_ENTRY_BEGIN = 0x10, RHO_ST_ENTRY_END } RhoSTCode; typedef enum { RHO_CT_ENTRY_BEGIN = 0x20, RHO_CT_ENTRY_INT, RHO_CT_ENTRY_FLOAT, RHO_CT_ENTRY_STRING, RHO_CT_ENTRY_CODEOBJ, RHO_CT_ENTRY_END } RhoCTCode; #endif /* RHO_OPCODE_H */
HarumeJs/Discord-Bot
commands/Fun/urban.js
<reponame>HarumeJs/Discord-Bot<filename>commands/Fun/urban.js // Dependencies const ud = require('urban-dictionary'); const Discord = require('discord.js'); module.exports.run = async (bot, message, args, emojis, settings) => { // Get phrase const phrase = args.join(' '); if (!phrase) return message.channel.send({ embed:{ color:15158332, description:`${emojis[0]} Please use the format \`${bot.commands.get('urban').help.usage.replace('${PREFIX}', settings.prefix)}\`.` } }).then(m => m.delete({ timeout: 5000 })); // Search up phrase in urban dictionary ud.term(`${phrase}`, (error, entries) => { if (error) { bot.logger.error(`Urban Dictionary: ${error.code} (phrase: ${phrase})`); message.channel.send({ embed:{ color:15158332, description:`${emojis[0]} Phrase: \`${phrase}\` was not found on urban dictionary.` } }).then(m => m.delete({ timeout: 5000 })); } else { // send message const embed = new Discord.MessageEmbed() .setTitle(`Definition of ${phrase}`) .setURL(entries[0].permalink) .setThumbnail('https://i.imgur.com/VFXr0ID.jpg') .setDescription(`${entries[0].definition}\n**Example:**\n${entries[0].example}`) .addField('👍', entries[0].thumbs_up, true) .addField('👎', entries[0].thumbs_down, true); message.channel.send(embed); } }); }; module.exports.config = { command: 'urban', permissions: ['SEND_MESSAGES', 'EMBED_LINKS'], }; module.exports.help = { name: 'Urban', category: 'Fun', description: 'Get the urban dictionary of a word', usage: '${PREFIX}urban <word>', example: '${PREFIX}urban watermelon sugar', };
gesslar/shadowgate
d/avatars/circe/testsword.c
#include <std.h> inherit "/d/common/obj/weapon/longsword.c"; void create(){ ::create(); set_name("<NAME>"); add_id(({ "you better work", "dammit" })); set_short("Long Sword"); set_obvious_short("long sword"); set_long( @AVATAR This is a long sword. AVATAR ); set_value(150); set_property("enchantment",4); set_ac(0); }
rosiecowling/specialist-publisher
lib/services.rb
require "gds_api/publishing_api_v2" require "gds_api/asset_manager" require "gds_api/email_alert_api" module Services def self.publishing_api @publishing_api ||= GdsApi::PublishingApiV2.new( Plek.new.find("publishing-api"), bearer_token: ENV["PUBLISHING_API_BEARER_TOKEN"] || "example", timeout: 10, ) end def self.asset_api @asset_api ||= GdsApi::AssetManager.new( Plek.current.find("asset-manager"), bearer_token: ENV["ASSET_MANAGER_BEARER_TOKEN"] || "<PASSWORD>", ) end end
navikom/tinkerpop-js
test/tinkergraph/proccess/Graph.min.spec.js
import { TinkerFactory, both } from '../../../src'; describe('Graph min', () => { const graph = TinkerFactory.createModern(); const g = graph.traversal(); it('g.V().values("age").min().next(); should return 27', () => { expect(g.V().values("age").min().next()).toBe(27); }); it('g.V().values("age").min().next(); should return 27', () => { expect(g.V().repeat(both()).times(3).values("age").min().next()).toBe(27); }); });
bobheadlabs/sourcegraph
enterprise/internal/batches/testing/config.go
<reponame>bobheadlabs/sourcegraph package testing import ( "testing" "github.com/sourcegraph/sourcegraph/enterprise/internal/batches/types/scheduler/config" "github.com/sourcegraph/sourcegraph/internal/conf" ) func MockConfig(t testing.TB, mockery *conf.Unified) { t.Helper() conf.Mock(mockery) t.Cleanup(func() { conf.Mock(nil) }) config.Reset() }
AaronFriel/pulumi-aws-native
sdk/python/pulumi_aws_native/wafregional/_inputs.py
<gh_stars>10-100 # coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities __all__ = [ 'ByteMatchSetByteMatchTupleArgs', 'ByteMatchSetFieldToMatchArgs', 'GeoMatchSetGeoMatchConstraintArgs', 'IPSetDescriptorArgs', 'RateBasedRulePredicateArgs', 'RulePredicateArgs', 'SizeConstraintSetFieldToMatchArgs', 'SizeConstraintSetSizeConstraintArgs', 'SqlInjectionMatchSetFieldToMatchArgs', 'SqlInjectionMatchSetSqlInjectionMatchTupleArgs', 'WebACLActionArgs', 'WebACLRuleArgs', 'XssMatchSetFieldToMatchArgs', 'XssMatchSetXssMatchTupleArgs', ] @pulumi.input_type class ByteMatchSetByteMatchTupleArgs: def __init__(__self__, *, field_to_match: pulumi.Input['ByteMatchSetFieldToMatchArgs'], positional_constraint: pulumi.Input[str], text_transformation: pulumi.Input[str], target_string: Optional[pulumi.Input[str]] = None, target_string_base64: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "field_to_match", field_to_match) pulumi.set(__self__, "positional_constraint", positional_constraint) pulumi.set(__self__, "text_transformation", text_transformation) if target_string is not None: pulumi.set(__self__, "target_string", target_string) if target_string_base64 is not None: pulumi.set(__self__, "target_string_base64", target_string_base64) @property @pulumi.getter(name="fieldToMatch") def field_to_match(self) -> pulumi.Input['ByteMatchSetFieldToMatchArgs']: return pulumi.get(self, "field_to_match") @field_to_match.setter def field_to_match(self, value: pulumi.Input['ByteMatchSetFieldToMatchArgs']): pulumi.set(self, "field_to_match", value) @property @pulumi.getter(name="positionalConstraint") def positional_constraint(self) -> pulumi.Input[str]: return pulumi.get(self, "positional_constraint") @positional_constraint.setter def 
positional_constraint(self, value: pulumi.Input[str]): pulumi.set(self, "positional_constraint", value) @property @pulumi.getter(name="textTransformation") def text_transformation(self) -> pulumi.Input[str]: return pulumi.get(self, "text_transformation") @text_transformation.setter def text_transformation(self, value: pulumi.Input[str]): pulumi.set(self, "text_transformation", value) @property @pulumi.getter(name="targetString") def target_string(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "target_string") @target_string.setter def target_string(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "target_string", value) @property @pulumi.getter(name="targetStringBase64") def target_string_base64(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "target_string_base64") @target_string_base64.setter def target_string_base64(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "target_string_base64", value) @pulumi.input_type class ByteMatchSetFieldToMatchArgs: def __init__(__self__, *, type: pulumi.Input[str], data: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "type", type) if data is not None: pulumi.set(__self__, "data", data) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter def data(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "data") @data.setter def data(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "data", value) @pulumi.input_type class GeoMatchSetGeoMatchConstraintArgs: def __init__(__self__, *, type: pulumi.Input[str], value: pulumi.Input[str]): pulumi.set(__self__, "type", type) pulumi.set(__self__, "value", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter 
def value(self) -> pulumi.Input[str]: return pulumi.get(self, "value") @value.setter def value(self, value: pulumi.Input[str]): pulumi.set(self, "value", value) @pulumi.input_type class IPSetDescriptorArgs: def __init__(__self__, *, type: pulumi.Input[str], value: pulumi.Input[str]): pulumi.set(__self__, "type", type) pulumi.set(__self__, "value", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter def value(self) -> pulumi.Input[str]: return pulumi.get(self, "value") @value.setter def value(self, value: pulumi.Input[str]): pulumi.set(self, "value", value) @pulumi.input_type class RateBasedRulePredicateArgs: def __init__(__self__, *, data_id: pulumi.Input[str], negated: pulumi.Input[bool], type: pulumi.Input[str]): pulumi.set(__self__, "data_id", data_id) pulumi.set(__self__, "negated", negated) pulumi.set(__self__, "type", type) @property @pulumi.getter(name="dataId") def data_id(self) -> pulumi.Input[str]: return pulumi.get(self, "data_id") @data_id.setter def data_id(self, value: pulumi.Input[str]): pulumi.set(self, "data_id", value) @property @pulumi.getter def negated(self) -> pulumi.Input[bool]: return pulumi.get(self, "negated") @negated.setter def negated(self, value: pulumi.Input[bool]): pulumi.set(self, "negated", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @pulumi.input_type class RulePredicateArgs: def __init__(__self__, *, data_id: pulumi.Input[str], negated: pulumi.Input[bool], type: pulumi.Input[str]): pulumi.set(__self__, "data_id", data_id) pulumi.set(__self__, "negated", negated) pulumi.set(__self__, "type", type) @property @pulumi.getter(name="dataId") def data_id(self) -> pulumi.Input[str]: return pulumi.get(self, "data_id") @data_id.setter def 
data_id(self, value: pulumi.Input[str]): pulumi.set(self, "data_id", value) @property @pulumi.getter def negated(self) -> pulumi.Input[bool]: return pulumi.get(self, "negated") @negated.setter def negated(self, value: pulumi.Input[bool]): pulumi.set(self, "negated", value) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @pulumi.input_type class SizeConstraintSetFieldToMatchArgs: def __init__(__self__, *, type: pulumi.Input[str], data: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "type", type) if data is not None: pulumi.set(__self__, "data", data) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter def data(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "data") @data.setter def data(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "data", value) @pulumi.input_type class SizeConstraintSetSizeConstraintArgs: def __init__(__self__, *, comparison_operator: pulumi.Input[str], field_to_match: pulumi.Input['SizeConstraintSetFieldToMatchArgs'], size: pulumi.Input[int], text_transformation: pulumi.Input[str]): pulumi.set(__self__, "comparison_operator", comparison_operator) pulumi.set(__self__, "field_to_match", field_to_match) pulumi.set(__self__, "size", size) pulumi.set(__self__, "text_transformation", text_transformation) @property @pulumi.getter(name="comparisonOperator") def comparison_operator(self) -> pulumi.Input[str]: return pulumi.get(self, "comparison_operator") @comparison_operator.setter def comparison_operator(self, value: pulumi.Input[str]): pulumi.set(self, "comparison_operator", value) @property @pulumi.getter(name="fieldToMatch") def field_to_match(self) -> pulumi.Input['SizeConstraintSetFieldToMatchArgs']: return pulumi.get(self, 
"field_to_match") @field_to_match.setter def field_to_match(self, value: pulumi.Input['SizeConstraintSetFieldToMatchArgs']): pulumi.set(self, "field_to_match", value) @property @pulumi.getter def size(self) -> pulumi.Input[int]: return pulumi.get(self, "size") @size.setter def size(self, value: pulumi.Input[int]): pulumi.set(self, "size", value) @property @pulumi.getter(name="textTransformation") def text_transformation(self) -> pulumi.Input[str]: return pulumi.get(self, "text_transformation") @text_transformation.setter def text_transformation(self, value: pulumi.Input[str]): pulumi.set(self, "text_transformation", value) @pulumi.input_type class SqlInjectionMatchSetFieldToMatchArgs: def __init__(__self__, *, type: pulumi.Input[str], data: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "type", type) if data is not None: pulumi.set(__self__, "data", data) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter def data(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "data") @data.setter def data(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "data", value) @pulumi.input_type class SqlInjectionMatchSetSqlInjectionMatchTupleArgs: def __init__(__self__, *, field_to_match: pulumi.Input['SqlInjectionMatchSetFieldToMatchArgs'], text_transformation: pulumi.Input[str]): pulumi.set(__self__, "field_to_match", field_to_match) pulumi.set(__self__, "text_transformation", text_transformation) @property @pulumi.getter(name="fieldToMatch") def field_to_match(self) -> pulumi.Input['SqlInjectionMatchSetFieldToMatchArgs']: return pulumi.get(self, "field_to_match") @field_to_match.setter def field_to_match(self, value: pulumi.Input['SqlInjectionMatchSetFieldToMatchArgs']): pulumi.set(self, "field_to_match", value) @property @pulumi.getter(name="textTransformation") def text_transformation(self) -> 
pulumi.Input[str]: return pulumi.get(self, "text_transformation") @text_transformation.setter def text_transformation(self, value: pulumi.Input[str]): pulumi.set(self, "text_transformation", value) @pulumi.input_type class WebACLActionArgs: def __init__(__self__, *, type: pulumi.Input[str]): pulumi.set(__self__, "type", type) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @pulumi.input_type class WebACLRuleArgs: def __init__(__self__, *, action: pulumi.Input['WebACLActionArgs'], priority: pulumi.Input[int], rule_id: pulumi.Input[str]): pulumi.set(__self__, "action", action) pulumi.set(__self__, "priority", priority) pulumi.set(__self__, "rule_id", rule_id) @property @pulumi.getter def action(self) -> pulumi.Input['WebACLActionArgs']: return pulumi.get(self, "action") @action.setter def action(self, value: pulumi.Input['WebACLActionArgs']): pulumi.set(self, "action", value) @property @pulumi.getter def priority(self) -> pulumi.Input[int]: return pulumi.get(self, "priority") @priority.setter def priority(self, value: pulumi.Input[int]): pulumi.set(self, "priority", value) @property @pulumi.getter(name="ruleId") def rule_id(self) -> pulumi.Input[str]: return pulumi.get(self, "rule_id") @rule_id.setter def rule_id(self, value: pulumi.Input[str]): pulumi.set(self, "rule_id", value) @pulumi.input_type class XssMatchSetFieldToMatchArgs: def __init__(__self__, *, type: pulumi.Input[str], data: Optional[pulumi.Input[str]] = None): pulumi.set(__self__, "type", type) if data is not None: pulumi.set(__self__, "data", data) @property @pulumi.getter def type(self) -> pulumi.Input[str]: return pulumi.get(self, "type") @type.setter def type(self, value: pulumi.Input[str]): pulumi.set(self, "type", value) @property @pulumi.getter def data(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "data") @data.setter def data(self, value: 
Optional[pulumi.Input[str]]): pulumi.set(self, "data", value) @pulumi.input_type class XssMatchSetXssMatchTupleArgs: def __init__(__self__, *, field_to_match: pulumi.Input['XssMatchSetFieldToMatchArgs'], text_transformation: pulumi.Input[str]): pulumi.set(__self__, "field_to_match", field_to_match) pulumi.set(__self__, "text_transformation", text_transformation) @property @pulumi.getter(name="fieldToMatch") def field_to_match(self) -> pulumi.Input['XssMatchSetFieldToMatchArgs']: return pulumi.get(self, "field_to_match") @field_to_match.setter def field_to_match(self, value: pulumi.Input['XssMatchSetFieldToMatchArgs']): pulumi.set(self, "field_to_match", value) @property @pulumi.getter(name="textTransformation") def text_transformation(self) -> pulumi.Input[str]: return pulumi.get(self, "text_transformation") @text_transformation.setter def text_transformation(self, value: pulumi.Input[str]): pulumi.set(self, "text_transformation", value)
19920625lsg/itcast-java
socket/java-network-programming/src/main/java/Chapter02Stream/P36CharactersGenerateOptimize.java
/*********************************************************** * @Description : 字符生成器.定义了一个翻出ASCII文本的服务器 * 优化:使用缓存并一次性全部写出 * @author : 梁山广(<NAME>) * @date : 2019/9/25 07:29 * @email : <EMAIL> ***********************************************************/ package Chapter02Stream; import java.io.IOException; import java.io.OutputStream; public class P36CharactersGenerateOptimize { public static void gen(OutputStream out) throws IOException { int firstPrintableCharacter = 33; int numberOfPrintableCharacters = 94; int numberOfCharactersPerline = 72; int start = firstPrintableCharacter; byte[] line = new byte[numberOfCharactersPerline + 2]; while (true) { for (int i = start; i < (start + numberOfCharactersPerline); i++) { // 存储在缓存数组中 line[i - start] = (byte) ((i - firstPrintableCharacter) % numberOfPrintableCharacters + firstPrintableCharacter); } line[72] = (byte) '\r'; line[73] = (byte) '\n'; out.write(line); start = ((start + 1) - firstPrintableCharacter) % numberOfPrintableCharacters + firstPrintableCharacter; } } public static void main(String[] args) { } }
enotio/Tempest
Tempest/ui/controls/lineedit.cpp
// lineedit.cpp — Tempest single-line text-input widget.
// Wraps a TextModel (txt) with selection, undo/redo, validation and
// platform soft-keyboard handling (Android/iOS).
#include "lineedit.h"

#include <Tempest/Application>
#include <Tempest/Android>
#include <Tempest/IOS>

using namespace Tempest;

// "Command" shortcuts use Cmd on OSX and Ctrl everywhere else.
#ifdef __OSX__
static const KeyEvent::KeyType cmdKey = KeyEvent::K_Command;
#else
static const KeyEvent::KeyType cmdKey = KeyEvent::K_Control;
#endif

// Sets up default size, focus policy, UI-scaled font, and a fixed-max
// vertical size policy derived from the font height (1.5x font size).
LineEdit::LineEdit() {
  resize(100,27);
  setFocusPolicy( WheelFocus );

  const UiMetrics& uiMetrics = Application::uiMetrics();
  Font fnt = Application::mainFont();
  fnt.setSize( int(uiMetrics.normalTextSize*uiMetrics.uiScale) );
  txt.setDefaultFont(fnt);

  SizePolicy p = sizePolicy();
  p.maxSize.h = fnt.size() + fnt.size()/2;
  p.minSize.h = p.maxSize.h;
  p.typeV = FixedMax;
  setSizePolicy(p);
  setTabChangesFocus(true);
  }

// Flushes any pending onEditingFinished notification before destruction.
LineEdit::~LineEdit() {
  storeText();
  }

void LineEdit::setEchoMode(LineEdit::EchoMode m) {
  auto st=state();
  st.echo = m;
  setWidgetState(st);
  }

LineEdit::EchoMode LineEdit::echoMode() const {
  return state().echo;
  }

// When true, Tab moves focus instead of being delivered as input.
void LineEdit::setTabChangesFocus(bool ch) {
  tabChFocus=ch;
  }

bool LineEdit::tabChangesFocus() const {
  return tabChFocus;
  }

void LineEdit::setFont(const Font &f) {
  txt.setDefaultFont(f);
  update();
  }

const Font &LineEdit::font() const {
  return txt.defaultFont();
  }

// NOTE(review): widens each byte of t directly to char16_t — correct only
// for ASCII/Latin-1 input, not UTF-8; confirm callers never pass UTF-8 here.
void LineEdit::setText( const std::string &t ) {
  std::u16string s;
  s.assign( t.begin(), t.end() );
  setText( s );
  }

// Replaces the content through the active validator, clears undo history
// and resets horizontal scroll. No-op when the text is unchanged.
void LineEdit::setText(const std::u16string &t) {
  if( txt.text()!=t ){
    const Validator& v = validator();
    v.assign(txt,t);
    txt.clearSteps();
    scroll = 0;
    onTextChanged(txt.text());
    update();
    }
  }

void LineEdit::setTextColor(const Color& c) {
  tColor = c;
  update();
  }

const Color& LineEdit::textColor() const {
  return tColor;
  }

const std::u16string &LineEdit::text() const {
  return txt.text();
  }

void LineEdit::setHint(const std::u16string &str) {
  hnt = str;
  }

void LineEdit::setHint(const std::string &str) {
  setHint( Tempest::SystemAPI::toUtf16( str ) );
  }

const std::u16string &LineEdit::hint() const {
  return hnt;
  }

size_t LineEdit::selectionBegin() const {
  return txt.selectionBegin();
  }

size_t LineEdit::selectionEnd() const {
  return txt.selectionEnd();
  }

void LineEdit::setSelectionBounds(size_t begin, size_t end) {
  txt.setSelectionBounds(begin,end);
  }

// Collapses the selection to its starting edge.
void LineEdit::resetSelection() {
  setSelectionBounds(selectionBegin(),selectionBegin());
  }

// Maps a widget-local point to a character index, compensating for the
// content margin; echo mode matters because masked text changes glyph widths.
size_t LineEdit::cursorForPosition(const Point &pos) const {
  const Margin& m = margin();
  return txt.cursorForPosition(pos-Point(m.left,m.top),echoMode());
  }

void LineEdit::setEditable(bool e) {
  auto st = state();
  st.editable=e;
  setWidgetState(st);
  }

// Takes ownership of v, re-validates current text and clears undo history.
void LineEdit::setValidator(LineEdit::Validator *v) {
  mvalidator.reset(v);
  validator().assign(txt,txt.text());
  txt.clearSteps();
  }

// Lazily creates a default (accept-anything) validator on first use.
const LineEdit::Validator &LineEdit::validator() const {
  if(!mvalidator)
    mvalidator.reset(new Validator());
  return *mvalidator;
  }

bool LineEdit::isEditable() const {
  return state().editable;
  }

// Press: place the caret/selection anchor; on mobile a second press at the
// same spot toggles the soft keyboard.
void LineEdit::mouseDownEvent(Tempest::MouseEvent &e) {
  if(!isEditable()){
    e.ignore();
    return;
    }
  if(!isEnabled())
    return;
#ifdef __MOBILE_PLATFORM__
  // Previous press position, only needed for the soft-input toggle below.
  size_t oldPress=pressPos;
#endif
  pressPos = cursorForPosition(e.pos());
  txt.setSelectionBounds(pressPos,pressPos);
#ifdef __ANDROID__
  if( isEditable() && oldPress==pressPos )
    AndroidAPI::toggleSoftInput();
#elif defined(__IOS__)
  if( isEditable() && oldPress==pressPos )
    iOSAPI::toggleSoftInput();
#endif
  updateSel();
  update();
  }

// Drag: extend the selection between the press anchor and the current point,
// normalizing so begin<=end.
void LineEdit::mouseDragEvent(MouseEvent &e) {
  if(!isEnabled())
    return;
  const size_t pos = cursorForPosition(e.pos());
  if( pressPos<=pos )
    txt.setSelectionBounds(pressPos,pos);
  else
    txt.setSelectionBounds(pos,pressPos);
  updateSel();
  update();
  }

void LineEdit::mouseUpEvent(Tempest::MouseEvent &) {
  updateSel();
  update();
  }

// Hover: show the hint tooltip anchored at this widget's screen rect.
void LineEdit::mouseMoveEvent(MouseEvent &) {
  Tempest::Point p = mapToRoot(Tempest::Point());
  Application::showHint(hnt, Tempest::Rect(p.x, p.y, w(), h()));
  }

// Draws background, then the text content, then nested child widgets.
void LineEdit::paintEvent( PaintEvent &e ) {
  Painter p(e);
  style().draw(p,this,Style::E_Background, state(),Rect(0,0,w(),h()),Style::Extra(*this));
  style().draw(p,txt, Style::TE_LineEditContent,state(),Rect(0,0,w(),h()),Style::Extra(*this));
  paintNested(e);
  }

// Keeps the text viewport in sync with the widget size minus margins.
void LineEdit::resizeEvent(SizeEvent&) {
  Size sz = size();
  sz.w -= margin().xMargin();
  sz.h -= margin().yMargin();
  txt.setViewport(sz);
  }

// Undo/redo clear the dirty flag and fire both change notifications.
void LineEdit::undo() {
  txt.undo();
  isEdited = false;
  onTextChanged (txt.text());
  onEditingFinished(txt.text());
  update();
  }

void LineEdit::redo() {
  txt.redo();
  isEdited = false;
  onTextChanged (txt.text());
  onEditingFinished(txt.text());
  update();
  }

// Central key handling: Tab focus traversal, character input through the
// validator, Enter commit, caret movement, Backspace/Delete.
void LineEdit::keyDownEvent( KeyEvent &e ) {
  if(!isEnabled())
    return;
  // Tab (or Cmd/Ctrl+Tab) moves focus; Shift reverses direction.
  if(e.key==Event::K_Tab && (tabChangesFocus() || SystemAPI::isKeyPressed(cmdKey)) ) {
    focusTraverse( !SystemAPI::isKeyPressed(Event::K_Shift) );
    return;
    }
  // Shortcut chords (Ctrl/Cmd+...) are handled in keyUpEvent, not as input.
  if(SystemAPI::isKeyPressed(cmdKey)){
    return;
    }

  size_t sedit = txt.selectionBegin();
  size_t eedit = txt.selectionEnd();
  const Validator& v = validator();

  // K_NoKey carries a plain character in e.u16 (text input).
  if( e.key==KeyEvent::K_NoKey && isEditable() ){
    if( eedit-sedit==txt.size() ){
      // Whole text selected: replace everything with the typed character.
      std::u16string tmp;
      tmp.resize(1);
      tmp[0] = e.u16;
      v.assign(txt,tmp);
      txt.setSelectionBounds(txt.size());
      } else {
      // Replace the selection (if any) with the typed character.
      if( sedit < eedit )
        v.erase(txt,sedit,eedit);
      v.insert(txt,sedit,eedit,e.u16);
      txt.setSelectionBounds(sedit);
      }
    isEdited = true;
    onTextChanged( txt.text() );
    onTextEdited ( txt.text() );
    update();
    return;
    }

  if( e.key==KeyEvent::K_Return ){
    // Commit: flush editing-finished, then the enter notification.
    storeText();
    onEnter(txt.text());
    return;
    }

  if( e.key==KeyEvent::K_Left ){
    // Collapse caret one position left (clamped at 0).
    if( sedit>0 )
      txt.setSelectionBounds(sedit-1);
    else
      txt.setSelectionBounds(0);
    update();
    return;
    }

  if( e.key==KeyEvent::K_Right ){
    // Collapse caret one position right (clamped at end).
    if( eedit<txt.size() )
      txt.setSelectionBounds(eedit+1);
    else
      txt.setSelectionBounds(txt.size());
    update();
    return;
    }

  if( e.key==KeyEvent::K_Back && isEditable() ){
    // Backspace: Validator::erase removes the selection, or one char to the
    // left when the selection is empty.
    v.erase( txt, sedit, eedit );
    txt.setSelectionBounds(sedit,eedit);
    isEdited = true;
    onTextChanged( txt.text() );
    onTextEdited ( txt.text() );
    update();
    return;
    }

  if( e.key==KeyEvent::K_Delete && isEditable() ){
    // Delete: with an empty selection, shift both bounds right so the erase
    // consumes the character after the caret instead of before it.
    if(sedit==eedit){
      ++sedit;
      ++eedit;
      }
    v.erase( txt, sedit, eedit );
    txt.setSelectionBounds(sedit,eedit);
    isEdited = true;
    onTextChanged( txt.text() );
    onTextEdited ( txt.text() );
    update();
    return;
    }

  e.ignore();
  }

// Ctrl/Cmd+Z = undo, Ctrl/Cmd+Y = redo.
void LineEdit::keyUpEvent(KeyEvent &e) {
  if( SystemAPI::isKeyPressed(cmdKey) && isEnabled() ){
    if(e.key==KeyEvent::K_Z)
      undo();
    if(e.key==KeyEvent::K_Y)
      redo();
    }
  }

// Focus change: flush pending edits; select-all on Tab focus-in; show the
// soft keyboard on mobile when gaining focus while editable.
void LineEdit::focusEvent(FocusEvent &e) {
  storeText();
  if( e.reason==Event::TabReason && e.in ){
    setSelectionBounds(0,text().size());
    }
#ifdef __ANDROID__
  if(e.in && isEditable())
    AndroidAPI::showSoftInput();
#endif
#ifdef __IOS__
  if(e.in && isEditable())
    iOSAPI::showSoftInput();
#endif
  }

// NoEcho fields never expose a selection: pin it collapsed at the end.
void LineEdit::updateSel() {
  if(echoMode()==NoEcho)
    txt.setSelectionBounds(txt.size(),txt.size());
  }

// Fires onEditingFinished exactly once per dirty editing session.
void LineEdit::storeText() {
  if( isEdited ){
    isEdited = false;
    onEditingFinished( txt.text() );
    }
  }

// Non-editable widgets must not participate in focus traversal.
void LineEdit::setWidgetState(const WidgetState &s) {
  setFocusPolicy( s.editable ? WheelFocus : NoFocus );
  Widget::setWidgetState(s);
  update();
  }

// Default validator: accept everything.
// NOTE(review): parameter order here is (ecursor, cursor) — swapped relative
// to the char16_t overload below — and the cursor advances by only one even
// when `data` has several characters; looks suspicious, confirm intent.
void LineEdit::Validator::insert(TextModel &string, size_t &ecursor, size_t &cursor,
                                 const std::u16string &data) const {
  string.insert(cursor,data);
  ++cursor;
  ++ecursor;
  }

// Inserts a single character at the cursor and advances both bounds.
void LineEdit::Validator::insert(TextModel &string, size_t &cursor, size_t &ecursor,
                                 char16_t data) const {
  char16_t ch[2] = { char16_t(data), 0 };
  string.insert(cursor,ch);
  ++cursor;
  ++ecursor;
  }

// Erases the selection; with a collapsed selection erases the character
// before it (backspace semantics). Collapses the bounds afterwards.
void LineEdit::Validator::erase(TextModel &string, size_t &scursor, size_t &ecursor) const {
  if( scursor < ecursor )
    string.erase( scursor, ecursor-scursor );
  else if( scursor > 0 ){
    string.erase( ecursor-1, 1 );
    --scursor;
    }
  ecursor = scursor;
  }

void LineEdit::Validator::assign(TextModel &string, const std::u16string &arg) const{
  string.assign(arg);
  }

// Integer validator: delegates multi-character input character by character.
void LineEdit::IntValidator::insert(TextModel &string,size_t &cursor,
                                    size_t &ecursor,const std::u16string &data) const {
  for(auto i:data)
    insert(string,cursor,ecursor,i);
  }

// Accepts only characters that keep the text a well-formed signed integer:
// optional leading '-', no leading zeros (except the single "0").
void LineEdit::IntValidator::insert(TextModel &string,size_t &cursor,
                                    size_t &ecursor,char16_t data) const {
  const std::u16string& chars=string.text();
  if( cursor==0 ){
    // Nothing may be typed in front of an existing minus sign.
    if(!chars.empty() && chars[0]=='-')
      return;
    // '-' allowed at the front unless it would precede a leading zero.
    if(data=='-' && !(chars.size()>=1 && chars[0]=='0')){
      Validator::insert(string,cursor,ecursor,data);
      return;
      }
    // '0' allowed only as the sole digit ("0" or "-0" in progress).
    if(data=='0' && ((chars.size()==1 && chars[0]=='-') || chars.size()==0)){
      Validator::insert(string,cursor,ecursor,data);
      return;
      }
    if(('1'<=data && data<='9') || data=='-'){
      Validator::insert(string,cursor,ecursor,data);
      return;
      }
    return;
    }
  const size_t pos = cursor-1;
  // Non-leading '0': forbidden right after '-' and forbidden when the text
  // is just "0" (would create a leading zero).
  if( data=='0' &&
      !(pos<chars.size() && chars[pos]=='-') &&
      !(chars.size()==1 && chars[0]=='0') ){
    Validator::insert(string,cursor,ecursor,data);
    return;
    }
  if('1'<=data && data<='9'){
    // Typing a nonzero digit after a lone "0" replaces it.
    if(chars.size()==1 && chars[0]=='0'){
      string.clear();
      cursor  = 0;
      ecursor = 0;
      }
    Validator::insert(string,cursor,ecursor,data);
    return;
    }
  }

// After an erase, never leave "-" or an empty string: normalize to "0".
void LineEdit::IntValidator::erase(TextModel &string, size_t &scursor, size_t &ecursor) const {
  Validator::erase(string,scursor,ecursor);
  if(string.size()==1 && string.text()[0]=='-')
    string.assign(u"0");
  if(string.empty())
    string.insert(0,u'0');
  }

// Assigns arg only when it is a syntactically valid integer (optional '-',
// no leading zeros); otherwise keeps the old text, defaulting empty to "0".
void LineEdit::IntValidator::assign(TextModel &string, const std::u16string &arg) const {
  if(arg.size()==0){
    if(string.size()==0){
      string.assign(u"0");
      }
    return;
    }
  bool good=true;
  if(arg[0]=='-'){
    if(arg.size()!=1){
      // "-0..." with more digits is a leading zero — reject.
      if(arg[1]=='0' && arg.size()!=2)
        good=false;
      for(size_t i=1; good && i<arg.size(); ++i ){
        const char16_t c = arg[i];
        good &= ('0'<=c && c<='9');
        }
      } else {
      // A bare "-" is not a number.
      good = false;
      }
    } else {
    if(arg[0]=='0' && arg.size()!=1)
      good=false;
    for(size_t i=0; good && i<arg.size(); ++i ){
      const char16_t c = arg[i];
      good &= ('0'<=c && c<='9');
      }
    }
  if(good){
    string.assign(arg);
    } else {
    if(string.size()==0){
      string.assign(u"0");
      }
    }
  }
ziwa-gangwang/AndroidDemo
app/src/main/java/com/zhou/android/common/ScrollableViewCompat.java
package com.zhou.android.common; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.StaggeredGridLayoutManager; import android.view.View; import android.widget.AbsListView; import android.widget.ScrollView; public class ScrollableViewCompat { ScrollableViewCompat() { } public interface IScrollView { boolean viewCanScrollUp(); } public static IScrollView getScrollView(View view) { IScrollView iScrollView = null; if (view instanceof AbsListView) { iScrollView = new AbsListViewCompat((AbsListView) view); } else if (view instanceof ScrollView) { iScrollView = new ScrollViewCompat((ScrollView) view); } else if (view instanceof RecyclerView) { iScrollView = new RecyclerViewCompat((RecyclerView) view); } else if (view instanceof IScrollView) { iScrollView = (IScrollView) view; } return iScrollView; } static class AbsListViewCompat implements IScrollView { AbsListView view; AbsListViewCompat(AbsListView view) { this.view = view; } @Override public boolean viewCanScrollUp() { if (view == null) return false; return view.getChildCount() > 0 && (view.getFirstVisiblePosition() > 0 || view.getChildAt(0).getTop() < view.getPaddingTop()); } } static class ScrollViewCompat implements IScrollView { ScrollView view; ScrollViewCompat(ScrollView view) { this.view = view; } @Override public boolean viewCanScrollUp() { if (view == null) return false; View child = view.getChildAt(0); if (child != null) { int childHeight = child.getHeight(); return view.getHeight() < childHeight + view.getPaddingTop() + view.getPaddingBottom(); } return false; } } static class RecyclerViewCompat implements IScrollView { RecyclerView view; RecyclerViewCompat(RecyclerView view) { this.view = view; } @Override public boolean viewCanScrollUp() { if (view == null) return false; RecyclerView.LayoutManager manager = view.getLayoutManager(); if (manager == null) return false; if (manager instanceof LinearLayoutManager) { 
LinearLayoutManager lm = (LinearLayoutManager) manager; if (lm.getOrientation() == LinearLayoutManager.HORIZONTAL) { return false; } return lm.findFirstVisibleItemPosition() > 0 || lm.findViewByPosition(0).getTop() < view.getPaddingTop(); } else if (manager instanceof StaggeredGridLayoutManager) { StaggeredGridLayoutManager g = (StaggeredGridLayoutManager) manager; int[] info = g.findFirstVisibleItemPositions(null); return info[0] > 0; } return false; } } }