// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.0
// protoc (unknown)
// source: proto/account/v1alpha1/account.proto
package v1alpha
import (
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
_ "google.golang.org/protobuf/types/known/emptypb"
reflect "reflect"
sync "sync"
)
// Compile-time guards that fail the build if the linked protoimpl runtime
// is incompatible with the generator version that produced this file.
const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// IsExistRequest carries the username whose existence is being checked
// (request message for the Accounts.IsExist RPC).
type IsExistRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Username is the account name to look up (field 1).
	Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *IsExistRequest) Reset() {
	*x = IsExistRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *IsExistRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *IsExistRequest as a protobuf message.
func (*IsExistRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *IsExistRequest) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[0]
	if protoimpl.UnsafeeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use IsExistRequest.ProtoReflect.Descriptor instead.
func (*IsExistRequest) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{0}
}

// GetUsername returns the username, or "" if the receiver is nil.
func (x *IsExistRequest) GetUsername() string {
	if x != nil {
		return x.Username
	}
	return ""
}
// IsExistResponse reports whether the queried username exists
// (response message for the Accounts.IsExist RPC).
type IsExistResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// IsExist is true when the username is already taken (field 1).
	IsExist bool `protobuf:"varint,1,opt,name=is_exist,json=isExist,proto3" json:"is_exist,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *IsExistResponse) Reset() {
	*x = IsExistResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *IsExistResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *IsExistResponse as a protobuf message.
func (*IsExistResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *IsExistResponse) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use IsExistResponse.ProtoReflect.Descriptor instead.
func (*IsExistResponse) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{1}
}

// GetIsExist returns the existence flag, or false if the receiver is nil.
func (x *IsExistResponse) GetIsExist() bool {
	if x != nil {
		return x.IsExist
	}
	return false
}
// CreateAccountRequest carries the data needed to register a new account
// (request message for the Accounts.CreateAccount RPC).
type CreateAccountRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Username for the new account (field 1).
	Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"`
	// Mail is the account's e-mail address (field 2).
	Mail string `protobuf:"bytes,2,opt,name=mail,proto3" json:"mail,omitempty"`
	// Password for the new account (field 3).
	Password string `protobuf:"bytes,3,opt,name=password,proto3" json:"password,omitempty"`
	// PublicKey associated with the account (field 4).
	PublicKey string `protobuf:"bytes,4,opt,name=publicKey,proto3" json:"publicKey,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *CreateAccountRequest) Reset() {
	*x = CreateAccountRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *CreateAccountRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *CreateAccountRequest as a protobuf message.
func (*CreateAccountRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *CreateAccountRequest) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CreateAccountRequest.ProtoReflect.Descriptor instead.
func (*CreateAccountRequest) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{2}
}

// GetUsername returns the username, or "" if the receiver is nil.
func (x *CreateAccountRequest) GetUsername() string {
	if x != nil {
		return x.Username
	}
	return ""
}

// GetMail returns the e-mail address, or "" if the receiver is nil.
func (x *CreateAccountRequest) GetMail() string {
	if x != nil {
		return x.Mail
	}
	return ""
}

// GetPassword returns the password, or "" if the receiver is nil.
func (x *CreateAccountRequest) GetPassword() string {
	if x != nil {
		return x.Password
	}
	return ""
}

// GetPublicKey returns the public key, or "" if the receiver is nil.
func (x *CreateAccountRequest) GetPublicKey() string {
	if x != nil {
		return x.PublicKey
	}
	return ""
}
// CreateAccountResponse carries a status code and human-readable reply
// (response message for the Accounts.CreateAccount RPC).
type CreateAccountResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Code is the status code returned by the server (field 1).
	Code string `protobuf:"bytes,1,opt,name=code,proto3" json:"code,omitempty"`
	// Reply is the accompanying status message (field 2).
	Reply string `protobuf:"bytes,2,opt,name=reply,proto3" json:"reply,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *CreateAccountResponse) Reset() {
	*x = CreateAccountResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *CreateAccountResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *CreateAccountResponse as a protobuf message.
func (*CreateAccountResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *CreateAccountResponse) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CreateAccountResponse.ProtoReflect.Descriptor instead.
func (*CreateAccountResponse) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{3}
}

// GetCode returns the status code, or "" if the receiver is nil.
func (x *CreateAccountResponse) GetCode() string {
	if x != nil {
		return x.Code
	}
	return ""
}

// GetReply returns the status message, or "" if the receiver is nil.
func (x *CreateAccountResponse) GetReply() string {
	if x != nil {
		return x.Reply
	}
	return ""
}
// GetByUsernameRequest carries the username of the account to fetch
// (request message for the Accounts.GetByUsername RPC).
type GetByUsernameRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Username identifying the account to fetch (field 1).
	Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *GetByUsernameRequest) Reset() {
	*x = GetByUsernameRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *GetByUsernameRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *GetByUsernameRequest as a protobuf message.
func (*GetByUsernameRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *GetByUsernameRequest) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GetByUsernameRequest.ProtoReflect.Descriptor instead.
func (*GetByUsernameRequest) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{4}
}

// GetUsername returns the username, or "" if the receiver is nil.
func (x *GetByUsernameRequest) GetUsername() string {
	if x != nil {
		return x.Username
	}
	return ""
}
// GetByUsernameResponse carries the stored account record
// (response message for the Accounts.GetByUsername RPC).
type GetByUsernameResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// AccountId is the account's unique identifier (field 1).
	AccountId string `protobuf:"bytes,1,opt,name=account_id,json=accountId,proto3" json:"account_id,omitempty"`
	// Username of the account (field 2).
	Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty"`
	// Mail is the account's e-mail address (field 3).
	Mail string `protobuf:"bytes,3,opt,name=mail,proto3" json:"mail,omitempty"`
	// Password as stored by the server (field 4). NOTE(review): returning
	// password material in a read response is a security smell — confirm
	// it is a hash, not plaintext.
	Password string `protobuf:"bytes,4,opt,name=password,proto3" json:"password,omitempty"`
	// ActorId links the account to its actor record (field 5).
	ActorId string `protobuf:"bytes,5,opt,name=actor_id,json=actorId,proto3" json:"actor_id,omitempty"`
	// IsPrivate is declared as a string in the .proto (field 6) —
	// presumably "true"/"false"; confirm against the server before parsing.
	IsPrivate string `protobuf:"bytes,6,opt,name=is_private,json=isPrivate,proto3" json:"is_private,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *GetByUsernameResponse) Reset() {
	*x = GetByUsernameResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *GetByUsernameResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *GetByUsernameResponse as a protobuf message.
func (*GetByUsernameResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *GetByUsernameResponse) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GetByUsernameResponse.ProtoReflect.Descriptor instead.
func (*GetByUsernameResponse) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{5}
}

// GetAccountId returns the account id, or "" if the receiver is nil.
func (x *GetByUsernameResponse) GetAccountId() string {
	if x != nil {
		return x.AccountId
	}
	return ""
}

// GetUsername returns the username, or "" if the receiver is nil.
func (x *GetByUsernameResponse) GetUsername() string {
	if x != nil {
		return x.Username
	}
	return ""
}

// GetMail returns the e-mail address, or "" if the receiver is nil.
func (x *GetByUsernameResponse) GetMail() string {
	if x != nil {
		return x.Mail
	}
	return ""
}

// GetPassword returns the stored password value, or "" if the receiver is nil.
func (x *GetByUsernameResponse) GetPassword() string {
	if x != nil {
		return x.Password
	}
	return ""
}

// GetActorId returns the actor id, or "" if the receiver is nil.
func (x *GetByUsernameResponse) GetActorId() string {
	if x != nil {
		return x.ActorId
	}
	return ""
}

// GetIsPrivate returns the privacy flag string, or "" if the receiver is nil.
func (x *GetByUsernameResponse) GetIsPrivate() string {
	if x != nil {
		return x.IsPrivate
	}
	return ""
}
// DeleteAccountRequest carries the password confirming the deletion
// (request message for the Accounts.DeleteAccount RPC; the target account
// is presumably derived from the caller's auth context — confirm upstream).
type DeleteAccountRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Password confirming the destructive operation (field 1).
	Password string `protobuf:"bytes,1,opt,name=password,proto3" json:"password,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *DeleteAccountRequest) Reset() {
	*x = DeleteAccountRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *DeleteAccountRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *DeleteAccountRequest as a protobuf message.
func (*DeleteAccountRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *DeleteAccountRequest) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use DeleteAccountRequest.ProtoReflect.Descriptor instead.
func (*DeleteAccountRequest) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{6}
}

// GetPassword returns the password, or "" if the receiver is nil.
func (x *DeleteAccountRequest) GetPassword() string {
	if x != nil {
		return x.Password
	}
	return ""
}
// DeleteAccountResponse carries a status code and human-readable reply
// (response message for the Accounts.DeleteAccount RPC).
type DeleteAccountResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Code is the status code returned by the server (field 1).
	Code string `protobuf:"bytes,1,opt,name=code,proto3" json:"code,omitempty"`
	// Reply is the accompanying status message (field 2).
	Reply string `protobuf:"bytes,2,opt,name=reply,proto3" json:"reply,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *DeleteAccountResponse) Reset() {
	*x = DeleteAccountResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *DeleteAccountResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *DeleteAccountResponse as a protobuf message.
func (*DeleteAccountResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *DeleteAccountResponse) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use DeleteAccountResponse.ProtoReflect.Descriptor instead.
func (*DeleteAccountResponse) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{7}
}

// GetCode returns the status code, or "" if the receiver is nil.
func (x *DeleteAccountResponse) GetCode() string {
	if x != nil {
		return x.Code
	}
	return ""
}

// GetReply returns the status message, or "" if the receiver is nil.
func (x *DeleteAccountResponse) GetReply() string {
	if x != nil {
		return x.Reply
	}
	return ""
}
// EditUsernameRequest carries an account id and its new username
// (request message for the Accounts.EditUsername RPC).
type EditUsernameRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Id of the account to rename (field 1).
	Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
	// Username is the new account name (field 2).
	Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *EditUsernameRequest) Reset() {
	*x = EditUsernameRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *EditUsernameRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *EditUsernameRequest as a protobuf message.
func (*EditUsernameRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *EditUsernameRequest) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use EditUsernameRequest.ProtoReflect.Descriptor instead.
func (*EditUsernameRequest) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{8}
}

// GetId returns the account id, or "" if the receiver is nil.
func (x *EditUsernameRequest) GetId() string {
	if x != nil {
		return x.Id
	}
	return ""
}

// GetUsername returns the new username, or "" if the receiver is nil.
func (x *EditUsernameRequest) GetUsername() string {
	if x != nil {
		return x.Username
	}
	return ""
}
// EditUsernameResponse carries a status code and human-readable reply
// (response message for the Accounts.EditUsername RPC).
type EditUsernameResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Code is the status code returned by the server (field 1).
	Code string `protobuf:"bytes,1,opt,name=code,proto3" json:"code,omitempty"`
	// Reply is the accompanying status message (field 2).
	Reply string `protobuf:"bytes,2,opt,name=reply,proto3" json:"reply,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *EditUsernameResponse) Reset() {
	*x = EditUsernameResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *EditUsernameResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *EditUsernameResponse as a protobuf message.
func (*EditUsernameResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *EditUsernameResponse) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use EditUsernameResponse.ProtoReflect.Descriptor instead.
func (*EditUsernameResponse) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{9}
}

// GetCode returns the status code, or "" if the receiver is nil.
func (x *EditUsernameResponse) GetCode() string {
	if x != nil {
		return x.Code
	}
	return ""
}

// GetReply returns the status message, or "" if the receiver is nil.
func (x *EditUsernameResponse) GetReply() string {
	if x != nil {
		return x.Reply
	}
	return ""
}
// EditPasswordRequest carries the credentials for a password change
// (request message for the Accounts.EditPassword RPC).
type EditPasswordRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Username of the account whose password is being changed (field 1).
	Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"`
	// Password is the current password, for verification (field 2).
	Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"`
	// New is the replacement password (field 3).
	New string `protobuf:"bytes,3,opt,name=new,proto3" json:"new,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *EditPasswordRequest) Reset() {
	*x = EditPasswordRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[10]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *EditPasswordRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *EditPasswordRequest as a protobuf message.
func (*EditPasswordRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *EditPasswordRequest) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[10]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use EditPasswordRequest.ProtoReflect.Descriptor instead.
func (*EditPasswordRequest) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{10}
}

// GetUsername returns the username, or "" if the receiver is nil.
func (x *EditPasswordRequest) GetUsername() string {
	if x != nil {
		return x.Username
	}
	return ""
}

// GetPassword returns the current password, or "" if the receiver is nil.
func (x *EditPasswordRequest) GetPassword() string {
	if x != nil {
		return x.Password
	}
	return ""
}

// GetNew returns the replacement password, or "" if the receiver is nil.
func (x *EditPasswordRequest) GetNew() string {
	if x != nil {
		return x.New
	}
	return ""
}
// EditPasswordResponse carries a status code and human-readable reply
// (response message for the Accounts.EditPassword RPC).
type EditPasswordResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Code is the status code returned by the server (field 1).
	Code string `protobuf:"bytes,1,opt,name=code,proto3" json:"code,omitempty"`
	// Reply is the accompanying status message (field 2).
	Reply string `protobuf:"bytes,2,opt,name=reply,proto3" json:"reply,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *EditPasswordResponse) Reset() {
	*x = EditPasswordResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[11]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *EditPasswordResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *EditPasswordResponse as a protobuf message.
func (*EditPasswordResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *EditPasswordResponse) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[11]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use EditPasswordResponse.ProtoReflect.Descriptor instead.
func (*EditPasswordResponse) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{11}
}

// GetCode returns the status code, or "" if the receiver is nil.
func (x *EditPasswordResponse) GetCode() string {
	if x != nil {
		return x.Code
	}
	return ""
}

// GetReply returns the status message, or "" if the receiver is nil.
func (x *EditPasswordResponse) GetReply() string {
	if x != nil {
		return x.Reply
	}
	return ""
}
// EditEmailRequest carries an account id and its new e-mail address
// (request message for the Accounts.EditEmail RPC).
type EditEmailRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Id of the account to update (field 1).
	Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
	// Mail is the new e-mail address (field 2).
	Mail string `protobuf:"bytes,2,opt,name=mail,proto3" json:"mail,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *EditEmailRequest) Reset() {
	*x = EditEmailRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[12]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *EditEmailRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *EditEmailRequest as a protobuf message.
func (*EditEmailRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *EditEmailRequest) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[12]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use EditEmailRequest.ProtoReflect.Descriptor instead.
func (*EditEmailRequest) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{12}
}

// GetId returns the account id, or "" if the receiver is nil.
func (x *EditEmailRequest) GetId() string {
	if x != nil {
		return x.Id
	}
	return ""
}

// GetMail returns the new e-mail address, or "" if the receiver is nil.
func (x *EditEmailRequest) GetMail() string {
	if x != nil {
		return x.Mail
	}
	return ""
}
// EditEmailResponse carries a status code and human-readable reply
// (response message for the Accounts.EditEmail RPC).
type EditEmailResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Code is the status code returned by the server (field 1).
	Code string `protobuf:"bytes,1,opt,name=code,proto3" json:"code,omitempty"`
	// Reply is the accompanying status message (field 2).
	Reply string `protobuf:"bytes,2,opt,name=reply,proto3" json:"reply,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *EditEmailResponse) Reset() {
	*x = EditEmailResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[13]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *EditEmailResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *EditEmailResponse as a protobuf message.
func (*EditEmailResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *EditEmailResponse) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[13]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use EditEmailResponse.ProtoReflect.Descriptor instead.
func (*EditEmailResponse) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{13}
}

// GetCode returns the status code, or "" if the receiver is nil.
func (x *EditEmailResponse) GetCode() string {
	if x != nil {
		return x.Code
	}
	return ""
}

// GetReply returns the status message, or "" if the receiver is nil.
func (x *EditEmailResponse) GetReply() string {
	if x != nil {
		return x.Reply
	}
	return ""
}
// GetActorByUsernameRequest carries the username whose actor record is
// requested (request message for the Accounts.GetActorByUsername RPC,
// which replies with an ActorDataResponse defined in actor.proto).
type GetActorByUsernameRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	// Username identifying the actor to fetch (field 1).
	Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"`
}

// Reset restores the message to its zero value; in unsafe mode it
// re-stores the message info so reflection keeps working after the wipe.
func (x *GetActorByUsernameRequest) Reset() {
	*x = GetActorByUsernameRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_proto_account_v1alpha1_account_proto_msgTypes[14]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text formatter.
func (x *GetActorByUsernameRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks *GetActorByUsernameRequest as a protobuf message.
func (*GetActorByUsernameRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily caching
// the message info on first use when unsafe mode is enabled.
func (x *GetActorByUsernameRequest) ProtoReflect() protoreflect.Message {
	mi := &file_proto_account_v1alpha1_account_proto_msgTypes[14]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GetActorByUsernameRequest.ProtoReflect.Descriptor instead.
func (*GetActorByUsernameRequest) Descriptor() ([]byte, []int) {
	return file_proto_account_v1alpha1_account_proto_rawDescGZIP(), []int{14}
}

// GetUsername returns the username, or "" if the receiver is nil.
func (x *GetActorByUsernameRequest) GetUsername() string {
	if x != nil {
		return x.Username
	}
	return ""
}
// File_proto_account_v1alpha1_account_proto is the protoreflect descriptor
// for proto/account/v1alpha1/account.proto (populated at init time).
var File_proto_account_v1alpha1_account_proto protoreflect.FileDescriptor

// file_proto_account_v1alpha1_account_proto_rawDesc is the wire-format
// FileDescriptorProto for account.proto. Generated data — do not edit.
var file_proto_account_v1alpha1_account_proto_rawDesc = []byte{
	0x0a, 0x24, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x2f,
	0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74,
	0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x15, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e, 0x76,
	0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67,
	0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61,
	0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f,
	0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65,
	0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f,
	0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d,
	0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x22, 0x70, 0x72, 0x6f, 0x74, 0x6f,
	0x2f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61,
	0x31, 0x2f, 0x61, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x2c, 0x0a,
	0x0e, 0x49, 0x73, 0x45, 0x78, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
	0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
	0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x2c, 0x0a, 0x0f, 0x49,
	0x73, 0x45, 0x78, 0x69, 0x73, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19,
	0x0a, 0x08, 0x69, 0x73, 0x5f, 0x65, 0x78, 0x69, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08,
	0x52, 0x07, 0x69, 0x73, 0x45, 0x78, 0x69, 0x73, 0x74, 0x22, 0x80, 0x01, 0x0a, 0x14, 0x43, 0x72,
	0x65, 0x61, 0x74, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65,
	0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01,
	0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12,
	0x0a, 0x04, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6d, 0x61,
	0x69, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x03,
	0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x1c,
	0x0a, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28,
	0x09, 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x22, 0x41, 0x0a, 0x15,
	0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x73,
	0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20,
	0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x70,
	0x6c, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x22,
	0x32, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x42, 0x79, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65,
	0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e,
	0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e,
	0x61, 0x6d, 0x65, 0x22, 0xbc, 0x01, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x42, 0x79, 0x55, 0x73, 0x65,
	0x72, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1d, 0x0a,
	0x0a, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28,
	0x09, 0x52, 0x09, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08,
	0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
	0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6d, 0x61, 0x69, 0x6c,
	0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x1a, 0x0a, 0x08,
	0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
	0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x63, 0x74, 0x6f,
	0x72, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x61, 0x63, 0x74, 0x6f,
	0x72, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74,
	0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x69, 0x73, 0x50, 0x72, 0x69, 0x76, 0x61,
	0x74, 0x65, 0x22, 0x32, 0x0a, 0x14, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x41, 0x63, 0x63, 0x6f,
	0x75, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61,
	0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61,
	0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x22, 0x41, 0x0a, 0x15, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65,
	0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
	0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63,
	0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x18, 0x02, 0x20, 0x01,
	0x28, 0x09, 0x52, 0x05, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x41, 0x0a, 0x13, 0x45, 0x64, 0x69,
	0x74, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
	0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64,
	0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01,
	0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x40, 0x0a, 0x14,
	0x45, 0x64, 0x69, 0x74, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x70,
	0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01,
	0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x70, 0x6c,
	0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x5f,
	0x0a, 0x13, 0x45, 0x64, 0x69, 0x74, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x52, 0x65,
	0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d,
	0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d,
	0x65, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x02, 0x20,
	0x01, 0x28, 0x09, 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x10, 0x0a,
	0x03, 0x6e, 0x65, 0x77, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6e, 0x65, 0x77, 0x22,
	0x40, 0x0a, 0x14, 0x45, 0x64, 0x69, 0x74, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x52,
	0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18,
	0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72,
	0x65, 0x70, 0x6c, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x65, 0x70, 0x6c,
	0x79, 0x22, 0x36, 0x0a, 0x10, 0x45, 0x64, 0x69, 0x74, 0x45, 0x6d, 0x61, 0x69, 0x6c, 0x52, 0x65,
	0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28,
	0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x02, 0x20,
	0x01, 0x28, 0x09, 0x52, 0x04, 0x6d, 0x61, 0x69, 0x6c, 0x22, 0x3d, 0x0a, 0x11, 0x45, 0x64, 0x69,
	0x74, 0x45, 0x6d, 0x61, 0x69, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12,
	0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x6f,
	0x64, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28,
	0x09, 0x52, 0x05, 0x72, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x37, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x41,
	0x63, 0x74, 0x6f, 0x72, 0x42, 0x79, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65,
	0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d,
	0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d,
	0x65, 0x32, 0xf7, 0x06, 0x0a, 0x08, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x73, 0x12, 0x5a,
	0x0a, 0x07, 0x49, 0x73, 0x45, 0x78, 0x69, 0x73, 0x74, 0x12, 0x25, 0x2e, 0x68, 0x76, 0x78, 0x61,
	0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74,
	0x6f, 0x2e, 0x49, 0x73, 0x45, 0x78, 0x69, 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
	0x1a, 0x26, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68,
	0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x73, 0x45, 0x78, 0x69, 0x73, 0x74,
	0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6c, 0x0a, 0x0d, 0x43, 0x72,
	0x65, 0x61, 0x74, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2b, 0x2e, 0x68, 0x76,
	0x78, 0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72,
	0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e,
	0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68,
	0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
	0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65,
	0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6c, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x42,
	0x79, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2b, 0x2e, 0x68, 0x76, 0x78, 0x61,
	0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74,
	0x6f, 0x2e, 0x47, 0x65, 0x74, 0x42, 0x79, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x52,
	0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e,
	0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47,
	0x65, 0x74, 0x42, 0x79, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x70,
	0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x86, 0x01, 0x0a, 0x0d, 0x44, 0x65, 0x6c, 0x65, 0x74,
	0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2b, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68,
	0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
	0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65,
	0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e, 0x76,
	0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x44, 0x65,
	0x6c, 0x65, 0x74, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f,
	0x6e, 0x73, 0x65, 0x22, 0x1a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x2a, 0x0f, 0x2f, 0x61, 0x70,
	0x69, 0x2f, 0x76, 0x31, 0x2f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x3a, 0x01, 0x2a, 0x12,
	0x69, 0x0a, 0x0c, 0x45, 0x64, 0x69, 0x74, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12,
	0x2a, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61,
	0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x64, 0x69, 0x74, 0x55, 0x73, 0x65, 0x72,
	0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x68, 0x76,
	0x78, 0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72,
	0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x64, 0x69, 0x74, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65,
	0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x69, 0x0a, 0x0c, 0x45, 0x64,
	0x69, 0x74, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x12, 0x2a, 0x2e, 0x68, 0x76, 0x78,
	0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f,
	0x74, 0x6f, 0x2e, 0x45, 0x64, 0x69, 0x74, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x52,
	0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e,
	0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45,
	0x64, 0x69, 0x74, 0x50, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f,
	0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x60, 0x0a, 0x09, 0x45, 0x64, 0x69, 0x74, 0x45, 0x6d, 0x61,
	0x69, 0x6c, 0x12, 0x27, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c,
	0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x64, 0x69, 0x74, 0x45,
	0x6d, 0x61, 0x69, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x68, 0x76,
	0x78, 0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x70, 0x72,
	0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x64, 0x69, 0x74, 0x45, 0x6d, 0x61, 0x69, 0x6c, 0x52, 0x65, 0x73,
	0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x72, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x41, 0x63,
	0x74, 0x6f, 0x72, 0x42, 0x79, 0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x30, 0x2e,
	0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e,
	0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x63, 0x74, 0x6f, 0x72, 0x42, 0x79,
	0x55, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
	0x28, 0x2e, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61,
	0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x6f, 0x72, 0x44, 0x61, 0x74,
	0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x31, 0x5a, 0x2f, 0x67,
	0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x76, 0x78, 0x61, 0x68, 0x76,
	0x2f, 0x68, 0x76, 0x78, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61,
	0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x62, 0x06,
	0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
// rawDescOnce/rawDescData lazily hold the (eventually GZIP-compressed)
// wire-format file descriptor. Generated by protoc-gen-go; do not edit.
var (
	file_proto_account_v1alpha1_account_proto_rawDescOnce sync.Once
	file_proto_account_v1alpha1_account_proto_rawDescData = file_proto_account_v1alpha1_account_proto_rawDesc
)

// file_proto_account_v1alpha1_account_proto_rawDescGZIP compresses the raw
// descriptor exactly once and returns the cached compressed bytes on every
// subsequent call.
func file_proto_account_v1alpha1_account_proto_rawDescGZIP() []byte {
	file_proto_account_v1alpha1_account_proto_rawDescOnce.Do(func() {
		file_proto_account_v1alpha1_account_proto_rawDescData = protoimpl.X.CompressGZIP(file_proto_account_v1alpha1_account_proto_rawDescData)
	})
	return file_proto_account_v1alpha1_account_proto_rawDescData
}
// Runtime type tables, generated by protoc-gen-go; do not edit.
// msgTypes holds one MessageInfo slot per message declared in this file.
var file_proto_account_v1alpha1_account_proto_msgTypes = make([]protoimpl.MessageInfo, 15)

// goTypes maps descriptor indices to the concrete Go message types.
var file_proto_account_v1alpha1_account_proto_goTypes = []interface{}{
	(*IsExistRequest)(nil),            // 0: hvxahv.v1alpha1.proto.IsExistRequest
	(*IsExistResponse)(nil),           // 1: hvxahv.v1alpha1.proto.IsExistResponse
	(*CreateAccountRequest)(nil),      // 2: hvxahv.v1alpha1.proto.CreateAccountRequest
	(*CreateAccountResponse)(nil),     // 3: hvxahv.v1alpha1.proto.CreateAccountResponse
	(*GetByUsernameRequest)(nil),      // 4: hvxahv.v1alpha1.proto.GetByUsernameRequest
	(*GetByUsernameResponse)(nil),     // 5: hvxahv.v1alpha1.proto.GetByUsernameResponse
	(*DeleteAccountRequest)(nil),      // 6: hvxahv.v1alpha1.proto.DeleteAccountRequest
	(*DeleteAccountResponse)(nil),     // 7: hvxahv.v1alpha1.proto.DeleteAccountResponse
	(*EditUsernameRequest)(nil),       // 8: hvxahv.v1alpha1.proto.EditUsernameRequest
	(*EditUsernameResponse)(nil),      // 9: hvxahv.v1alpha1.proto.EditUsernameResponse
	(*EditPasswordRequest)(nil),       // 10: hvxahv.v1alpha1.proto.EditPasswordRequest
	(*EditPasswordResponse)(nil),      // 11: hvxahv.v1alpha1.proto.EditPasswordResponse
	(*EditEmailRequest)(nil),          // 12: hvxahv.v1alpha1.proto.EditEmailRequest
	(*EditEmailResponse)(nil),         // 13: hvxahv.v1alpha1.proto.EditEmailResponse
	(*GetActorByUsernameRequest)(nil), // 14: hvxahv.v1alpha1.proto.GetActorByUsernameRequest
	(*ActorDataResponse)(nil),         // 15: hvxahv.v1alpha1.proto.ActorDataResponse
}

// depIdxs links each service method's input/output to an index in goTypes;
// the trailing bracketed entries delimit the sub-lists.
var file_proto_account_v1alpha1_account_proto_depIdxs = []int32{
	0,  // 0: hvxahv.v1alpha1.proto.Accounts.IsExist:input_type -> hvxahv.v1alpha1.proto.IsExistRequest
	2,  // 1: hvxahv.v1alpha1.proto.Accounts.CreateAccount:input_type -> hvxahv.v1alpha1.proto.CreateAccountRequest
	4,  // 2: hvxahv.v1alpha1.proto.Accounts.GetByUsername:input_type -> hvxahv.v1alpha1.proto.GetByUsernameRequest
	6,  // 3: hvxahv.v1alpha1.proto.Accounts.DeleteAccount:input_type -> hvxahv.v1alpha1.proto.DeleteAccountRequest
	8,  // 4: hvxahv.v1alpha1.proto.Accounts.EditUsername:input_type -> hvxahv.v1alpha1.proto.EditUsernameRequest
	10, // 5: hvxahv.v1alpha1.proto.Accounts.EditPassword:input_type -> hvxahv.v1alpha1.proto.EditPasswordRequest
	12, // 6: hvxahv.v1alpha1.proto.Accounts.EditEmail:input_type -> hvxahv.v1alpha1.proto.EditEmailRequest
	14, // 7: hvxahv.v1alpha1.proto.Accounts.GetActorByUsername:input_type -> hvxahv.v1alpha1.proto.GetActorByUsernameRequest
	1,  // 8: hvxahv.v1alpha1.proto.Accounts.IsExist:output_type -> hvxahv.v1alpha1.proto.IsExistResponse
	3,  // 9: hvxahv.v1alpha1.proto.Accounts.CreateAccount:output_type -> hvxahv.v1alpha1.proto.CreateAccountResponse
	5,  // 10: hvxahv.v1alpha1.proto.Accounts.GetByUsername:output_type -> hvxahv.v1alpha1.proto.GetByUsernameResponse
	7,  // 11: hvxahv.v1alpha1.proto.Accounts.DeleteAccount:output_type -> hvxahv.v1alpha1.proto.DeleteAccountResponse
	9,  // 12: hvxahv.v1alpha1.proto.Accounts.EditUsername:output_type -> hvxahv.v1alpha1.proto.EditUsernameResponse
	11, // 13: hvxahv.v1alpha1.proto.Accounts.EditPassword:output_type -> hvxahv.v1alpha1.proto.EditPasswordResponse
	13, // 14: hvxahv.v1alpha1.proto.Accounts.EditEmail:output_type -> hvxahv.v1alpha1.proto.EditEmailResponse
	15, // 15: hvxahv.v1alpha1.proto.Accounts.GetActorByUsername:output_type -> hvxahv.v1alpha1.proto.ActorDataResponse
	8,  // [8:16] is the sub-list for method output_type
	0,  // [0:8] is the sub-list for method input_type
	0,  // [0:0] is the sub-list for extension type_name
	0,  // [0:0] is the sub-list for extension extendee
	0,  // [0:0] is the sub-list for field type_name
}
func init() { file_proto_account_v1alpha1_account_proto_init() }

// file_proto_account_v1alpha1_account_proto_init registers this file's
// descriptor and message types with the protobuf runtime. Generated by
// protoc-gen-go; do not edit. Safe to call multiple times (idempotent).
func file_proto_account_v1alpha1_account_proto_init() {
	if File_proto_account_v1alpha1_account_proto != nil {
		return
	}
	// This file's messages reference types from actor.proto; init it first.
	file_proto_account_v1alpha1_actor_proto_init()
	// Without unsafe, each message needs an explicit field exporter so the
	// runtime can reach the unexported bookkeeping fields.
	if !protoimpl.UnsafeEnabled {
		file_proto_account_v1alpha1_account_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*IsExistRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*IsExistResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CreateAccountRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CreateAccountResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GetByUsernameRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GetByUsernameResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*DeleteAccountRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*DeleteAccountResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*EditUsernameRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*EditUsernameResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*EditPasswordRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*EditPasswordResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*EditEmailRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*EditEmailResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_proto_account_v1alpha1_account_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GetActorByUsernameRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_proto_account_v1alpha1_account_proto_rawDesc,
			NumEnums:      0,
			NumMessages:   15,
			NumExtensions: 0,
			NumServices:   1,
		},
		GoTypes:           file_proto_account_v1alpha1_account_proto_goTypes,
		DependencyIndexes: file_proto_account_v1alpha1_account_proto_depIdxs,
		MessageInfos:      file_proto_account_v1alpha1_account_proto_msgTypes,
	}.Build()
	File_proto_account_v1alpha1_account_proto = out.File
	// Release construction-time tables; the runtime now owns the data.
	file_proto_account_v1alpha1_account_proto_rawDesc = nil
	file_proto_account_v1alpha1_account_proto_goTypes = nil
	file_proto_account_v1alpha1_account_proto_depIdxs = nil
}
|
#!/bin/bash
# Benchmark prep: clean stale coverage/profiling artifacts from the redis
# source tree before a run.
source benchmark-scripts/general-helper.sh
bootstrap;
# Guard the cd: never run rm in whatever directory we happened to start in.
cd /benchmark-scripts/redis-src || exit 1
# -f: succeed quietly when a previous run left nothing behind (bare rm would
# error out on a clean tree).
rm -f *.gcda *.gcov *.log gmon.out
echo hello world
#make test-sentinel > mt.log 2>&1
#wait
#cd src/
#gcov -f -w -c -j * > gcov_fwcj.log 2>&1
#sync
|
<filename>usecase/rest/movie.go
package restUC
import (
"encoding/json"
"fmt"
"net/http"
"strconv"
"github.com/alhamsya/boilerplate-go/domain/constants"
"github.com/alhamsya/boilerplate-go/domain/models/database"
"github.com/alhamsya/boilerplate-go/domain/models/request"
"github.com/alhamsya/boilerplate-go/domain/models/response"
"github.com/alhamsya/boilerplate-go/infrastructure/external/omdb"
"github.com/alhamsya/boilerplate-go/lib/helpers/custom_error"
"github.com/gofiber/fiber/v2"
"github.com/volatiletech/null"
)
//DoGetListMovie get list movie based on request client
func (uc *UCInteractor) DoGetListMovie(ctx *fiber.Ctx, reqClient *modelReq.ListMovie) (resp *modelResp.ListMovie, httpCode int, err error) {
//implement call wrapping and on purpose do not use error wrapping
respWrapper, err := uc.CallWrapperRepo.GetWrapper("omdb").Call(func() (interface{}, error) {
//get data from redis
respMovie, err := uc.CacheRepo.GetListMovie(ctx.Context(), reqClient.Search, reqClient.Page)
if err == nil {
return respMovie, nil
}
//api call to the OMDB
respMovie, err = uc.OMDBRepo.GetListMovie(reqClient.Search, reqClient.Page)
if err != nil {
return nil, err
}
//ignore for return error
uc.CacheRepo.SetListMovie(ctx.Context(), reqClient.Search, reqClient.Page, respMovie)
return respMovie, nil
})
//handle error for API call
if err != nil {
return nil, http.StatusInternalServerError, customError.WrapFlag(err, "OMDBRepo", "GetListMovie")
}
//force data to struct
respMovie := respWrapper.(*omdb.OMDBList)
//handle response wrapper is nil
if respMovie == nil {
return nil, http.StatusInternalServerError, fmt.Errorf("data from api call does not exist")
}
status, err := strconv.ParseBool(respMovie.Response)
if err != nil {
return nil, http.StatusConflict, fmt.Errorf("response from api third party there is a problem")
}
if !status {
return nil, http.StatusBadRequest, fmt.Errorf(respMovie.Error)
}
resp = new(modelResp.ListMovie)
for _, movie := range respMovie.Search {
resp.Items = append(resp.Items, modelResp.Items{
Title: movie.Title,
Year: movie.Year,
MovieID: movie.ImdbID,
Types: movie.Type,
Poster: movie.Poster,
})
}
total, err := strconv.ParseInt(respMovie.TotalResults, 10, 64)
if err != nil {
return nil, http.StatusInternalServerError, fmt.Errorf("fail convert total result")
}
now, err := uc.UtilsRepo.CurrentTimeF(constCommon.DateTime)
if err != nil {
return nil, http.StatusInternalServerError, customError.WrapFlag(err, "datetime", "CurrentTimeF")
}
reqStr, _ := json.Marshal(reqClient)
respStr, _ := json.Marshal(respMovie)
reqDB := &modelDB.HistoryLog{
Endpoint: null.StringFrom(ctx.Path()),
Request: string(reqStr),
Response: string(respStr),
SourceData: constCommon.TypeREST,
CreatedAt: now,
CreatedBy: ctx.IP(),
}
_, err = uc.DBRepo.CreateHistoryLog(ctx.Context(), reqDB)
if err != nil {
return nil, http.StatusInternalServerError, customError.WrapFlag(err, "database", "CreateHistoryLog")
}
resp = &modelResp.ListMovie{
Items: resp.Items,
Total: total,
}
return resp, http.StatusOK, nil
}
// DoGetDetailMovie returns the details of a single movie identified by
// movieID. It serves from the cache when possible, falls back to the OMDB
// API, and records the request/response pair as a history-log entry.
func (uc *UCInteractor) DoGetDetailMovie(ctx *fiber.Ctx, movieID string) (resp *modelResp.DetailMovie, httpCode int, err error) {
	// Implement call wrapping; deliberately no error wrapping inside the closure.
	respWrapper, err := uc.CallWrapperRepo.GetWrapper("omdb").Call(func() (interface{}, error) {
		// Cache hit: serve straight from redis.
		respMovie, err := uc.CacheRepo.GetDetailMovie(ctx.Context(), movieID)
		if err == nil {
			return respMovie, nil
		}
		// Cache miss: call the OMDB API.
		respMovie, err = uc.OMDBRepo.GetDetailMovie(movieID)
		if err != nil {
			return nil, err
		}
		// Best effort: a failed cache write is deliberately ignored.
		uc.CacheRepo.SetDetailMovie(ctx.Context(), movieID, respMovie)
		return respMovie, nil
	})
	// Handle error for API call.
	if err != nil {
		return nil, http.StatusInternalServerError, customError.WrapFlag(err, "OMDBRepo", "GetDetailMovie")
	}
	// Bug fix: the comma-ok assertion also covers the nil case; a bare
	// assertion would panic if the wrapper ever returned an unexpected type.
	respMovie, ok := respWrapper.(*omdb.OMDBDetail)
	if !ok || respMovie == nil {
		return nil, http.StatusInternalServerError, fmt.Errorf("data from api call does not exist")
	}
	status, err := strconv.ParseBool(respMovie.Response)
	if err != nil {
		return nil, http.StatusConflict, customError.Wrap(err, "response from api third party there is a problem")
	}
	if !status {
		// %s keeps the third-party message from being interpreted as a
		// format string (go vet: non-constant format string).
		return nil, http.StatusBadRequest, customError.WrapFlag(fmt.Errorf("%s", respMovie.Error), "OMDBRepo", "status third party")
	}
	// Map ratings onto the response model.
	ratings := make([]modelResp.Ratings, 0, len(respMovie.Ratings))
	for _, rating := range respMovie.Ratings {
		ratings = append(ratings, modelResp.Ratings{
			Source: rating.Source,
			Value:  rating.Value,
		})
	}
	now, err := uc.UtilsRepo.CurrentTimeF(constCommon.DateTime)
	if err != nil {
		return nil, http.StatusInternalServerError, customError.Wrap(err, "CurrentTimeF")
	}
	// Marshal errors are deliberately ignored: the history log is best-effort
	// audit data built from values we just produced.
	reqStr, _ := json.Marshal(movieID)
	respStr, _ := json.Marshal(respMovie)
	reqDB := &modelDB.HistoryLog{
		Endpoint:   null.StringFrom(ctx.Path()),
		Request:    string(reqStr),
		Response:   string(respStr),
		SourceData: constCommon.TypeREST,
		CreatedAt:  now,
		CreatedBy:  ctx.IP(),
	}
	if _, err = uc.DBRepo.CreateHistoryLog(ctx.Context(), reqDB); err != nil {
		return nil, http.StatusInternalServerError, customError.WrapFlag(err, "database", "CreateHistoryLog")
	}
	resp = &modelResp.DetailMovie{
		Title:      respMovie.Title,
		Year:       respMovie.Year,
		Rated:      respMovie.Rated,
		Released:   respMovie.Released,
		Runtime:    respMovie.Runtime,
		Genre:      respMovie.Genre,
		Director:   respMovie.Director,
		Writer:     respMovie.Writer,
		Actors:     respMovie.Actors,
		Plot:       respMovie.Plot,
		Language:   respMovie.Language,
		Country:    respMovie.Country,
		Awards:     respMovie.Awards,
		Poster:     respMovie.Poster,
		Ratings:    ratings,
		MetaScore:  respMovie.Metascore,
		ImdbRating: respMovie.ImdbRating,
		ImdbVotes:  respMovie.ImdbVotes,
		ImdbID:     respMovie.ImdbID,
		Type:       respMovie.Type,
		DVD:        respMovie.DVD,
		BoxOffice:  respMovie.BoxOffice,
		Production: respMovie.Production,
		Website:    respMovie.Website,
	}
	return resp, http.StatusOK, nil
}
|
<reponame>trezm/friendjs<filename>lib/controllers/email_controller.js
var mandrill = require('mandrill-api');
var mandrillClient = new mandrill.Mandrill(config.mandrill.apiKey);
// Builds the HTML body of the confirmation e-mail: a short instruction
// followed by a blank line and the confirmation link with the code as a
// URL-encoded query parameter.
var defaultEmailHtml = function(confirmationAddress, confirmationCode) {
	var link = confirmationAddress + "?code=" + encodeURIComponent(confirmationCode)
	return [
		"To confirm your account, please follow the link:\n",
		"\n",
		link
	].join("")
}
// Builds the plain-text body of the confirmation e-mail. Currently produces
// the same content as the HTML variant.
var defaultEmailText = function(confirmationAddress, confirmationCode) {
	var link = confirmationAddress + "?code=" + encodeURIComponent(confirmationCode)
	return [
		"To confirm your account, please follow the link:\n",
		"\n",
		link
	].join("")
}
// Sends the account-confirmation e-mail through Mandrill (fire-and-forget:
// results and errors are only logged).
// NOTE(review): `config` is referenced here and at module load but never
// required in this file — it must be a global provided by the app bootstrap,
// otherwise this module throws. TODO: confirm and require it explicitly.
// Bug fix: declare with `var` — the bare assignment created an implicit
// global, which is a ReferenceError under strict mode.
var sendConfirmation = function(confirmationCode, to, from, subject) {
	var mandrillOptions = {
		html: defaultEmailHtml(config.mandrill.confirmationAddress, confirmationCode),
		text: defaultEmailText(config.mandrill.confirmationAddress, confirmationCode),
		to: to,
		subject: subject ? subject : "confirm your account",
		from_email: from.email,
		// NOTE(review): `from.to` looks like it should be `from.name` —
		// confirm against callers before changing.
		from_name: from.to
	}
	mandrillClient.messages.send({
		message: mandrillOptions,
		async: false
	},
	function(results) {
		console.log('results:', results);
	},
	function(error) {
		console.log('error:', error);
	});
}
module.exports = {
sendConfirmation: sendConfirmation
}; |
import React from 'react';

// Stateless SVG icon component. Despite the name "ArrowDown", the rendered
// glyph is a circled check mark (see the path data below) — NOTE(review):
// confirm the name against its usage sites.
// Props: width/height (svg dimensions) and color (fill of both paths).
export default class ArrowDown extends React.Component {
	render() {
		const { width, height, color } = this.props;
		// Paths and transforms are exported design coordinates; do not edit by hand.
		return (
			<svg width={width} height={height} viewBox="0 0 140 140" version="1.1" >
				<g id="Page-1" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd">
					<g id="Desktop-HD" transform="translate(-1537.000000, -951.000000)" fill={color} fillRule="nonzero">
						<g id="icon_Renew_Add" transform="translate(1537.000000, 954.000000)">
							<path d="M71.8064516,6.77419355 C105.677419,6.77419355 133.225806,34.3225806 133.225806,67.7419355 C133.225806,101.16129 105.677419,128.709677 71.8064516,128.709677 C37.9354839,128.709677 10.3870968,101.16129 10.3870968,67.7419355 C10.3870968,34.3225806 37.9354839,6.77419355 71.8064516,6.77419355 Z M71.8064516,0 C34.3225806,0 3.61290323,30.2580645 3.61290323,67.7419355 C3.61290323,105.225806 34.3225806,135.483871 71.8064516,135.483871 C109.290323,135.483871 140,105.225806 140,67.7419355 C140,30.2580645 109.290323,0 71.8064516,0 Z" id="Shape"></path>
							<path d="M61.25,96.4425 C58.9813093,96.5234854 56.8716966,95.2809303 55.8425,93.2575 L40.0225,77.5075 C37.743177,75.2281744 37.743178,71.5326599 40.0225023,69.2533356 C42.3018266,66.9740113 45.9973411,66.9740103 48.2766667,69.2533333 L61.2441667,82.1508333 L92.3358333,51.0533333 C94.6247102,48.8426621 98.2629945,48.8742778 100.513108,51.1243916 C102.763222,53.3745055 102.794838,57.0127898 100.584167,59.3016667 L66.6691667,93.2225 C66.3879844,93.7758019 66.0218819,94.2816532 65.5841667,94.7216667 C64.4904591,95.8157047 63.0069697,96.4305029 61.46,96.4308333 L61.25,96.4425 Z" id="圆角矩形_950_拷贝_8" fill={color} fillRule="nonzero"></path>
						</g>
					</g>
				</g>
			</svg>
		)
	}
}
<filename>CalibData/Acd/AcdPE.h<gh_stars>0
// $Header: /nfs/slac/g/glast/ground/cvs/CalibData/CalibData/Acd/AcdCno.h,v 1.4 2008/01/23 23:42:38 echarles Exp $
#ifndef CalibData_AcdPE_h
#define CalibData_AcdPE_h
#include "CalibData/Acd/AcdCalibObj.h"
#include "CalibData/CalibModel.h"
#include "CalibData/Acd/AcdCalibEnum.h"
namespace CalibData {
/**
* @class AcdPEFitDesc
*
* @brief Description of the number of photoelectrons/mip calibration
* - PE = the number of photoelectrons/ mip
*
* This calibration consists of:
*
* @author <NAME>
* $Header: /nfs/slac/g/glast/ground/cvs/CalibData/CalibData/Acd/AcdPE.h,v 1.4 2008/01/23 23:42:38 echarles Exp $
*/
class AcdPEFitDesc : public AcdCalibDescription {
public:
	/// Get this description (lazily constructed function-local singleton).
	static const AcdPEFitDesc& instance() {
		static const AcdPEFitDesc desc;
		return desc;
	};
public:
	/// Trivial D'ctor
	virtual ~AcdPEFitDesc(){;};
private:
	/// This is a singleton: registers calibration type PE under the name
	/// "ACD_PE" with the single variable "PE".
	AcdPEFitDesc()
		:AcdCalibDescription(AcdCalibData::PE,"ACD_PE"){
		addVarName("PE");
	}
};
/**
* @class AcdPE
*
* @brief Number of photoelectrons/mip calibration for 1 PMT.
*
* This calibration consists of:
* - PE = the number of photoelectrons/ mip
*
* @author <NAME>
* $Header: /nfs/slac/g/glast/ground/cvs/CalibData/CalibData/Acd/AcdPE.h,v 1.4 2008/01/23 23:42:38 echarles Exp $
*/
class AcdPE : public AcdCalibObj {
public:
	/// For gaudi: the class ID used by the transient data store.
	static const CLID& calibCLID() {
		return CLID_Calib_ACD_PE;
	}
	/// Define the type of calibration
	static AcdCalibData::CALTYPE calibType() {
		return AcdCalibData::PE;
	}
public:
	/// Build from description and a set of values.
	/// NOTE(review): the base constructor already receives `vals`; the extra
	/// setVals(vals,status) call looks redundant — confirm against AcdCalibObj.
	AcdPE(const AcdCalibDescription& desc, const std::vector<float>& vals, STATUS status=NOFIT) :
		AcdCalibObj(status,vals,desc){
		assert( desc.calibType() == calibType() );
		setVals(vals,status);
	}
	/// Build from individaul values (PE count plus fit status).
	AcdPE(float PE, STATUS status) :
		AcdCalibObj(status,AcdPEFitDesc::instance()){
		setVals(PE,status);
	}
	/// Trivial d'tor
	virtual ~AcdPE() {}
	// Provide access to the values: PE is the single stored variable.
	float getPE() const { return (*this)[0];}
};
}
#endif
|
#!/bin/bash
#
# Script to configure AWS Config Service for Delegated Admin.
# This script should be run in the AWS Organizations Management (formerly Master) Account
#
# Copyright 2021 Chris Farris <chrisf@primeharbor.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# First (and only) argument: the account id to delegate Config admin to.
ADMIN="$1"
if [ -z "$ADMIN" ] ; then
	echo "$0 <account_id for delegated admin for config>"
	exit 1
fi

# Allow AWS Config (and its multi-account setup) to operate across the org.
aws organizations enable-aws-service-access --service-principal=config-multiaccountsetup.amazonaws.com
aws organizations enable-aws-service-access --service-principal=config.amazonaws.com

# Register the chosen account for both service principals. "$ADMIN" is quoted
# so an accidentally multi-word argument fails loudly instead of being
# word-split into extra CLI arguments.
aws organizations register-delegated-administrator --account-id "$ADMIN" --service-principal config-multiaccountsetup.amazonaws.com
aws organizations register-delegated-administrator --account-id "$ADMIN" --service-principal config.amazonaws.com

# Show the resulting delegations for verification.
aws organizations list-delegated-administrators --service-principal config-multiaccountsetup.amazonaws.com
aws organizations list-delegated-administrators --service-principal config.amazonaws.com
|
<reponame>aishwaryashand/Core-Java<gh_stars>0
// Demonstrates explicit casts from a double to every narrower numeric type
// plus char, printing each converted value.
class A
{
	public static void main(String[] args)
	{
		double a=200.0;
		float b=(float)a;   // widening of precision loss only: 200.0f
		long c=(long)a;     // fractional part truncated: 200
		int d=(int)a;       // 200 fits, unchanged
		short e=(short)a;   // 200 fits in short, unchanged
		byte f=(byte)a;     // 200 exceeds byte range; wraps modulo 256 to -56
		char g=(char)a;     // numeric 200 becomes the char with code point 200 ('È')
		System.out.println(a);
		System.out.println(b);
		System.out.println(c);
		System.out.println(d);
		System.out.println(e);
		System.out.println(f);
		System.out.println(g);
	}
}
|
#=======
# Author: <NAME> (<EMAIL>)
#=======
module UICov
  # Per-screen coverage record. Defined via Class.new so ScreenInfo is a
  # plain subclass of CoverageInfo with no additional members.
  ScreenInfo = Class.new(CoverageInfo)
end
|
<reponame>ituk-ttu/ITUK-API
package ee.ituk.api.door.repository;
import ee.ituk.api.door.domain.DoorPermission;
import ee.ituk.api.user.domain.User;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.List;
/**
 * Spring Data JPA repository for {@link DoorPermission} entities.
 */
public interface DoorPermissionRepository extends JpaRepository<DoorPermission, Long> {
    /** All door permissions granted to the given user (derived query). */
    List<DoorPermission> findByUser(User user);
}
|
function processResponse<T>(response: Response<T>): string {
let result = `Source: ${response.source}\nDestination: ${response.destination}\nPayload: ${response.payload}`;
if (response.error) {
result += `\nError: ${response.error}`;
}
return result;
} |
<gh_stars>100-1000
// https://open.kattis.com/problems/dream
#include <bits/stdc++.h>
using namespace std;

int main() {
	// m: event name -> index of its occurrence in s.
	unordered_map<string, int> m;
	// s: chronological list of currently remembered events.
	vector<string> s;
	int n;
	cin >> n;
	while (n--) {
		char c;
		cin >> c;
		if (c == 'E') {
			// 'E': a new event happened; record it at the end.
			string t;
			cin >> t;
			m[t] = s.size();
			s.push_back(t);
		} else if (c == 'D') {
			// 'D': forget the r most recent events.
			int r;
			cin >> r;
			while (r--) {
				m.erase(s.back());
				s.pop_back();
			}
		} else {
			// Query of k statements. Plain names must have happened;
			// names prefixed with '!' must NOT have happened.
			// i: latest index among required events (s.size() if one is missing).
			// j: earliest index among forbidden events that are present.
			int i = -1, j = s.size(), k;
			cin >> k;
			while (k--) {
				string t;
				cin >> t;
				if (t[0] == '!') {
					t = t.substr(1);
					if (m.count(t)) j = min(j, m[t]);
				} else {
					if (!m.count(t)) i = s.size();
					else i = max(i, m[t]);
				}
			}
			if (i < j) {
				// Consistent iff every required event precedes every forbidden
				// one; if forbidden events exist, the tail from j on must have
				// been "just a dream" (report how many events that is).
				if (j == s.size()) cout << "Yes\n";
				else cout << s.size() - j << " Just A Dream\n";
			} else cout << "Plot Error\n";
		}
	}
}
|
const Protocol = require("protodef-neo")
const protocolData = require("./protocol.json")
const beam = new Protocol(protocolData).get("beam")
/**
* DEFAULT_UNIT - time units (1000 = 1 sec)
* DEFAULT_EPOCH - custom epoch (01/01/2000 00:00:00:000)
*/
const DEFAULT_UNIT = 1000
const DEFAULT_EPOCH = 946684800000
module.exports = class Sunbeam {
constructor (value = 0, unit = DEFAULT_UNIT, epoch = DEFAULT_EPOCH) {
this.unit = unit | 0
this.epoch = +epoch
this.value = value
}
get value () { return this.data }
set value (val) {
if (val instanceof Sunbeam) {
this.unit = val.unit
this.epoch = val.epoch
this.data = val.data
return
}
const buf = Buffer.alloc(8)
switch (typeof val) {
case "number":
buf.writeUInt32BE(val % 2**32)
buf.writeUInt32BE(val / 2**32 | 0, 4)
break
case "bigint":
buf.writeBigInt64BE(val)
break
case "string":
case "object":
Buffer.from(val, "base64").copy(buf)
break
default:
throw new Error("Unsupported type")
}
this.data = beam.fromBuffer(buf)
}
clone () { return new Sunbeam(this) }
toBuffer () { return beam.toBuffer(this.data) }
toString () { return this.toBuffer().toString("base64") }
toBigInt () { return this.toBuffer().readBigInt64BE() }
toNumber () {
const buf = this.toBuffer()
return buf.readUInt32BE() * 2**32 + buf.readUInt32BE(4)
}
get timestamp () { return this.data.timestamp }
get machineId () { return this.data.machineId }
get sequenceId () { return this.data.sequenceId }
get flipFlag () { return this.data.flipFlag }
set timestamp (v) { this.data.timestamp = +v }
set machineId (v) { this.data.machineId = v | 0 }
set sequenceId (v) { this.data.sequenceId = v | 0 }
set flipFlag (v) { this.data.flipFlag = v & 1 }
static generate (machineId, sequenceId, flipFlag, unit, epoch) {
const beam = new Sunbeam(void 0, unit, epoch)
beam.machineId = machineId
beam.sequenceId = sequenceId
beam.flipFlag = flipFlag
beam.timestamp = Date.now()
return beam
}
static *createGenerator (machineId, unit, epoch) {
const beam = new Sunbeam(void 0, unit, epoch)
beam.machineId = machineId
let sequenceId = 0
let flipFlag = 0
let prevTime = beam.unit
let currTime = 0
while (true) {
if (prevTime > (currTime = Date.now() % unit)) {
prevTime = currTime
sequenceId = 0
}
beam.timestamp = Date.now()
beam.sequenceId = (sequenceId = sequenceId++ % (1 << 8))
beam.flipFlag = (flipFlag ^= 1)
yield beam.clone()
}
}
}
|
def reverse_stack(input_stack):
    """Reverse ``input_stack`` in place and return it.

    Pops every element into a temporary list — which yields them in reverse
    order — then pushes that list back onto the emptied stack.

    Bug fix: the original iterated the temporary list with ``reversed()``,
    which restored the original order, so the function returned the stack
    unchanged instead of reversed.
    """
    popped = []
    while input_stack:
        popped.append(input_stack.pop())
    # `popped` is already in reversed order; extend preserves that order.
    input_stack.extend(popped)
    return input_stack
package com.tweetapp.app.dao.repository;
import com.tweetapp.app.dao.entity.Tweet;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Spring Data MongoDB repository for {@link Tweet} documents.
 */
@Repository
public interface TweetRepository extends MongoRepository<Tweet, String> {
    /** Tweets of one user whose reply flag differs from {@code isReply}. */
    List<Tweet> findByUserIdAndReplyNot(String userId, boolean isReply);
    /** All tweets whose reply flag differs from {@code isReply}. */
    List<Tweet> findByReplyNot(boolean isReply);
    /** Single tweet looked up by its tweetId field. */
    Tweet findByTweetId(String id);
    /** Deletes by tweetId; return value is derived by Spring Data. */
    String deleteByTweetId(String tweetId);
}
|
<reponame>kyu9341/relay_13<gh_stars>0
// Sample object-detection payload driving the tag buttons below.
// predictions[0] holds parallel arrays indexed per detection: numeric class
// ids, human-readable names, confidence scores, and normalized bounding
// boxes. NOTE(review): box component order (presumably
// [ymin, xmin, ymax, xmax]) should be confirmed against the producing model.
const result = {
	"predictions": [
		{
			"num_detections": 3,
			"detection_classes": [
				18.0,
				18.0,
				3.0
			],
			"detection_names": [
				"dog",
				"dog",
				"car"
			],
			"detection_scores": [
				0.998164,
				0.997666,
				0.622512
			],
			"detection_boxes": [
				[
					0.0565129,
					0.376249,
					0.960169,
					0.979782
				],
				[
					0.0280469,
					0.0325073,
					0.867158,
					0.314543
				],
				[
					0.0,
					0.700688,
					0.099921,
					0.795676
				]
			]
		}
	]
}
// Renders one button per detected object, or alerts when nothing was found.
if (result['predictions'].length){
	console.log('hello')
	const postsElement = document.getElementById('insertedButton')
	console.log(result)
	console.log(result['predictions'][0]['detection_classes'])
	console.log(postsElement)
	// Bug fix: interpolating the array returned by .map() into a template
	// literal stringifies it with commas between elements, injecting stray
	// "," text nodes between the buttons; join('') removes them. A dead
	// expression statement inside the callback was also removed.
	postsElement.innerHTML = result['predictions'][0]['detection_names'].map((detectionObject, index) => {
		return `
		<button class = 'objectTag'
		onclick = "imageResizing(event)"
		data-detection_boxes=${result['predictions'][0]['detection_boxes'][index]}>
		${detectionObject}
		</button>
		`
	}).join('')
}
else{
	alert(
		'객체가 없는데요?'
	)
}
|
<gh_stars>1-10
package logic;
/**
 * Oil friction modifier.
 * @author tdani
 *
 */
public class Oil extends FrictionModifier{
	private static final long serialVersionUID = -5183636879223466445L;
	/**
	 * The friction of oil: 0.2 times the inherited {@code friction} field.
	 * NOTE(review): this reads the field directly rather than calling
	 * {@code super.getFriction()} — confirm that matches the intended
	 * "0.2 times the friction of its superclass" semantics.
	 * @return the friction of oil
	 */
	@Override
	public Double getFriction()
	{
		return 0.2*this.friction;
	}
}
|
#!/usr/bin/env bash
# Validate the Safecoin token list JSON against the '#Tokenlist' definition
# in the CUE schema; exits non-zero on any schema violation.
cue vet src/tokens/safecoin.tokenlist.json automerge/schema.cue -d '#Tokenlist'
|
package com.engg.digitalorg.util;
import lombok.extern.slf4j.Slf4j;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URL;
import java.util.regex.Pattern;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;
/**
* The type Digital util.
*/
@Slf4j
public class DigitalUtil {
/**
* Is email valid boolean.
*
* @param email the email
* @return the boolean
*/
public static boolean isEmailValid(String email) {
String emailRegex = "^[a-zA-Z0-9_+&*-]+(?:\\." +
"[a-zA-Z0-9_+&*-]+)*@" +
"(?:[a-zA-Z0-9-]+\\.)+[a-z" +
"A-Z]{2,7}$";
Pattern pat = Pattern.compile(emailRegex);
if (email == null)
return false;
return pat.matcher(email).matches();
}
/**
* Is url valid boolean.
*
* @param url the url
* @return the boolean
*/
public static boolean isUrlValid(String url) {
try {
new URL(url).toURI();
return true;
} catch (Exception e) {
return false;
}
}
/**
* Compress bytes byte [ ].
*
* @param data the data
* @return the byte [ ]
*/
// compress the image bytes before storing it in the database
public static byte[] compressBytes(byte[] data) {
Deflater deflater = new Deflater();
deflater.setInput(data);
deflater.finish();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream(data.length);
byte[] buffer = new byte[1024];
while (!deflater.finished()) {
int count = deflater.deflate(buffer);
outputStream.write(buffer, 0, count);
}
try {
log.info("Compressed Image Byte Size - " + outputStream.toByteArray().length);
outputStream.close();
} catch (IOException e) {
log.error("Exception durring compressBytes ", e.getMessage());
}
return outputStream.toByteArray();
}
/**
* Decompress bytes byte [ ].
*
* @param data the data
* @return the byte [ ]
*/
// uncompress the image bytes before returning it to the angular application
public static byte[] decompressBytes(byte[] data) {
Inflater inflater = new Inflater();
inflater.setInput(data);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream(data.length);
byte[] buffer = new byte[1024];
try {
while (!inflater.finished()) {
int count = inflater.inflate(buffer);
outputStream.write(buffer, 0, count);
}
outputStream.close();
} catch (IOException ioe) {
log.error("Exception during decompressBytes", ioe.getMessage());
} catch (DataFormatException e) {
log.error("Exception during decompressBytes", e.getMessage());
}
return outputStream.toByteArray();
}
}
|
<reponame>kolesnikovde/neighbours<gh_stars>0
'use strict';
exports.vonNeumann = function(options, fn) {
if (!fn) fn = options, options = {};
var x = options.x || 0,
y = options.y || 0,
r = options.range || 1;
for (var i = 1; i <= r; ++i) {
for (var j = i; j > 0; --j) {
var k = i - j;
if (fn(x - j, y - k, i) === false ||
fn(x + k, y - j, i) === false ||
fn(x + j, y + k, i) === false ||
fn(x - k, y + j, i) === false) return;
}
}
}
exports.moore = function(options, fn) {
if (!fn) fn = options, options = {};
var x = options.x || 0,
y = options.y || 0,
r = options.range || 1;
for (var i = 1; i <= r; ++i) {
for (var j = -i; j <= i - 1; ++j) {
if (fn(x + j, y - i, i) === false ||
fn(x + i, y + j, i) === false ||
fn(x - j, y + i, i) === false ||
fn(x - i, y - j, i) === false) return;
}
}
}
|
<gh_stars>1-10
// Bootstrap: runs once on load — wires up the form submit handler, renders
// the seed names, and attaches the keyboard handlers to the name input.
;(() => {
	console.log('Ol<NAME>')
	preventFormSubmit()
	renderNames()
	const inputName = document.querySelector('#inputName')
	textInputFunctions(inputName)
})()
// Application state: the current list of names, whether the input is editing
// an existing entry, and the index of the entry being edited.
let globalNames = ['Um', 'Dois', 'Três']
let isEditing = false
let currentIndex = null
/**
 * Prevents the page from reloading when the form is submitted.
 */
function preventFormSubmit() {
	function handleFormSubmit(event) {
		event.preventDefault()
	}
	let form = document.querySelector('form')
	form.addEventListener('submit', handleFormSubmit)
}
/**
 * Wires up the given input element:
 * - focuses it on page load
 * - on Enter, saves (insert or update, depending on `isEditing`) and
 *   re-renders the name list
 * @param {HTMLFormElement} input
 */
function textInputFunctions(input) {
	// Append a new name immutably (spread instead of push).
	function insertName(newName) {
		// globalNames.push(newName)
		globalNames = [...globalNames, newName]
	}
	// Overwrite the entry currently being edited.
	function updateName(editedName) {
		globalNames[currentIndex] = editedName
	}
	function handleTyping(event) {
		// Ignore empty / whitespace-only input.
		let hasText = !!event.target.value && event.target.value.trim() !== ''
		if (!hasText) return clearInput(input)
		if (event.key === 'Enter') {
			if (isEditing) {
				updateName(event.target.value)
			} else {
				insertName(event.target.value)
			}
			isEditing = false
			clearInput(input)
			renderNames()
		}
	}
	input.focus()
	input.addEventListener('keyup', handleTyping)
}
/**
 * Renders the name list into the #names container as a <ul>, giving each
 * entry a delete button and a clickable span that arms edit mode.
 */
function renderNames() {
  let input = document.querySelector('#inputName')
  // Builds the "x" button that removes the entry at `index`.
  function createDeleteButton(index) {
    function deleteName() {
      // Replace the array (rather than splice in place) and redraw.
      globalNames = globalNames.filter((_, i) => i !== index)
      renderNames()
      clearInput(input)
    }
    let button = document.createElement('button')
    button.textContent = 'x'
    button.classList.add('deleteButton')
    button.addEventListener('click', deleteName)
    return button
  }
  // Builds the span showing `name`; clicking it loads the name into the
  // input and arms edit mode for textInputFunctions' Enter handler.
  function createSpan(name, index) {
    function editName() {
      input.value = name
      input.focus()
      isEditing = true
      currentIndex = index
    }
    let span = document.createElement('span')
    span.textContent = name
    span.classList.add('clickable')
    span.addEventListener('click', editName)
    return span
  }
  // Rebuild the whole list from scratch on every render.
  let divNames = document.querySelector('#names')
  divNames.innerHTML = ''
  let ul = document.createElement('ul')
  for (let i = 0; i < globalNames.length; i++) {
    let currentName = globalNames[i]
    let button = createDeleteButton(i)
    let span = createSpan(currentName, i)
    let li = document.createElement('li')
    li.appendChild(button)
    li.appendChild(span)
    ul.appendChild(li)
  }
  divNames.appendChild(ul)
}
/**
 * Clears the input and focuses it again.
 * @param {HTMLInputElement} input
 */
const clearInput = input => {
  input.value = ''
  input.focus()
}
|
# sst <host> — SSH to <host> and attach to its running tmux session.
# -t forces pseudo-terminal allocation so the remote tmux gets a real TTY.
sst() {
  ssh "$1" -t tmux attach
}
|
<reponame>jiqiang/golang-learning
package main
import "fmt"
import "time"
// main demonstrates three rate-limiting strategies built from channels
// and time.Tick:
//  1. one request per fixed 500ms interval,
//  2. a token bucket allowing a burst of 3, refilled every 500ms,
//  3. batching: process 3 requests, then wait for the next 1s tick.
func main() {
	// limit by time interval
	requests1 := make(chan int, 5)
	for i := 1; i <= 5; i++ {
		requests1 <- i
	}
	close(requests1)
	// limiter1 delivers one value every 500ms; receiving from it gates
	// each request to that rate.
	limiter1 := time.Tick(time.Millisecond * 500)
	for req := range requests1 {
		<-limiter1
		fmt.Println("request", req, time.Now())
	}
	// limit by number of same time requests
	// limiter2 is a token bucket: pre-filled with 3 tokens so the first
	// three requests pass immediately, then topped up at 1 token / 500ms.
	limiter2 := make(chan time.Time, 3)
	for i := 1; i <= 3; i++ {
		limiter2 <- time.Now()
	}
	// NOTE(review): this refill goroutine never terminates; tolerable in
	// a demo because the process exits shortly after main returns.
	go func() {
		for t := range time.Tick(time.Millisecond * 500) {
			limiter2 <- t
		}
	}()
	requests2 := make(chan int, 5)
	for i := 1; i <= 5; i++ {
		requests2 <- i
	}
	close(requests2)
	for req := range requests2 {
		<-limiter2
		fmt.Println("request", req, time.Now())
	}
	// process 3 and wait 1 second
	limiter3 := time.Tick(time.Second * 1)
	requests3 := make(chan int, 30)
	for i := 1; i <= 30; i++ {
		requests3 <- i
	}
	close(requests3)
	index := 1
	for req := range requests3 {
		fmt.Println("request", req)
		// After every third request, block until the next 1-second tick.
		if index % 3 == 0 {
			<-limiter3
		}
		index++
	}
}
|
#!/bin/sh
# Launch Model Getter Script
# Asks the Legato `cm` (cellular modem) tool for device information and
# prints the result; intended to run on a Legato-based target.
MODEL=$(/legato/systems/current/bin/cm info device)
echo "$MODEL"
#!/bin/bash
# Builds a container image for the Submariner project, optionally reusing
# cached layers from the latest published image and optionally producing
# an OCI tarball (required for multi-arch builds). The resulting image ID
# is written to the file named by --hash.
## Process command line flags ##
source ${SCRIPTS_DIR}/lib/shflags
DEFINE_string 'tag' "${DEV_VERSION}" "Tag to set for the local image"
DEFINE_string 'repo' 'quay.io/submariner' "Quay.io repo to use for the image"
DEFINE_string 'image' '' "Image name to build" 'i'
DEFINE_string 'dockerfile' '' "Dockerfile to build from" 'f'
DEFINE_string 'buildargs' '' "Build arguments to pass to 'docker build'"
DEFINE_boolean 'cache' true "Use cached layers from latest image"
DEFINE_string 'platform' '' 'Platforms to target'
DEFINE_string 'hash' '' "File to write the hash to" 'h'
DEFINE_string 'oci' '' 'File to write an OCI tarball to instead of an image in the local registry'
FLAGS "$@" || exit $?
eval set -- "${FLAGS_ARGV}"
tag="${FLAGS_tag}"
repo="${FLAGS_repo}"
image="${FLAGS_image}"
dockerfile="${FLAGS_dockerfile}"
buildargs="${FLAGS_buildargs}"
platform="${FLAGS_platform}"
hashfile="${FLAGS_hash}"
ocifile="${FLAGS_oci}"
[[ "${FLAGS_cache}" = "${FLAGS_TRUE}" ]] && cache=true || cache=false
# Validate required flags before doing any work.
[[ -n "${image}" ]] || { echo "The image to build must be specified!"; exit 1; }
[[ -n "${dockerfile}" ]] || { echo "The dockerfile to build from must be specified!"; exit 1; }
[[ -n "${hashfile}" ]] || { echo "The file to write the hash to must be specified!"; exit 1; }
# A comma in --platform means multiple architectures, which Docker can
# only emit as an OCI archive, not into the local image store.
if [[ "${platform}" =~ , && -z "${ocifile}" ]]; then
    echo Multi-arch builds require OCI output, please specify --oci
    exit 1
fi
source ${SCRIPTS_DIR}/lib/debug_functions
set -e
local_image=${repo}/${image}:${tag}
cache_image=${repo}/${image}:${CUTTING_EDGE}
# When using cache pull latest image from the repo, so that its layers may be reused.
cache_flag=''
if [[ "$cache" = true ]]; then
    cache_flag="--cache-from ${cache_image}"
    if [[ -z "$(docker image ls -q ${cache_image})" ]]; then
        docker pull ${cache_image} || :
    fi
    # Also pre-pull every parent image named in FROM lines (resolving
    # ${BASE_BRANCH}), so their layers are available as cache sources.
    # The shellcheck linting tool recommends piping to a while read loop, but that doesn't work for us
    # because the while loop ends up in a subshell
    # shellcheck disable=SC2013
    for parent in $(awk '/FROM/ {
                       for (i = 2; i <= NF; i++) {
                           if ($i == "AS") next;
                           if (!($i ~ /^--platform/ || $i ~ /scratch/))
                               print gensub("\\${BASE_BRANCH}", ENVIRON["BASE_BRANCH"], "g", $i)
                       }
                   }' "${dockerfile}"); do
        cache_flag+=" --cache-from ${parent}"
        docker pull ${parent} || :
    done
fi
output_flag=--load
[[ -z "${ocifile}" ]] || output_flag="--output=type=oci,dest=${ocifile}"
# Default to linux/amd64 (for CI); platforms match Go OS/arch
if command -v "${GO:-go}" >/dev/null; then
    default_platform="$(${GO:-go} env GOOS)/$(${GO:-go} env GOARCH)"
else
    echo Unable to determine default container image platform, assuming linux/amd64
    default_platform=linux/amd64
fi
[[ -n "$platform" ]] || platform="$default_platform"
# Rebuild the image to update any changed layers and tag it back so it will be used.
buildargs_flag="--build-arg BUILDKIT_INLINE_CACHE=1 --build-arg BASE_BRANCH=${BASE_BRANCH}"
[[ -z "${buildargs}" ]] || buildargs_flag="${buildargs_flag} --build-arg ${buildargs}"
# Cross-platform builds go through buildx when available; otherwise fall
# back to plain BuildKit with a warning (native-platform builds only).
if [[ "${platform}" != "${default_platform}" ]] && docker buildx version > /dev/null 2>&1; then
    docker buildx use buildx_builder || docker buildx create --name buildx_builder --use
    docker buildx build ${output_flag} -t ${local_image} ${cache_flag} -f ${dockerfile} --iidfile "${hashfile}" --platform ${platform} ${buildargs_flag} .
else
    # Fall back to plain BuildKit
    if [[ "${platform}" != "${default_platform}" ]]; then
        echo "WARNING: buildx isn't available, cross-arch builds won't work as expected"
    fi
    DOCKER_BUILDKIT=1 docker build -t ${local_image} ${cache_flag} -f ${dockerfile} --iidfile "${hashfile}" ${buildargs_flag} .
fi
# We can only tag the image in non-OCI mode
[[ -n "${ocifile}" ]] || docker tag ${local_image} ${cache_image}
|
package org.junithelper.plugin.action;
import java.io.File;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.ide.IDE;
import org.junithelper.core.config.Configuration;
import org.junithelper.core.config.ConfigurationLoader;
import org.junithelper.core.config.extension.ExtConfiguration;
import org.junithelper.core.config.extension.ExtConfigurationLoader;
import org.junithelper.core.constant.RegExp;
import org.junithelper.core.constant.StringValue;
import org.junithelper.plugin.Activator;
import org.junithelper.plugin.constant.Dialog;
import org.junithelper.plugin.io.PropertiesLoader;
import org.junithelper.plugin.page.PreferenceLoader;
import org.junithelper.plugin.util.EclipseIFileUtil;
import org.junithelper.plugin.util.ResourcePathUtil;
/**
 * Base class for JUnit Helper plugin actions. Bundles the shared plumbing:
 * loading plugin configuration (project-local properties file or Eclipse
 * preferences), resolving workspace paths/resources, and showing the
 * common warning/confirm dialogs.
 */
public abstract class AbstractAction extends Action {
    // Eclipse preference store; lazily initialized by getIPreferenceStore().
    public IPreferenceStore store = null;
    /**
     * Builds the plugin configuration for the selected project. A
     * "junithelper-config.properties" file at the project root takes
     * precedence over the Eclipse preference store; an optional
     * "junithelper-extension.xml" additionally enables extension config.
     * Returns null when nothing (or several things) is selected.
     */
    protected Configuration getConfiguration(IPreferenceStore store, ISelection selection) {
        Configuration config = null;
        // read from "junithelper-config.properties" if it exists
        // at project root dir
        StructuredSelection structuredSelection = null;
        if (selection instanceof StructuredSelection) {
            // viewer
            structuredSelection = (StructuredSelection) selection;
        }
        if (!isNotSelected(structuredSelection) && !isSelectedSeveral(structuredSelection)) {
            String projectName = getProjectName(structuredSelection);
            String projectRootPath = getIWorkspaceRoot().findMember(projectName).getLocationURI().getPath()
                    + StringValue.DirectorySeparator.General;
            String configFilepath = projectRootPath + "junithelper-config.properties";
            File configProperites = new File(configFilepath);
            if (configProperites.exists()) {
                try {
                    config = new ConfigurationLoader().load(configFilepath);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                // load from Eclipse Preference
                config = new PreferenceLoader(store).getConfig();
            }
            String extConfigFilepath = projectRootPath + "junithelper-extension.xml";
            File extConfigXML = new File(extConfigFilepath);
            if (extConfigXML.exists()) {
                try {
                    ExtConfiguration extConfig = new ExtConfigurationLoader().load(extConfigFilepath);
                    config.isExtensionEnabled = true;
                    config.extConfiguration = extConfig;
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        return config;
    }
    /** Returns a message-properties loader for the given UI language. */
    protected PropertiesLoader getPropertiesLoader(String language) {
        return new PropertiesLoader(language);
    }
    // -------------------
    // String value
    /** Maps a class name to its test class file name, e.g. Foo -> FooTest.java. */
    protected String getTestClassNameFromClassName(String className) {
        return className + StringValue.JUnit.TestClassNameSuffix + StringValue.FileExtension.JavaFile;
    }
    /** Detects the charset of the given file, falling back to defaultCharset. */
    protected String getDetectedCharsetFrom(IFile file, String defaultCharset) {
        return EclipseIFileUtil.getDetectedCharsetFrom(file, defaultCharset);
    }
    /** Extracts the bare class name (file name without extension) from a resource path. */
    protected String getClassNameFromResourcePathForTargetClassFile(String resourcePathForTargetClassFile) {
        String[] splittedArray = resourcePathForTargetClassFile.split(StringValue.DirectorySeparator.General);
        return splittedArray[splittedArray.length - 1].split("\\.")[0];
    }
    /**
     * Extracts the project name from the selection. The project-root path
     * starts with a separator, so after splitting, element [1] is the
     * project segment.
     */
    protected String getProjectName(StructuredSelection structuredSelection) {
        String pathFromProjectRoot = getPathFromProjectRoot(structuredSelection);
        String[] dirArrFromProjectRoot = pathFromProjectRoot.split(StringValue.DirectorySeparator.General);
        return dirArrFromProjectRoot[1];
    }
    // -------------------
    // Path
    /** Returns the selected resource's path starting from the project root. */
    protected String getPathFromProjectRoot(StructuredSelection structuredSelection) {
        return ResourcePathUtil.getPathStartsFromProjectRoot(structuredSelection);
    }
    /**
     * Derives the production-class resource path for the current selection:
     * strips the project segment, normalizes line endings, truncates
     * anything after ".java", and maps a test class file name back to its
     * target class file name.
     */
    protected String getResourcePathForTargetClassFile(StructuredSelection structuredSelection) {
        // path started from project root
        String pathFromProjectRoot = getPathFromProjectRoot(structuredSelection);
        // path started from project root
        // ex. /{projectName}/src/main/java/hoge/foo/var/TestTarget.java
        String[] dirArrFromProjectRoot = pathFromProjectRoot.split(StringValue.DirectorySeparator.General);
        // test case file create filesystem path
        String resourcePathForTargetClassFile = StringValue.Empty;
        int len = dirArrFromProjectRoot.length;
        // skip [0] (empty) and [1] (project name); rejoin the rest
        for (int i = 2; i < len; i++) {
            resourcePathForTargetClassFile += dirArrFromProjectRoot[i] + StringValue.DirectorySeparator.General;
        }
        resourcePathForTargetClassFile = resourcePathForTargetClassFile
                .replaceAll(RegExp.CRLF, StringValue.Empty)
                .replaceFirst("\\.java.+", ".java")
                .replace(StringValue.JUnit.TestClassNameSuffix + StringValue.FileExtension.JavaFile,
                        StringValue.FileExtension.JavaFile);
        return resourcePathForTargetClassFile;
    }
    // -------------------
    // Eclipse SDK
    /** Lazily obtains the plugin's preference store. */
    protected IPreferenceStore getIPreferenceStore() {
        if (store == null) {
            store = Activator.getDefault().getPreferenceStore();
        }
        return store;
    }
    /** Returns the workspace root. */
    protected IWorkspaceRoot getIWorkspaceRoot() {
        return ResourcesPlugin.getWorkspace().getRoot();
    }
    /** Returns the active workbench page. */
    protected IWorkbenchPage getIWorkbenchPage() {
        return PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage();
    }
    /** Looks up a project by name in the workspace. */
    protected IProject getIProject(String projectName) {
        return getIWorkspaceRoot().getProject(projectName);
    }
    /** Resolves a project-relative resource path to an IFile handle. */
    protected IFile getIFile(IProject project, String resourcePath) {
        return project.getFile(resourcePath);
    }
    /** Opens the file in its default editor and returns the editor part. */
    protected IEditorPart getIEditorPart(IWorkbenchPage page, IFile file) throws Exception {
        String editorId = EclipseIFileUtil.getIEditorDescriptorFrom(file).getId();
        return IDE.openEditor(page, file, editorId);
    }
    // -------------------
    // selection
    /** True when a structured selection exists but contains no elements. */
    protected boolean isNotSelected(StructuredSelection structuredSelection) {
        return structuredSelection != null && structuredSelection.size() == 0;
    }
    /** True when more than one element is selected. */
    protected boolean isSelectedSeveral(StructuredSelection structuredSelection) {
        return structuredSelection != null && structuredSelection.size() > 1;
    }
    // -------------------
    // open dialog
    /** Warns that a required value is missing. */
    protected void openWarningForRequired(PropertiesLoader props) {
        MessageDialog.openWarning(new Shell(), props.get(Dialog.Common.title), props.get(Dialog.Common.required));
    }
    /** Warns that refreshing a workspace resource failed. */
    protected void openWarningForResourceRefreshError(PropertiesLoader props) {
        MessageDialog.openWarning(new Shell(), props.get(Dialog.Common.title),
                props.get(Dialog.Common.resourceRefreshError));
    }
    /** Warns that exactly one element must be selected. */
    protected void openWarningForSelectOneOnly(PropertiesLoader props) {
        MessageDialog.openWarning(new Shell(), props.get(Dialog.Common.title), props.get(Dialog.Common.selectOneOnly));
    }
    /** Warns that the selection must be a Java source file. */
    protected void openWarningForSelectJavaFile(PropertiesLoader props) {
        MessageDialog.openWarning(new Shell(), props.get(Dialog.Common.title), props.get(Dialog.Common.selectJavaFile));
    }
    /** Shows a warning dialog with a custom message. */
    protected void openWarning(PropertiesLoader props, String message) {
        MessageDialog.openWarning(new Shell(), props.get(Dialog.Common.title), message);
    }
    /** Shows a confirm dialog; returns true when the user accepts. */
    protected boolean openConfirm(PropertiesLoader props, String message) {
        return MessageDialog.openConfirm(new Shell(), props.get(Dialog.Common.title), message);
    }
}
|
<gh_stars>0
'use strict'
// Copy from this file to help setup a command faster
// Remember to check file path for imports
let commandInterface = require('../Interfaces/commandInterface')
let logger = require('../../Util/logger').logger('Command: <name>')
let perm = require('../Permissions/Permissions')
//The name is found in the name. remember to name the command file '<name>Command.js'
// Alternative invocation names for the command.
let aliases = []
// Human-readable help text shown to users.
let description = ''
// Command entry point: receives the triggering message, parsed arguments
// and the per-guild settings object.
function doSomething (msg, args, settings) {
}
// Hook for validating per-command settings; return false when the check
// does not handle the given setting key.
function extraSettingsCheck(cmd, id, message, args) {
	switch(cmd) {
	}
	return false
}
// Settings can be empty -- does not support maps
let settings = {
}
// Change this to reflect access level by either giving it under 1000 score or a default value
let accessLevel = perm.ACCESS_LEVELS.USER
// Assembles the command object consumed by the command loader.
function load() {
	return commandInterface.createCommand(
		aliases,
		description,
		doSomething,
		settings,
		extraSettingsCheck,
		accessLevel
	)
}
exports.load = load
<reponame>BU-EC444/esp-idf
/*
* SPDX-FileCopyrightText: 2015-2022 Espressif Systems (Shanghai) CO LTD
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <stdlib.h>
#include <new>
#include <sys/lock.h>
#include "wear_levelling.h"
#include "WL_Config.h"
#include "WL_Ext_Cfg.h"
#include "WL_Flash.h"
#include "WL_Ext_Perf.h"
#include "WL_Ext_Safe.h"
#include "SPI_Flash.h"
#include "Partition.h"
// Compile-time defaults for the wear-levelling layer; each may be
// overridden by defining it before including this file.
#ifndef MAX_WL_HANDLES
#define MAX_WL_HANDLES 8
#endif // MAX_WL_HANDLES
#ifndef WL_DEFAULT_UPDATERATE
#define WL_DEFAULT_UPDATERATE 16
#endif //WL_DEFAULT_UPDATERATE
#ifndef WL_DEFAULT_TEMP_BUFF_SIZE
#define WL_DEFAULT_TEMP_BUFF_SIZE 32
#endif //WL_DEFAULT_TEMP_BUFF_SIZE
#ifndef WL_DEFAULT_WRITE_SIZE
#define WL_DEFAULT_WRITE_SIZE 16
#endif //WL_DEFAULT_WRITE_SIZE
#ifndef WL_DEFAULT_START_ADDR
#define WL_DEFAULT_START_ADDR 0
#endif //WL_DEFAULT_START_ADDR
#ifndef WL_CURRENT_VERSION
#define WL_CURRENT_VERSION 2
#endif //WL_CURRENT_VERSION
// One mounted wear-levelling instance plus its per-handle lock.
typedef struct {
    WL_Flash *instance;
    _lock_t lock;
} wl_instance_t;
// Global instance table; a wl_handle_t is an index into this array.
static wl_instance_t s_instances[MAX_WL_HANDLES];
// Guards allocation/release of slots in s_instances.
static _lock_t s_instances_lock;
static const char *TAG = "wear_levelling";
static esp_err_t check_handle(wl_handle_t handle, const char *func);
// wl_mount initializes wear levelling on the given flash partition and
// returns a handle (slot index) in *out_handle. The concrete WL_Flash
// subclass is chosen at compile time from CONFIG_WL_SECTOR_SIZE/MODE.
// On any failure all partially constructed objects are destroyed via the
// shared `out` cleanup path and *out_handle is reset to WL_INVALID_HANDLE.
esp_err_t wl_mount(const esp_partition_t *partition, wl_handle_t *out_handle)
{
    // Initialize variables before the first jump to cleanup label
    void *wl_flash_ptr = NULL;
    WL_Flash *wl_flash = NULL;
    void *part_ptr = NULL;
    Partition *part = NULL;
    // Serialize slot allocation across concurrent mounts.
    _lock_acquire(&s_instances_lock);
    esp_err_t result = ESP_OK;
    *out_handle = WL_INVALID_HANDLE;
    // Find the first free slot in the instance table.
    for (size_t i = 0; i < MAX_WL_HANDLES; i++) {
        if (s_instances[i].instance == NULL) {
            *out_handle = i;
            break;
        }
    }
    // Build the configuration from the compile-time defaults above.
    wl_ext_cfg_t cfg;
    cfg.full_mem_size = partition->size;
    cfg.start_addr = WL_DEFAULT_START_ADDR;
    cfg.version = WL_CURRENT_VERSION;
    cfg.sector_size = SPI_FLASH_SEC_SIZE;
    cfg.page_size = SPI_FLASH_SEC_SIZE;
    cfg.updaterate = WL_DEFAULT_UPDATERATE;
    cfg.temp_buff_size = WL_DEFAULT_TEMP_BUFF_SIZE;
    cfg.wr_size = WL_DEFAULT_WRITE_SIZE;
    // FAT sector size by default will be 512
    cfg.fat_sector_size = CONFIG_WL_SECTOR_SIZE;
    if (*out_handle == WL_INVALID_HANDLE) {
        ESP_LOGE(TAG, "MAX_WL_HANDLES=%d instances already allocated", MAX_WL_HANDLES);
        result = ESP_ERR_NO_MEM;
        goto out;
    }
    // Allocate memory for a Partition object, and then initialize the object
    // using placement new operator. This way we can recover from out of
    // memory condition.
    part_ptr = malloc(sizeof(Partition));
    if (part_ptr == NULL) {
        result = ESP_ERR_NO_MEM;
        ESP_LOGE(TAG, "%s: can't allocate Partition", __func__);
        goto out;
    }
    part = new (part_ptr) Partition(partition);
    // Same for WL_Flash: allocate memory, use placement new
#if CONFIG_WL_SECTOR_SIZE == 512
#if CONFIG_WL_SECTOR_MODE == 1
    // 512-byte sectors, safe mode: crash-safe but slower implementation.
    wl_flash_ptr = malloc(sizeof(WL_Ext_Safe));
    if (wl_flash_ptr == NULL) {
        result = ESP_ERR_NO_MEM;
        ESP_LOGE(TAG, "%s: can't allocate WL_Ext_Safe", __func__);
        goto out;
    }
    wl_flash = new (wl_flash_ptr) WL_Ext_Safe();
#else
    // 512-byte sectors, performance mode.
    wl_flash_ptr = malloc(sizeof(WL_Ext_Perf));
    if (wl_flash_ptr == NULL) {
        result = ESP_ERR_NO_MEM;
        ESP_LOGE(TAG, "%s: can't allocate WL_Ext_Perf", __func__);
        goto out;
    }
    wl_flash = new (wl_flash_ptr) WL_Ext_Perf();
#endif // CONFIG_WL_SECTOR_MODE
#endif // CONFIG_WL_SECTOR_SIZE
#if CONFIG_WL_SECTOR_SIZE == 4096
    // Native 4096-byte sectors: plain WL_Flash suffices.
    wl_flash_ptr = malloc(sizeof(WL_Flash));
    if (wl_flash_ptr == NULL) {
        result = ESP_ERR_NO_MEM;
        ESP_LOGE(TAG, "%s: can't allocate WL_Flash", __func__);
        goto out;
    }
    wl_flash = new (wl_flash_ptr) WL_Flash();
#endif // CONFIG_WL_SECTOR_SIZE
    result = wl_flash->config(&cfg, part);
    if (ESP_OK != result) {
        ESP_LOGE(TAG, "%s: config instance=0x%08x, result=0x%x", __func__, *out_handle, result);
        goto out;
    }
    result = wl_flash->init();
    if (ESP_OK != result) {
        ESP_LOGE(TAG, "%s: init instance=0x%08x, result=0x%x", __func__, *out_handle, result);
        goto out;
    }
    // Success: publish the instance and create its per-handle lock.
    s_instances[*out_handle].instance = wl_flash;
    _lock_init(&s_instances[*out_handle].lock);
    _lock_release(&s_instances_lock);
    return ESP_OK;
out:
    // Failure path: tear down in reverse construction order. Destructors
    // are called explicitly because the objects were placement-new'ed.
    _lock_release(&s_instances_lock);
    *out_handle = WL_INVALID_HANDLE;
    if (wl_flash) {
        wl_flash->~WL_Flash();
        free(wl_flash);
    }
    if (part) {
        part->~Partition();
        free(part);
    }
    return result;
}
// wl_unmount flushes and destroys the instance behind `handle`, freeing
// its slot. Returns the flush result (or a check_handle error). Note the
// flush error is propagated even though teardown proceeds regardless.
esp_err_t wl_unmount(wl_handle_t handle)
{
    esp_err_t result = ESP_OK;
    _lock_acquire(&s_instances_lock);
    result = check_handle(handle, __func__);
    if (result == ESP_OK) {
        // We have to flush state of the component
        result = s_instances[handle].instance->flush();
        // We use placement new in wl_mount, so call destructor directly
        // (driver first, then the WL_Flash object that owned it).
        Flash_Access *drv = s_instances[handle].instance->get_drv();
        drv->~Flash_Access();
        free(drv);
        s_instances[handle].instance->~WL_Flash();
        free(s_instances[handle].instance);
        s_instances[handle].instance = NULL;
        _lock_close(&s_instances[handle].lock); // also zeroes the lock variable
    }
    _lock_release(&s_instances_lock);
    return result;
}
// wl_erase_range erases `size` bytes starting at `start_addr` (addresses
// are relative to the wear-levelled partition) under the handle's lock.
esp_err_t wl_erase_range(wl_handle_t handle, size_t start_addr, size_t size)
{
    const esp_err_t status = check_handle(handle, __func__);
    if (status != ESP_OK) {
        return status;
    }
    wl_instance_t &slot = s_instances[handle];
    _lock_acquire(&slot.lock);
    const esp_err_t erase_result = slot.instance->erase_range(start_addr, size);
    _lock_release(&slot.lock);
    return erase_result;
}
// wl_write copies `size` bytes from `src` into the wear-levelled
// partition at `dest_addr`, serialized by the handle's lock.
esp_err_t wl_write(wl_handle_t handle, size_t dest_addr, const void *src, size_t size)
{
    const esp_err_t status = check_handle(handle, __func__);
    if (status != ESP_OK) {
        return status;
    }
    wl_instance_t &slot = s_instances[handle];
    _lock_acquire(&slot.lock);
    const esp_err_t write_result = slot.instance->write(dest_addr, src, size);
    _lock_release(&slot.lock);
    return write_result;
}
// wl_read copies `size` bytes from the wear-levelled partition at
// `src_addr` into `dest`, serialized by the handle's lock.
esp_err_t wl_read(wl_handle_t handle, size_t src_addr, void *dest, size_t size)
{
    const esp_err_t status = check_handle(handle, __func__);
    if (status != ESP_OK) {
        return status;
    }
    wl_instance_t &slot = s_instances[handle];
    _lock_acquire(&slot.lock);
    const esp_err_t read_result = slot.instance->read(src_addr, dest, size);
    _lock_release(&slot.lock);
    return read_result;
}
// wl_size returns the usable size, in bytes, of the wear-levelled
// partition behind `handle`, or 0 when the handle is invalid/unmounted.
size_t wl_size(wl_handle_t handle)
{
    if (check_handle(handle, __func__) != ESP_OK) {
        return 0;
    }
    wl_instance_t &slot = s_instances[handle];
    _lock_acquire(&slot.lock);
    const size_t usable_bytes = slot.instance->chip_size();
    _lock_release(&slot.lock);
    return usable_bytes;
}
// wl_sector_size returns the sector size exposed by the instance behind
// `handle`, or 0 when the handle is invalid/unmounted.
size_t wl_sector_size(wl_handle_t handle)
{
    if (check_handle(handle, __func__) != ESP_OK) {
        return 0;
    }
    wl_instance_t &slot = s_instances[handle];
    _lock_acquire(&slot.lock);
    const size_t sector_bytes = slot.instance->sector_size();
    _lock_release(&slot.lock);
    return sector_bytes;
}
// check_handle validates `handle` before use. The checks are ordered so
// callers get the most specific error: sentinel value first
// (ESP_ERR_NOT_FOUND), then range (ESP_ERR_INVALID_ARG), then whether the
// slot is actually mounted (ESP_ERR_NOT_FOUND). `func` names the caller
// for log messages.
static esp_err_t check_handle(wl_handle_t handle, const char *func)
{
    if (handle == WL_INVALID_HANDLE) {
        ESP_LOGE(TAG, "%s: invalid handle", func);
        return ESP_ERR_NOT_FOUND;
    }
    if (handle >= MAX_WL_HANDLES) {
        ESP_LOGE(TAG, "%s: instance[0x%08x] out of range", func, handle);
        return ESP_ERR_INVALID_ARG;
    }
    if (s_instances[handle].instance == NULL) {
        ESP_LOGE(TAG, "%s: instance[0x%08x] not initialized", func, handle);
        return ESP_ERR_NOT_FOUND;
    }
    return ESP_OK;
}
|
<gh_stars>0
import {
detailedPlan,
id,
includeState,
isBuilt,
inputShape,
outputShape,
name,
tags,
description,
detailedState,
detailedStateIds,
detailedTensors,
detailedProcedure,
detailedOperations,
detailedArgIds,
detailedResultIds,
detailedPromiseOutId
} from '../dummy/plan';
// Verifies that the dummy fixtures in ../dummy/plan assemble a State
// whose fields round-trip unchanged.
describe('State', () => {
  test('can be properly constructed', () => {
    expect(detailedState.stateIds).toStrictEqual(detailedStateIds);
    expect(detailedState.tensors).toStrictEqual(detailedTensors);
  });
});
// Verifies the Procedure fixture wiring (operations, arg/result ids and
// the promised output id).
describe('Procedure', () => {
  test('can be properly constructed', () => {
    expect(detailedProcedure.operations).toStrictEqual(detailedOperations);
    expect(detailedProcedure.argIds).toStrictEqual(detailedArgIds);
    expect(detailedProcedure.resultIds).toStrictEqual(detailedResultIds);
    expect(detailedProcedure.promiseOutId).toStrictEqual(detailedPromiseOutId);
  });
});
// Verifies the full Plan fixture, including its nested Procedure and
// State plus all scalar metadata fields.
describe('Plan', () => {
  test('can be properly constructed', () => {
    expect(detailedPlan.id).toStrictEqual(id);
    expect(detailedPlan.procedure).toStrictEqual(detailedProcedure);
    expect(detailedPlan.state).toStrictEqual(detailedState);
    expect(detailedPlan.includeState).toStrictEqual(includeState);
    expect(detailedPlan.isBuilt).toStrictEqual(isBuilt);
    expect(detailedPlan.inputShape).toStrictEqual(inputShape);
    expect(detailedPlan.outputShape).toStrictEqual(outputShape);
    expect(detailedPlan.name).toStrictEqual(name);
    expect(detailedPlan.tags).toStrictEqual(tags);
    expect(detailedPlan.description).toStrictEqual(description);
  });
});
|
# Remove the "rstudio" Helm release from the current Kubernetes cluster.
helm delete rstudio
import { Resolve, ActivatedRouteSnapshot, RouterStateSnapshot } from '@angular/router';
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/observable/of';
import { AppConfigService } from "./app-config.service";
import { UserServiceService } from "./user-service.service";
/**
 * Route resolver that loads the user's info (via UserServiceService)
 * before the guarded route activates.
 */
@Injectable()
export class UserResolver implements Resolve<any>{
  constructor(private userServiceService: UserServiceService) {}
  // route/state are required by the Resolve contract but unused here.
  resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
    return this.userServiceService.setUserInfo();
  }
}
/**
 * Route resolver that checks the user's info via AppConfigService
 * before the guarded route activates.
 */
@Injectable()
export class MasterResolver implements Resolve<any>{
  constructor(private appConfigService: AppConfigService) {}
  // route/state are required by the Resolve contract but unused here.
  resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot) {
    return this.appConfigService.testUserInfo();
  }
}
|
from eskapade.data_mimic.links.kde_preparation import KDEPreparation
from eskapade.data_mimic.links.kernel_density_estimation import KernelDensityEstimation
from eskapade.data_mimic.links.mixed_variables_simulation import MixedVariablesSimulation
from eskapade.data_mimic.links.resampler import Resampler
from eskapade.data_mimic.links.resample_evaluation import ResampleEvaluation
from eskapade.data_mimic.links.mimic_report import MimicReport
# Public API of the data_mimic links subpackage; mirrors the imports above.
__all__ = ['KDEPreparation', 'KernelDensityEstimation', 'MixedVariablesSimulation', 'Resampler',
           'ResampleEvaluation', 'MimicReport']
|
/*
Copyright © 2020 <EMAIL>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
	"fmt"
	"log"
	"math"
	"sort"
	"strconv"
	"strings"
	"time"

	tm "github.com/buger/goterm"
	"github.com/fatih/color"
	api "github.com/jasonbirchall/crypto/pkg/api"
	"github.com/m7shapan/njson"
	"github.com/spf13/cobra"
)
// Coin defines the current price rate of a coin and subsequently
// a change in rate using a float value. The njson tags pick the first
// coin out of the API's "data.coins" array.
type Coin struct {
	Rate   string  `njson:"data.coins.0.price"`
	Change float64 `njson:"data.coins.0.change"`
}
// coinsArg holds the coin shorthands (e.g. "btc") gathered from the
// --coin or -c flag; see init below.
var coinsArg []string
// trackCmd implements `track`: it prints the current price and change
// for each coin passed via --coin. When `watch` is set (flag presumably
// defined on the root command — not visible here), it clears the screen
// and re-renders the prices once per second until interrupted.
var trackCmd = &cobra.Command{
	Use:   "track",
	Short: "Allows you to track the rise and fall of specific coins",
	RunE: func(cmd *cobra.Command, args []string) error {
		if watch {
			// Clear the current screen.
			tm.Clear()
			// Loop over track command, executing track command every second.
			// NOTE(review): this loop has no termination condition other
			// than an error; Ctrl-C is the expected exit path.
			for {
				err := loopTrack()
				if err != nil {
					return err
				}
			}
		} else {
			p, err := execute()
			if err != nil {
				return err
			}
			fmt.Println(p)
		}
		return nil
	},
}
// execute forms a map of coin prices and renders it as a single display
// string, e.g. "BTC £12322 | +4.32% ". Symbols are emitted in sorted
// order because Go randomizes map iteration order — without sorting,
// repeated invocations would print the coins in a different order each
// time, which is especially jarring in --watch mode.
func execute() (string, error) {
	m, err := createMap()
	if err != nil {
		return "An error occurred creating a map:", err
	}
	// Sort the symbols for deterministic output.
	symbols := make([]string, 0, len(m))
	for k := range m {
		symbols = append(symbols, k)
	}
	sort.Strings(symbols)
	var b strings.Builder
	for _, k := range symbols {
		b.WriteString(strings.ToUpper(k))
		b.WriteString(" ")
		b.WriteString(m[k])
	}
	return b.String(), nil
}
// loopTrack renders one frame of the --watch display: it moves the
// cursor home, prints a timestamp header and the current prices, flushes
// the terminal buffer, then sleeps for a second before the caller loops.
func loopTrack() error {
	tm.MoveCursor(1, 1)
	tm.Println("Current Time:", time.Now().Format(time.RFC1123), "\n-------------")
	p, err := execute()
	if err != nil {
		return err
	}
	tm.Println(p)
	// Call it every time at the end of rendering
	tm.Flush()
	time.Sleep(time.Second)
	return nil
}
// createMap queries the API for every coin supplied via --coin and
// returns a map keyed by the coin's shorthand (e.g. "btc") whose values
// are the formatted price strings. On failure the map built so far is
// returned alongside the error.
func createMap() (map[string]string, error) {
	prices := make(map[string]string, len(coinsArg))
	for _, symbol := range coinsArg {
		formatted, err := checkCoins(symbol)
		if err != nil {
			return prices, err
		}
		prices[symbol] = formatted
	}
	return prices, nil
}
// checkCoins takes a coin shorthand as a string, i.e. btc, and queries
// the api package to retrieve the raw response bytes. It then calls
// grabPrice to format them. On error the returned string is a short
// description of which step failed (the caller is expected to consult
// the error first). Fixes the misspelled error messages
// ("occured"/"queying") from the original.
func checkCoins(c string) (string, error) {
	data, err := api.Query(c)
	if err != nil {
		return "An error has occurred querying the API:", err
	}
	price, err := grabPrice(data)
	if err != nil {
		return "An error has occurred grabbing the json object:", err
	}
	return price, nil
}
// grabPrice accepts the raw API response bytes, unmarshals them into a
// Coin, and formats the price and 24h change as a coloured string:
// green with a leading "+" for a non-negative change, red otherwise.
//
// Unlike the original, decode/parse failures are returned as errors
// instead of calling log.Fatal — terminating the whole process from a
// helper deep in the call chain prevented callers (which already handle
// a returned error) from reporting the failure gracefully.
func grabPrice(body []byte) (string, error) {
	var c Coin
	pos := color.New(color.FgGreen)
	neg := color.New(color.FgRed)
	if err := njson.Unmarshal(body, &c); err != nil {
		return "", fmt.Errorf("unmarshalling coin data: %w", err)
	}
	// Convert string to float64 to show two decimal places only.
	v, err := strconv.ParseFloat(c.Rate, 64)
	if err != nil {
		return "", fmt.Errorf("parsing rate %q: %w", c.Rate, err)
	}
	// Signbit distinguishes negative changes so the "+" prefix is only
	// added for gains.
	if math.Signbit(c.Change) {
		return neg.Sprintf("£%.2f | %.2f%% ", v, c.Change), nil
	}
	return pos.Sprintf("£%.2f | +%.2f%% ", v, c.Change), nil
}
// init registers the track subcommand and its --coin/-c flag.
func init() {
	rootCmd.AddCommand(trackCmd)
	trackCmd.Flags().StringSliceVarP(&coinsArg, "coin", "c", []string{}, "")
	// The flag is registered on Flags(), not PersistentFlags(), so it must
	// be marked required with MarkFlagRequired. The original called
	// MarkPersistentFlagRequired, which looks up the flag in
	// PersistentFlags(), fails to find it, and returns an (ignored) error —
	// meaning --coin was never actually enforced as required.
	if err := trackCmd.MarkFlagRequired("coin"); err != nil {
		log.Fatal(err)
	}
}
|
"""
Generates a 3-D heart model including ventricles, base and atria.
"""
from __future__ import division
import math
from opencmiss.utils.zinc.field import findOrCreateFieldCoordinates
from scaffoldmaker.annotation.annotationgroup import AnnotationGroup, mergeAnnotationGroups
from scaffoldmaker.meshtypes.meshtype_3d_heartatria2 import MeshType_3d_heartatria2
from scaffoldmaker.meshtypes.meshtype_3d_heartventriclesbase2 import MeshType_3d_heartventriclesbase2
from scaffoldmaker.meshtypes.scaffold_base import Scaffold_base
from scaffoldmaker.utils.meshrefinement import MeshRefinement
class MeshType_3d_heart2(Scaffold_base):
'''
Generates a 3-D heart model including ventricles, base and atria.
'''
@staticmethod
def getName():
return '3D Heart 2'
@staticmethod
def getDefaultOptions(parameterSetName='Default'):
options = MeshType_3d_heartventriclesbase2.getDefaultOptions(parameterSetName)
optionsAtria = MeshType_3d_heartatria2.getDefaultOptions(parameterSetName)
options.update(optionsAtria)
return options
@staticmethod
def getOrderedOptionNames():
optionNames = MeshType_3d_heartventriclesbase2.getOrderedOptionNames()
optionNamesAtria = MeshType_3d_heartatria2.getOrderedOptionNames()
# insert numbers of elements in atria in initial group
for optionName in [
'Number of elements up atria',
'Number of elements around atrial septum']:
optionNames.insert(5, optionName)
optionNamesAtria.remove(optionName)
# remove dependent or repeated options in atria2
optionNamesAtria.remove('LV outlet outer diameter')
for optionName in optionNames:
if optionName in optionNamesAtria:
optionNamesAtria.remove(optionName)
# add remaining atria options
optionNames += optionNamesAtria
# want refinement options last
for optionName in [
'Refine',
'Refine number of elements surface',
'Refine number of elements through LV wall',
'Refine number of elements through RV wall',
'Refine number of elements through atrial wall']:
optionNames.remove(optionName)
optionNames.append(optionName)
return optionNames
@staticmethod
def checkOptions(options):
dependentChanges = MeshType_3d_heartventriclesbase2.checkOptions(options) \
or MeshType_3d_heartatria2.checkOptions(options)
# only works with particular numbers of elements around
options['Number of elements around atrial septum'] = 2
# set dependent outer diameter used in atria2
options['LV outlet outer diameter'] = options['LV outlet inner diameter'] + 2.0*options['LV outlet wall thickness']
return dependentChanges
@classmethod
def generateBaseMesh(cls, region, options):
"""
Generate the base tricubic Hermite mesh.
:param region: Zinc region to define model in. Must be empty.
:param options: Dict containing options. See getDefaultOptions().
:return: list of AnnotationGroup
"""
# set dependent outer diameter used in atria2
options['LV outlet outer diameter'] = options['LV outlet inner diameter'] + 2.0*options['LV outlet wall thickness']
fm = region.getFieldmodule()
fm.beginChange()
coordinates = findOrCreateFieldCoordinates(fm)
cache = fm.createFieldcache()
# generate heartventriclesbase2 model and put atria2 on it
ventriclesAnnotationGroups = MeshType_3d_heartventriclesbase2.generateBaseMesh(region, options)
atriaAnnotationGroups = MeshType_3d_heartatria2.generateBaseMesh(region, options)
annotationGroups = mergeAnnotationGroups(ventriclesAnnotationGroups, atriaAnnotationGroups)
fm.endChange()
return annotationGroups
@classmethod
def refineMesh(cls, meshrefinement, options):
    """
    Refine source mesh into separate region, with change of basis.
    :param meshrefinement: MeshRefinement, which knows source and target region.
    :param options: Dict containing options. See getDefaultOptions().
    """
    assert isinstance(meshrefinement, MeshRefinement)
    # delegate refinement to the two component mesh types, in order
    for componentMeshType in (MeshType_3d_heartventriclesbase2, MeshType_3d_heartatria2):
        componentMeshType.refineMesh(meshrefinement, options)
|
<filename>tracker/src/main/java/tracker/entity_config/ArrayBuilder.java
package tracker.entity_config;
import com.google.common.collect.ImmutableList;
import elasta.orm.entity.core.Field;
import java.util.Collection;
import java.util.List;
/**
* Created by sohan on 7/7/2017.
*/
/**
 * Fluent builder that collects elements of type {@code T} and materializes
 * them into an array via a caller-supplied array-creation function.
 *
 * @param <T> element type of the array being built
 */
public interface ArrayBuilder<T> {

    /**
     * Creates a new, empty builder.
     *
     * @param <T> element type
     * @return a fresh {@link ArrayBuilder}
     */
    static <T> ArrayBuilder<T> create() {
        return new ArrayBuilderImpl<>();
    }

    /** Appends a single element; returns this builder for chaining. */
    ArrayBuilder<T> add(T field);

    /** Appends every element of the given collection; returns this builder for chaining. */
    ArrayBuilder<T> addAll(Collection<T> fields);

    /** Builds the array, delegating array allocation to {@code createArrayFunc}. */
    T[] build(CreateArrayFunc<T> createArrayFunc);

    /** Converts the accumulated list into a typed array. */
    interface CreateArrayFunc<T> {
        T[] apply(List<T> list);
    }
}
|
<filename>comercial_vue/plugins/vue-progressbar/vue-progressbar.js
import Vue from 'vue'
import VueProgressBar from 'vue-progressbar'

// Global configuration for the vue-progressbar plugin.
const progressBarOptions = {
  // bar colour while loading
  color: '#9b7af3',
  // bar colour shown when a load fails
  failedColor: '#f27b7b',
  thickness: '7px',
  transition: {
    speed: '0.2s',
    opacity: '0.6s',
    termination: 300
  },
  // revert to the normal colour after a failure
  autoRevert: true,
  location: 'top',
  inverse: false
}

Vue.use(VueProgressBar, progressBarOptions)
|
package ExerciciosExtras.exercicios.arrayecollections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Demonstration of basic {@link java.util.Map} operations:
 * insertion, overwrite, lookup, views, membership tests and entry iteration.
 */
public class MapCollection {

    public static void main(String[] args) {
        Map<Integer, String> elementos = new HashMap<>();

        // insert, then overwrite the value stored under key 2
        elementos.put(1, "Primeiro Elemento");
        elementos.put(2, "Segundo Elemento");
        System.out.println(elementos.get(2));
        elementos.put(2, "Segundo Elemento Modificado");
        System.out.println(elementos.get(2));

        // size and the three collection views
        System.out.println(elementos.size());
        System.out.println(elementos);
        System.out.println(elementos.keySet());
        System.out.println(elementos.values());
        System.out.println(elementos.entrySet());

        // membership tests by key and by value
        System.out.println(elementos.containsKey(5));
        System.out.println(elementos.containsValue("Batata"));
        System.out.println(elementos.containsKey(1));
        System.out.println(elementos.containsValue("Primeiro Elemento"));

        // iterate entries, printing key then value
        for (Entry<Integer, String> entrada : elementos.entrySet()) {
            System.out.println(entrada.getKey());
            System.out.println(entrada.getValue());
        }
    }
}
<gh_stars>0
package com.g4mesoft.net;
import com.g4mesoft.util.Registry;
/**
 * Lazily-initialized singleton registry of network protocols.
 * All known protocol classes are registered in the private constructor.
 */
public final class ProtocolRegistry extends Registry<Protocol> {

    private static ProtocolRegistry instance;

    /** Returns the shared registry, creating it on first use. */
    public static ProtocolRegistry getInstance() {
        if (instance == null) {
            instance = new ProtocolRegistry();
        }
        return instance;
    }

    private ProtocolRegistry() {
        // connection handshake
        addEntry(HandshakeProtocol.class);
        // world state
        addEntry(EntityProtocol.class);
        addEntry(WorldProtocol.class);
    }
}
|
def binomial(n, k):
    """Return the binomial coefficient C(n, k) as an exact integer.

    The original used true division (/), returning a float (e.g. 10.0 for
    C(5, 3)) and risking rounding error for large n. Each partial product
    C(n, i) is an integer, so floor division (//) is exact here.

    :param n: number of items (int).
    :param k: number chosen (int); returns 0 when k < 0 or k > n.
    :return: int value of C(n, k).
    """
    if k < 0 or k > n:
        return 0
    # exploit symmetry C(n, k) == C(n, n - k) to shorten the loop
    k = min(k, n - k)
    result = 1
    for i in range(1, k + 1):
        # result is C(n, i) after this step; always exactly divisible by i
        result = result * (n - i + 1) // i
    return result
# Demo: compute and print C(5, 3)
coefficient = binomial(5, 3)
print(coefficient)
def permute(input):
    """Return a list of all permutations of the given sequence.

    Bug fix: the original had no base case, so the innermost recursive call
    returned [] instead of [[]], which made every call return [] — even for
    non-empty input. (Parameter name 'input' shadows the builtin but is kept
    for backward compatibility with keyword callers.)

    :param input: sequence (e.g. list) of elements to permute.
    :return: list of permutations, each a list; [[ ]] for empty input.
    """
    # base case: the empty sequence has exactly one permutation, itself
    if len(input) == 0:
        return [[]]
    res = []
    for i in range(len(input)):
        # permute the remainder with element i removed, then prefix element i
        rest = input[:i] + input[i + 1:]
        for tail in permute(rest):
            res.append([input[i]] + tail)
    return res
# Demo: print every permutation of a small list, one per line
letters = ['a', 'b', 'c']
for permutation in permute(letters):
    print(permutation)
#!/bin/bash
# set defaults
toclevels=4
# print out info
if [[ -z $1 ]]
then
echo "
$0 [start|version]
will build the format documentation from CSV files and a template.
Version = draft|official changes a note in the document
"
exit 1
fi
if [[ "$1" = "start" ]]
then
# parse version from directory
version=draft
else
version=$1
fi
case $version in
official|draft)
author=ces.qwi.feedback@census.gov
;;
esac
cwd=$(pwd)
numversion=${cwd##*/}
# convert the column definitions to CSV
sed 's/ /,/g;s/R N/R,N/; s/,,/,/g; s/,,/,/g; s/,,/,/g; s/, /,/g' column_definitions.txt | tail -n +2 > tmp.csv
# create ascii doc version
asciifile=lehd_public_use_schema.asciidoc
# this revision is used to dynamically download a sample for the version.txt. should be available for both QWI and J2J
versionvintage=latest_release
# versionj2jvintage=$versionvintage
versionj2jvintage=latest_release
versionstate=de
versionurl=https://lehd.ces.census.gov/data/qwi/${versionvintage}/${versionstate}
versionj2jurl=https://lehd.ces.census.gov/data/j2j/${versionj2jvintage}/${versionstate}/j2j
previousvintage=$(cd ..; ls -1d * | grep -E "V[0-9]" | tail -2 | head -1)
echo "= LEHD Public Use Data Schema $numversion" > $asciifile
echo "<${author}>" >> $asciifile
echo "$(date +%d\ %B\ %Y)
// a2x: --dblatex-opts \"-P latex.output.revhistory=0 --param toc.section.depth=${toclevels}\"
:ext-relative: {outfilesuffix}
( link:$(basename $asciifile .asciidoc).pdf[Printable version] )
" >> $asciifile
# A note on the relevance/beta/draft status of this file.
case $version in
draft)
echo "
[IMPORTANT]
.Important
==============================================
This specification is draft. Feedback is welcome. Please write us at link:mailto:${author}?subject=LEHD_Schema_draft[${author}].
==============================================
" >> $asciifile
;;
official)
echo "
[IMPORTANT]
.Important
==============================================
Feedback is welcome. Please write us at link:mailto:${author}?subject=LEHD_Schema[${author}].
==============================================
" >> $asciifile
;;
esac
echo "
Purpose
-------
The public-use data from the Longitudinal Employer-Household Dynamics Program, including the Quarterly Workforce Indicators (QWI)
and Job-to-Job Flows (J2J), are available for download with the following data schema.
These data are available through the LEHD website’s Data page at
https://lehd.ces.census.gov/data/ and through the LED Extraction Tool at https://ledextract.ces.census.gov/.
This document describes the data schema for LEHD files. LEHD-provided SHP files are separately described in link:lehd_shapefiles{ext-relative}[]. For each variable,
a set of allowable values is defined. Definitions are provided as CSV files,
with header variable definitions. Changes relative to the original v4.0 version are listed <<changes,at the end>>.
File naming
-----------
The naming conventions of the data files are documented in link:lehd_csv_naming{ext-relative}[].
Extends
-------
This version reimplements some features from V4.0. Many files compliant with LEHD or QWI Schema v4.0 will also be compliant with this schema, but compatibility is not guaranteed.
Supersedes
----------
This version supersedes ${previousvintage}.
Basic Schema
------------
Each data file is structured as a CSV file. The first columns contain <<identifiers>>, subsequent columns contain <<indicators>>, followed by <<statusflags,status flags>>. In some cases, visually formatted Excel (XLSX) files are also available, containing the same information together with header lines on each sheet.
=== Generic Structure
[width=\"30%\",format=\"csv\",cols=\"<2\",options=\"header\"]
|===================================================
Column name
[ Identifier1 ]
[ Identifier2 ]
[ Identifier3 ]
[ ... ]
[ Indicator 1 ]
[ Indicator 2 ]
[ Indicator 3 ]
[ ... ]
[ Status Flag 1 ]
[ Status Flag 2 ]
[ Status Flag 3 ]
[ ... ]
|===================================================
Note: A full list of indicators for each type of file are shown below in the <<indicators,Indicators>> section.
While all indicators are included in the CSV files, only the requested indicators
will be included in data outputs from the LED Extraction Tool.
<<<
=== [[identifiers]]Identifiers
Records, unless otherwise noted, are parts of time-series data. Unique record identifiers are noted below, by file type.
Identifiers without the year and quarter component can be considered a series identifier.
" >> $asciifile
############################## Identifiers
for arg in lehd_mapping_identifiers.csv
do
name="$(echo ${arg%*.csv}| sed 's/lehd_//; s/_/ for /; s/mapping/Mapping/; s/ident/Ident/')"
echo "==== $name
( link:${arg}[] )
Each of the released files has a set of variables uniquely identifying records ('Identifiers'). The table below relates the set of identifier specifications
to the released files. The actual CSV files containing the identifiers for each set are listed after this table. Each identifier can take on a specified list of values, documented in the section on <<catvars,Categorical Variables>>.
[width=\"80%\",format=\"csv\",cols=\"<3,8*^1\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
<<<
" >> $asciifile
done
### Hardcode identifier order
for arg in lehd_identifiers_qwi.csv lehd_identifiers_j2j.csv lehd_identifiers_j2jod.csv lehd_identifiers_pseo.csv
do
name="$(echo ${arg%*.csv}| sed 's/lehd_//; s/_/ for /; s/ident/Ident/; s/qwi/QWI/; s/j2jod/J2JOD/; s/j2j/J2J/;
s/pseo/PSEO/' )"
echo "==== $name
( link:${arg}[] )
[width=\"100%\",format=\"csv\",cols=\"2*^1,<3\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
<<<
" >> $asciifile
done
################################# Variables
echo "
<<<
=== [[indicators]]Indicators
The following tables and associated mapping files
list the indicators available on each file. The descriptor files themselves are structured as follows:
- The ''Indicator Variable'' is the short name of the variable on the CSV files, suitable for machine processing in a wide variety of statistical applications.
- When given, the ''Alternate name'' may appear in related documentation and articles.
- The ''Status Flag'' is used to indicate publication or data quality status (see <<statusflags,Status Flags>>).
- The ''Indicator Name'' is a non-abbreviated version of the ''Indicator Variable''.
- The ''Description'' provides more verbose description of the variable.
- ''Units'' identify the type of variable according to a very simplified taxonomy (not formalized yet): counts, rates, monetary amounts.
- ''Concept'' classifies the variables into higher-level concepts. The taxonomy for these concepts has not been finalized yet, see link:label_concept_draft.csv[label_concept_draft.csv] for a draft version.
- The ''Base'' indicates the denominator used to compute the statistic, and may be '1'.
==== National QWI and State-Level QWI ====
( link:variables_qwi.csv[variables_qwi.csv] )
[width=\"95%\",format=\"csv\",cols=\"3*^2,<5,<5,<2,<2,^2\",options=\"header\"]
|===================================================
include::variables_qwi.csv[]
|===================================================
<<<
==== National QWI and State-Level QWI Rates ====
Rates are computed from published data, and are provided as a convenience.
( link:variables_qwir.csv[variables_qwir.csv] )
[width=\"95%\",format=\"csv\",cols=\"3*^2,<5,<5,<2,<2,<2\",options=\"header\"]
|===================================================
include::variables_qwir.csv[]
|===================================================
<<<
==== Job-to-Job Flow Counts (J2J)
( link:variables_j2j.csv[] )
[width=\"95%\",format=\"csv\",cols=\"3*^2,<5,<5,<2,<2,^1\",options=\"header\"]
|===================================================
include::variables_j2j.csv[]
|===================================================
<<<
==== Job-to-Job Flow Rates (J2JR)
( link:variables_j2jr.csv[] )
Rates are computed from published data, and are provided as a convenience.
[width=\"95%\",format=\"csv\",cols=\"3*^2,<5,<5,<2,<2,^1\",options=\"header\"]
|===================================================
include::variables_j2jr.csv[]
|===================================================
<<<
==== Job-to-Job Flow Origin-Destination (J2JOD)
( link:variables_j2jod.csv[] )
[width=\"95%\",format=\"csv\",cols=\"3*^2,<5,<5,<2,<2,^1\",options=\"header\"]
|===================================================
include::variables_j2jod.csv[]
|===================================================
<<<
" >> $asciifile
tmp_pseoevars_cols=$(mktemp -p $cwd)
cut -d ',' -f 1,3,5,6,7 variables_pseoe.csv >> $tmp_pseoevars_cols
echo "
==== Post-Secondary Employment Outcomes Earnings (PSEOE)
( link:variables_pseoe.csv[] )
[width=\"95%\",format=\"csv\",cols=\"<1,<3,<5,2*<1\",options=\"header\"]
|===================================================
include::$tmp_pseoevars_cols[]
|===================================================
<<<
" >> $asciifile
tmp_pseofvars_cols=$(mktemp -p $cwd)
cut -d ',' -f 1,3,5,6,7 variables_pseof.csv >> $tmp_pseofvars_cols
echo "
==== Post-Secondary Employment Outcomes Flows (PSEOF)
( link:variables_pseof.csv[] )
[width=\"95%\",format=\"csv\",cols=\"<1,<3,<5,2*<1\",options=\"header\"]
|===================================================
include::$tmp_pseofvars_cols[]
|===================================================
<<<
" >> $asciifile
################################# Variability measures
for arg in $(ls variables_*v.csv)
do
tmpfile=tmp_$arg
head -4 $arg > $tmpfile
echo "...,,,," >> $tmpfile
grep "vt_" $arg | head -3 >> $tmpfile
echo "...,,,," >> $tmpfile
grep "vb_" $arg | head -3 >> $tmpfile
echo "...,,,," >> $tmpfile
grep "vw_" $arg | head -3 >> $tmpfile
echo "...,,,," >> $tmpfile
grep "df_" $arg | head -3 >> $tmpfile
echo "...,,,," >> $tmpfile
grep "mr_" $arg | head -3 >> $tmpfile
done
echo "
<<<
=== [[vmeasures]]Variability Measures
The following tables and associated mapping files
list the variability measures available on each file. The ''Variability Measure'' is the short name of the variable on the CSV files,
suitable for machine processing in a wide variety of statistical applications. When given, the ''Alternate Name'' may appear in related documentation and articles.
The ''Variable Name'' is a more verbose description of the variability measure.
Six variability measures are published:
* Total variability, prefixed by vt_
* Standard error, prefixed by st_, and computed as the square root of Total Variability
* Between-implicate variability, prefixed by vb_
* Average within-implicate variability, prefixed by vw_
* Degrees of freedom, prefixed by df_
* Missingness ratio, prefixed by mr_
A missing variability measure indicates a structural zero in the corresponding indicator. This is currently not associated with a flag.
//Not all indicators have associated variability measures. For more details, see the following document TBD.
==== Generic Structure
[width=\"30%\",format=\"csv\",cols=\"<2\",options=\"header\"]
|===================================================
Column name
[ Identifier1 ]
[ Identifier2 ]
[ Identifier3 ]
[ ... ]
[ Standard error for Indicator 1 ]
[ Standard error for Indicator 2 ]
[ Standard error for Indicator 3 ]
[ ... ]
[ Total variation for Indicator 1 ]
[ Total variation for Indicator 2 ]
[ Total variation for Indicator 3 ]
[ ... ]
[ Between-implicate variability for Indicator 1 ]
[ Between-implicate variability for Indicator 2 ]
[ Between-implicate variability for Indicator 3 ]
[ ... ]
[ Average within-implicate variability for Indicator 1 ]
[ Average within-implicate variability for Indicator 2 ]
[ Average within-implicate variability for Indicator 3 ]
[ ... ]
[ Degrees of freedom for Indicator 1 ]
[ Degrees of freedom for Indicator 2 ]
[ Degrees of freedom for Indicator 3 ]
[ ... ]
[ Missingness ratio for Indicator 1 ]
[ Missingness ratio for Indicator 2 ]
[ Missingness ratio for Indicator 3 ]
[ ... ]
|===================================================
Note: A full list of indicators for each type of file are shown in the <<indicators,Indicators>> section. In the tables below, only a sample
of variability measures are printed, but the complete list is available in the linked CSV schema files.
<<<
==== National QWI and State-Level QWI ====
( link:variables_qwiv.csv[variables_qwiv.csv] )
[width=\"95%\",format=\"csv\",cols=\"2*^2,<5,<5,<2\",options=\"header\"]
|===================================================
include::tmp_variables_qwiv.csv[]
|===================================================
<<<
==== National QWI and State-Level QWI Rates ====
( link:variables_qwirv.csv[variables_qwirv.csv] )
[width=\"95%\",format=\"csv\",cols=\"2*^2,<5,<5,<2\",options=\"header\"]
|===================================================
include::tmp_variables_qwirv.csv[]
|===================================================
<<<
==== Job-to-Job Flow Counts (J2J)
Soon.
//( link:variables_j2j.csv[] )
//[width=\"95%\",format=\"csv\",cols=\"3*^2,<5\",options=\"header\"]
//|===================================================
//include::tmp_variables_j2jv.csv[]
//|===================================================
//<<<
//
==== Job-to-Job Flow Rates (J2JR)
Soon.
//( link:variables_j2jr.csv[] )
//[width=\"95%\",format=\"csv\",cols=\"3*^2,<5\",options=\"header\"]
//|===================================================
//include::tmp_variables_j2jrv.csv[]
//|===================================================
//<<<
==== Job-to-Job Flow Origin-Destination (J2JOD)
Soon.
//( link:variables_j2jod.csv[] )
//[width=\"95%\",format=\"csv\",cols=\"^3,^2,^3,<5\",options=\"header\"]
//|===================================================
//include::tmp_variables_j2jodv.csv[]
//|===================================================
<<<
" >> $asciifile
################################ Formats
echo "
== [[catvars]]Categorical Variables
Categorical variable descriptions are displayed above each table, with the variable name shown in parentheses. Unless otherwise stated, every possible value/label combination for each categorical variable is listed. Please note that not all values will be available in every table.
" >> $asciifile
# we do industry and geo last
for arg in $(ls label_*csv| grep -vE "geo|ind_level|industry|agg_level|flags|fips|stusps|concept_draft|pseo|cip|inst|degree")
do
name=$(echo ${arg%*.csv}| sed 's/label_//')
echo "=== ${name^}
( link:${arg}[] )
[width=\"60%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
" >> $asciifile
done
################################ Industry formats
# now do industry
name=Industry
echo "<<<
=== $name ===
" >> $asciifile
for arg in $(ls label_ind_level*csv)
do
name="$(echo ${arg%*.csv}| sed 's/lehd_//; s/_/ for /')"
link="$(echo ${arg%*.csv}| sed 's/label_//')"
echo "[[$link]]
==== Industry levels
( link:${arg}[] )
[width=\"60%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
" >> $asciifile
arg=label_industry.csv
# construct the sample industry file
head -8 $arg > tmp2.csv
echo "...,," >> tmp2.csv
grep -A 4 -B 4 "31-33" $arg | tail -8 >> tmp2.csv
echo "...,," >> tmp2.csv
echo "
==== Industry
( link:${arg}[] )
Only a small subset of available values shown.
The 2017 NAICS (North American Industry Classification System) is used for all years.
QWI releases prior to R2018Q1 used the 2012 NAICS classification (see link:../V4.1.3[Schema v4.1.3]).
For a full listing of all valid 2017 NAICS codes, see https://www.census.gov/cgi-bin/sssd/naics/naicsrch?chart=2017.
[width=\"90%\",format=\"csv\",cols=\"^1,<5,^1\",options=\"header\"]
|===================================================
include::tmp2.csv[]
|===================================================
<<<
" >> $asciifile
done
echo "
=== Educational Institution ===
==== Institution Levels
( link:label_inst_level.csv[] )
Educational institutions are tabulated individually in the current data release.
Future releases may aggregate to institutions to higher levels, such as state or Census Division.
[width=\"60%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::label_inst_level.csv[]
|===================================================
" >> $asciifile
#Institution rownum
#University of Colorado Boulder 2630
#University of Texas - Austin 32017
#Ohio State University 17398
#University of Michigan (00232500) 11819
#University of Wisconsin - Madison 23062
#Pennsylvania State University (00332900) 19324
echo "
==== Institution
( link:label_institution.csv[] )
Institution identifiers are sourced from the
https://www2.ed.gov/offices/OSFAP/PEPS/dataextracts.html[U.S. Department of Education, Federal Student Aid office].
This list has been supplemented with records for regional groupings of institutions (may be used in future PSEO tabulations).
[width=\"80%\",format=\"csv\",cols=\"^1,<4,^2,3*^1\",options=\"header\"]
|===================================================
include::label_institution.csv[lines=1]
...,,,,,
include::label_institution.csv[lines=2630;32017;17398;11819;23062;19324]
...,,,,,
|===================================================
" >> $asciifile
echo "
=== Degree Level
( link:label_degree_level.csv[] )
The degree levels are sourced from the
https://surveys.nces.ed.gov/ipeds/VisInstructions.aspx?survey=10&id=30080&show=part#chunk_1526[National Center for Education Statistics (NCES), Integrated Postsecondary Education Data System (IPEDS)].
[width=\"60%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::label_degree_level.csv[]
|===================================================
" >> $asciifile
echo "
=== Classification of Instruction Programs (CIP)
==== CIP Levels
( link:label_cip_level.csv[] )
[width=\"60%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::label_cip_level.csv[]
|===================================================
" >> $asciifile
echo "
==== CIP Codes
( link:label_cipcode.csv[] )
CIP codes are sourced from the https://nces.ed.gov/ipeds/cipcode/[National Center for Education Statistics (NCES), Integrated Postsecondary Education Data System (IPEDS)].
Data are reported using 2020 CIP codes, for all years.
[width=\"90%\",format=\"csv\",cols=\"^1,<2,^1,^1,<6\",options=\"header\"]
|===================================================
include::label_cipcode.csv[lines=1;2;3;4;5;117;118]
|===================================================
" >> $asciifile
echo "
=== Grad Cohort
\`grad_cohort\` is a 4-digit number representing the first year of the graduation cohort. The number of years in the cohort is reported in the separate <<#_grad_cohort_years>> variable.
====
If \`grad_cohort\`=2010 and \`grad_cohort_years\`=3, then the cell includes graduates from 2010, 2011, and 2012.
====
When tabulating across all cohorts, the value *0000* will be used for grad_cohort.
=== Grad Cohort Years
\`grad_cohort_years\` is the number of years in the cohort of reference (see <<#_grad_cohort>>). It varies by <<#_degree_level>>. Bachelor's degrees (05) are reported in 3 year cohorts, all other degrees are reported in 5 year cohorts. The \`grad_cohort_years\` will take a value (3,5). As tabulations are not done across degree types, the appropriate value will be reported in \`grad_cohort_years\` when \`grad_cohort\`=0000.
" >> $asciifile
################################ Geo formats
# now do geography
name=Geography
# construct the NS file
nsfile=label_fipsnum.csv
#echo "geography,label" > $nsfile
#echo '00,"National (50 States + DC)"' >> $nsfile
#grep -h -E "^[0-9][0-9]," label_geography_??.csv | sort -n -k 1 >> $nsfile
# construct the sample fips file
head -8 $nsfile > tmp.csv
echo "...,," >> tmp.csv
head -50 $nsfile | tail -8 >> tmp.csv
# construct the composite file from separate files
# we clean up line endings at the same time
[[ -f tmp3.csv ]] && rm tmp3.csv
head -1 label_geography_us.csv > label_geography.csv
for arg in $(ls label_geography_*.csv | grep -vE "cbsa")
do
tail -n +2 $arg | unix2dos | dos2unix >> tmp3.csv
done
# split sorting: N, S, C, M, W, B
grep -E ",N$" tmp3.csv | sort -n -k 1 -t , >> label_geography.csv
grep -E ",S$" tmp3.csv | sort -n -k 1 -t , >> label_geography.csv
grep -E ",C$" tmp3.csv | sort -n -k 1 -t , >> label_geography.csv
grep -E ",M$" tmp3.csv | sort -n -k 1 -t , >> label_geography.csv
grep -E ",W$" tmp3.csv | sort -k 1 -t , >> label_geography.csv
grep -E ",B$" tmp3.csv | sort -n -k 1 -t , >> label_geography.csv
grep -E ",D$" tmp3.csv | sort -n -k 1 -t , >> label_geography.csv
# we check that we have the same numbers
# convert to UTF-8
#iconv -t UTF-8 -f ISO-8859-15 label_geography.csv > tmp3.csv
#mv tmp3.csv label_geography.csv
rm tmp3.csv
echo "=== [[geography]]$name ===
" >> $asciifile
for arg in $(ls label_geo_level*csv)
do
name="$(echo ${arg%*.csv}| sed 's/label_//')"
tmp_geo_csv=$(mktemp -p $cwd)
cut -d ',' -f 1,2,3 $arg >> $tmp_geo_csv
echo "[[$name]]
==== [[geolevel]] Geographic Levels
Geography labels for data files are provided in separate files, by scope. Each file 'label_geography_SCOPE.csv' may contain one or more types of records as flagged by <<geolevel,geo_level>>. For convenience, a composite file containing all geocodes is available as link:label_geography.csv[].
The 2019 vintage of https://www.census.gov/geographies/mapping-files/time-series/geo/tiger-line-file.html[Census TIGER/Line geography] is used for all tabulations as of the R2020Q1 release.
Shapefiles are described in a link:lehd_shapefiles{ext-relative}[separate document].
( link:${arg}[] )
[width=\"90%\",format=\"csv\",cols=\"^1,<3,<8\",options=\"header\"]
|===================================================
include::$tmp_geo_csv[]
|===================================================
" >> $asciifile
done
# Build a two-column (code,label) extract of the state postal code file in a
# scratch file; mktemp -p keeps it inside $cwd so the final "rm -f tmp*"
# cleanup sweep can find it.
tmp_stusps_csv=$(mktemp -p $cwd)
cut -d ',' -f 1,2 label_stusps.csv >> $tmp_stusps_csv
# Emit the geography label sections (national/state, census division, postal
# codes, substate) of the asciidoc document.  Shell variables ($nsfile,
# $tmp_stusps_csv) expand now; the include:: directives are resolved later
# by asciidoctor when the document is rendered.
echo "
==== [[geostate]]National and State-Level Values ====
( link:$nsfile[] )
The file link:$nsfile[$nsfile] contains values and labels
for all entities of <<geolevel,geo_level>> 'N' or 'S', and is a summary of separately available files.
[width=\"40%\",format=\"csv\",cols=\"^1,<3,^1\",options=\"header\"]
|===================================================
include::tmp.csv[]
|===================================================
( link:label_geography_division.csv[] )
The file link:label_geography_division.csv[label_geography_division.csv] contains values and labels
for all entities of <<geolevel,geo_level>> 'D'. For more information on which states comprise each division, see the map https://www2.census.gov/geo/pdfs/maps-data/maps/reference/us_regdiv.pdf[here].
[width=\"40%\",format=\"csv\",cols=\"^1,<3,^1\",options=\"header\"]
|===================================================
include::label_geography_division.csv[]
|===================================================
==== [[stusps]]State Postal Codes
Some parts of the schema use (lower or upper-case) state postal codes.
( link:label_stusps.csv[] )
[width=\"40%\",format=\"csv\",cols=\"^1,<2\",options=\"header\"]
|===================================================
include::$tmp_stusps_csv[]
|===================================================
==== [[geosubstate]]Detailed State and Substate Level Values
Files of type 'label_geography_[ST].csv' will contain identifiers and labels for geographic areas entirely comprised within a given state '[ST]'. State-specific parts of cross-state CBSA, in records of type <<geolevel,geo_level>> = M, are present on files of type 'label_geography_[ST].csv'. The file link:label_geography_metro.csv[] contains labels for records of type <<geolevel,geo_level>> = B, for metropolitan areas only.
">> $asciifile
#[IMPORTANT]
#.Important
#==============================================
#The above section should include hyperlinks to
#the appropriate reference.
#==============================================
# Emit a summary table listing every geography label file in the release,
# one row per file: Scope (US/DIVISION/METRO/state code), geo_level types
# covered, and a link to the CSV.

# Derive the Scope column from a label filename: the third
# underscore-separated token with the .csv suffix removed, upper-cased
# (label_geography_us.csv -> US, label_geography_metro.csv -> METRO).
# tr's ranges are quoted so the shell cannot glob-expand [a-z]/[A-Z]
# against single-letter files in the working directory.
scope_from_filename() {
    echo "${1%.csv}" | awk -F_ ' { print $3 } ' | tr '[a-z]' '[A-Z]'
}

echo "
[format=\"csv\",width=\"50%\",cols=\"^1,^2,^3\",options=\"header\"]
|===================================================
Scope,Types,Format file" >> "$asciifile"

# National file: geo_level N only.
for arg in label_geography_us.csv
do
    state=$(scope_from_filename "$arg")
    echo "$state,N,link:${arg}[]" >> "$asciifile"
done
# Census division file: geo_level D only.
for arg in label_geography_division.csv
do
    state=$(scope_from_filename "$arg")
    echo "$state,D,link:${arg}[]" >> "$asciifile"
done
# Metro file: geo_level B only.
for arg in label_geography_metro.csv
do
    state=$(scope_from_filename "$arg")
    echo "$state,B,link:${arg}[]" >> "$asciifile"
done
echo "*States*,," >> "$asciifile"
# Per-state files (two-letter codes).  The US summary file also matches the
# ?? glob, so skip it explicitly rather than parsing ls output.
for arg in label_geography_??.csv
do
    [ "$arg" = "label_geography_us.csv" ] && continue
    state=$(scope_from_filename "$arg")
    echo "$state,S C W M,link:${arg}[]" >> "$asciifile"
done
echo "|===================================================" >> "$asciifile"
################################# Variables
# finish file
# Build an abridged preview of label_agg_level.csv: the header plus rows
# 1-8, 12-14 and 29-31, with "..." filler rows marking the gaps.
# (Note: $nsfile is reused here; it now names the aggregation-level file,
# no longer the geography file referenced earlier.)
nsfile=label_agg_level.csv
nsfileshort=tmp_label_agg_level.csv
head -8 $nsfile > $nsfileshort
echo "...,,,,,,,,,,,,,,,,,,,,,," >> $nsfileshort
head -14 $nsfile | tail -3 >> $nsfileshort
echo "...,,,,,,,,,,,,,,,,,,,,,," >> $nsfileshort
head -31 $nsfile | tail -3 >> $nsfileshort
echo "...,,,,,,,,,,,,,,,,,,,,,," >> $nsfileshort
# Keep only the first 9 columns so the sample table fits the page width.
tmp_nsfileshort_csv=$(mktemp -p $cwd)
cut -d ',' -f 1-9 $nsfileshort >> $tmp_nsfileshort_csv
# Aggregation Level section (J2J); include:: is resolved by asciidoctor.
echo "
<<<
=== Aggregation Level
==== J2J
( link:$nsfile[] )
Measures within the J2J and QWI data products are tabulated on many different dimensions, including demographic characteristics, geography, industry, and other firm characteristics. For Origin-Destination (O-D) tables, characteristics of the origin and destination firm can be tabulated separately. Every tabulation level is assigned a unique aggregation index, represented by the agg_level variable. This index starts from 1, representing a national level grand total (all industries, workers, etc.), and progresses through different combinations of characteristics. There are gaps in the progression to leave space for aggregation levels that may be included in future data releases.
The following variables are included in the link:$nsfile[label_agg_level.csv] file:
[width=\"60%\",format=\"csv\",cols=\"<2,<5\",options=\"header\"]
|===================================================
include::variables_agg_level.csv[]
|===================================================
The characteristics available on an aggregation level are repeated using a series of flags following the standard schema:
- <<_cip_levels,cip_level>> - degree field reporting level of table
- <<_institution_levels,inst_level>> - institution reporting level of table
- <<geolevel,geo_level>> - geographic level of table
- <<ind_level,ind_level>> - industry level of table
- by_ variables - flags indicating other dimensions reported, including ownership, demographics, firm age and size.
A shortened representation of the file is provided below, the complete file is available in the link above.
[width=\"90%\",format=\"csv\",cols=\">1,3*<2,5*<1\",options=\"header\"]
|===================================================
include::$tmp_nsfileshort_csv[]
|===================================================
">> $asciifile
# use all cols
# Same abridged-sample technique as above, applied to the PSEO
# aggregation-level file: rows 1-5, 48-50 and 98-100 with "..." fillers.
tmp_pseoagg_cols=label_agg_level_pseo.csv
tmp_pseoagg_rows=$(mktemp -p $cwd)
head -5 $tmp_pseoagg_cols > $tmp_pseoagg_rows
echo "...,,,,,,,,,," >> $tmp_pseoagg_rows
head -50 $tmp_pseoagg_cols | tail -3 >> $tmp_pseoagg_rows
echo "...,,,,,,,,,," >> $tmp_pseoagg_rows
head -100 $tmp_pseoagg_cols | tail -3 >> $tmp_pseoagg_rows
echo "...,,,,,,,,,," >> $tmp_pseoagg_rows
# PSEO aggregation-level section; the variable table here is written
# inline rather than include::'d from a CSV.
echo "
==== PSEO
( link:label_agg_level_pseo.csv[] )
Measures within the PSEO data product can be tabulated by characteristics of the graduate
(e.g., institution attended, instructional program, degree level, etc.) and by characteristics of employment
(state, industry). All measures may not be available on all levels of aggregation - for example,
earnings variables may not be available when tabulating by place and industry of work, though counts are.
Every tabulation level is assigned a unique aggregation index, represented by the agg_level_pseo variable.
This index starts from 1, representing a national level grand total (all institutions, graduates, industries,
etc.), and progresses through different combinations of characteristics. There are gaps in the progression to
leave space for aggregation levels that may be included in future data releases. Aggregation levels that are
available in the PSEO release will be flagged.
The following variables are included in the link:label_agg_level_pseo.csv[] file:
[width=\"60%\",format=\"csv\",cols=\"<2,<5\",options=\"header\"]
|===================================================
Variable,Description
agg_level_pseo, index representing level of aggregation reported on a given record
grad_char,Characteristics of graduate and program
firm_char,Characterstics of place of employment
pseoe,Flag: aggregation level available on PSEO Earnings
pseof,Flag: aggregation level available on PSEO Flows
|===================================================
The characteristics available on an aggregation level are repeated using a series of flags following the standard schema:
- <<#_institution_levels,inst_levels>> - institution level of table
- <<geolevel,geo_level>> - geographic level of table
- <<ind_level,ind_level>> - industry level of table
- by_ variables - flags indicating other dimensions reported, including ownership, demographics, firm age and size.
[width=\"90%\",format=\"csv\",cols=\"^1,2*<3,8*^1\",options=\"header\"]
|===================================================
include::$tmp_pseoagg_rows[]
|===================================================
===== Restricted 4-Digit CIP Tabulations in Earnings Data (PSEOE)
Earnings estimates and counts are provided only at the 2-digit CIP level for Masters and Doctor Research programs (degree levels 07 and 17). Records are included for 4-digit programs observed, but all measures are suppressed.
">> $asciifile
# Placeholder subsection for QWI aggregation levels.
echo "
==== QWI
Aggregation level to be added to QWI in a future release
">> $asciifile
# Status Flags chapter: standard flags table (from label_flags.csv) plus
# the IPEDS count flag table.
arg=label_flags.csv
echo "
<<<
== [[statusflags]]Status Flags
( link:${arg}[] )
Most indicators in the LEHD data products have associated status flags. Each status flag in the tables above contains one of the following valid values. The values and their interpretation are listed in the tables below. Unless otherwise specified in this section, a status flag will take the values described in 7.1 Standard Status Flags.
=== Standard Status Flags
[IMPORTANT]
.Important
==============================================
Note: Currently, the J2J and PSEO tables only contain status flags '-1', '1', '5'. Status flags with values 10 or above only appear in online applications, not in CSV files.
==============================================
[width=\"80%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
=== IPEDS Count Status Flag
( link:label_flags_ipeds_count.csv[] )
Graduate counts associated with PSEO earnings tabulations are provided using public use data from the https://nces.ed.gov/ipeds/use-the-data[Integrated Postsecondary Education Data System (IPEDS)]. Counts are linked to graduation cohorts in the PSEO data and included in the PSEOE tables. In a small number of cases, misalignment in programs (CIPCODE) is observed between the IPEDS and PSEO counts. In these cases, the IPEDS counts are adjusted to be consistent with those on PSEO, and the count is flagged accordingly. For higher level aggregations (e.g., all cohorts, all CIPCODE), IPEDS totals may sum over cohorts or programs for which graduate counts were not available. The counts are released but flagged to indicate missing data. IPEDS counts may be suppressed and flagged as not available in some cases when PSEO earnings data do not meet Census Bureau publication standards.
[width=\"80%\",format=\"csv\",cols=\"^1,<4\",options=\"header\"]
|===================================================
include::label_flags_ipeds_count.csv[]
|===================================================
">> $asciifile
# Metadata chapter.  First rewrite plain terms in variables_version.csv
# into asciidoc cross-references/links, writing the result to tmp_$arg
# (which the document include::s below).
arg=variables_version.csv
sed 's/naming convention/link:lehd_csv_naming{ext-relative}[]/' $arg |
sed 's/stusps/<<stusps>>/' |
sed 's/geography/<<geography>>/' > tmp_$arg
# $(awk ...) pulls the naming-convention pattern from row 5 of
# naming_convention.csv; $(grep ... naming_geohi.csv) resolves
# $versionstate to its display name.
echo "
<<<
== [[metadata]]Metadata
( link:${arg}[] )
=== [[metadataqwij2j]]Version Metadata for QWI, J2J, and PSEO Files (version.txt)
Each data release is accompanied by one or more files with metadata on geographic and temporal coverage, in a compact notation. These files follow the following naming convention:
--------------------------------
$(awk -F, ' NR == 5 { print $1 }' naming_convention.csv )
--------------------------------
where each component is described in more detail in link:lehd_csv_naming{ext-relative}[].
The contents contains the following elements:
[width=\"90%\",format=\"csv\",cols=\"<1,<3,<4\",options=\"header\"]
|===================================================
include::tmp_$arg[]
|===================================================
For instance, the metadata for the $versionvintage QWI release of
$(grep -E "^$versionstate," naming_geohi.csv | awk -F, ' { print $2 } ' | sed 's/"//g')
(obtained from $versionurl/version_qwi.txt[here]) has the following content:
--------------------------------
" >> $asciifile
# During the RC phase, this won't work, since it is not published yet
# (the $(curl ...) substitutions fetch the live version.txt files and
# inline their contents into the document).
echo "
$(curl $versionurl/version_qwi.txt)
--------------------------------
Similarly, the metadata for the $versionj2jvintage release of
$(grep -E "^$versionstate," naming_geohi.csv | awk -F, ' { print $2 } ' | sed 's/"//g') J2J
tabulations (obtained from $versionj2jurl/version_j2j.txt[here]) has the following content:
--------------------------------
$(curl $versionj2jurl/version_j2j.txt)
--------------------------------
Some J2J metadata may contain multiple lines, as necessary.
The PSEO metadata will contain separate lines for the PSEOE and PSEOF tables. The year range for PSEO tables is based on the <<#_grad_cohort>>, the start year of the graduation cohort. An example for Colorado institutions has the following content:
--------------------------------
PSEOE CO 08 2001-2015 V4.5.0 2019Q1 pseopu_co_20190617_0839
PSEOF CO 08 2001-2015 V4.5.0 2019Q1 pseopu_co_20190617_0839
--------------------------------
=== [[metadataj2jod]]Additional Metadata for J2JOD Files (avail.csv)
(link:variables_avail.csv[])
Because the origin-destination (J2JOD) data link two regions, we provide an auxiliary file with the time range that cells containing data for each geographic pairing may appear in a data release.
[width=\"80%\",format=\"csv\",cols=\"<2,<2,<4\",options=\"header\"]
|===================================================
include::variables_avail.csv[]
|===================================================
The reference region will always be either the origin or the destination. National tabulations contain records where both origin and destination are <<geolevel,geo_level>>=N; state tabulations contain records where <<geolevel,geo_level>> in (N,S); metro tabulations contain records where <<geolevel,geo_level>> in (N,S,B). Data may be suppressed for certain combinations of regions and quarters because the estimates do not meet Census Bureau publication standards.
" >> $asciifile
# Indicator-availability (lags) section and additional PSEO metadata.
arg=variables_lags.csv
lagqwi=lags_qwi.csv
lagj2j=lags_j2j.csv
# NOTE(review): lagj2japp is assigned but never referenced in this
# section — possibly meant to appear in the bullet list below; confirm.
lagj2japp=lags_j2japp.csv
echo "
=== [[metadatalags]]Metadata on Indicator Availability
(link:${arg}[])
Each <<indicators,Indicator>> potentially requires leads and/or lags of data to be computed, and thus may not be available for certain time periods. Only two QWI will be available for all quarters of the time span described by +start+ and +end+ in the <<metadataqwij2j,version.txt>> files: +EmpTotal+ and +Payroll+. The date range for QWI, QWIR, J2J, and J2JR can be found in <<metadataqwij2j,version.txt>>; the date range for J2JOD can be found in <<metadataj2jod,avail.csv>>.
For each indicator, the following files contain the quarters of data required to be available relative to the overall date range described in the metadata for the release:
* link:${lagqwi}[]
* link:${lagj2j}[]
The files are structured as follows:
[width=\"80%\",format=\"csv\",cols=\"<2,<2,<4\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
<<<
" >> $asciifile
arg=variables_pseo_institutions.csv
echo "
=== [[metadatapseo]] Additional Metadata for PSEO Files
Several additional files within each state release are included to provide information on the institutions within the scope of PSEO. The ALL directory consolidates the individual state files.
==== PSEO Data Partners and Coverage (pseo_[ST]_partners.txt)
This file contains information on PSEO coverage of graduates, as well as the partner organization(s) providing data. This is presented on several lines of a text file, as follows:
* State numeric FIPS code and state name
* Share of statewide graduates covered by PSEO
* Name(s) of data provider(s) (multiple lines, as required)
The share is derived from https://nces.ed.gov/ipeds/use-the-data[Integrated Postsecondary Education Data System (IPEDS)] data, using program graduates from 2015 for degree levels within the scope of PSEO. It calculates the number of graduates from institutions that are available to PSEO as a fraction of graduates from all institutions within IPEDS for the reference state.
A sample file follows:
----
08 Colorado
72% of statewide graduates covered (2015 estimate)
Colorado Department of Higher Education
----
==== Institutions Available Within PSEO (pseo_[ST]_institutions.csv)
(link:${arg}[])
This file provides the list of institutions that are included in the PSEO release. This file is an extract from link:label_institution.csv[].
The files are structured as follows:
[width=\"80%\",format=\"csv\",cols=\"<2,<1,<4\",options=\"header\"]
|===================================================
include::$arg[]
|===================================================
" >> $asciifile
# Append the schema changelog verbatim, then stamp the build date.
cat CHANGES_SCHEMA.txt >> $asciifile
echo "
<<<
*******************
Released: $(date '+%F')
*******************
" >> $asciifile
echo "$asciifile created"
# create HTML docs
asciidoctor -b html5 -a icons -a toc -a numbered -a linkcss -a toclevels=$toclevels -a sectnumlevels=$toclevels -a outfilesuffix=.html $asciifile
[[ -f $(basename $asciifile .asciidoc).html ]] && echo "$(basename $asciifile .asciidoc).html created"
# create PDF docs, only if an official release
if [[ "$version" = "official" ]]; then
asciidoctor-pdf -a pdf-page-size=letter -a icons -a toc -a numbered -a outfilesuffix=.pdf $asciifile
[[ -f $(basename $asciifile .asciidoc).pdf ]] && echo "$(basename $asciifile .asciidoc).pdf created"
fi
# echo "Deleting tmp files"
# Remove scratch files: the mktemp files above were created inside $cwd
# with tmp* names, so this sweep catches them along with the tmp_* copies.
rm -f tmp*
|
#!/bin/bash
#judgement
# One-time container setup: if the supervisord config already exists, a
# previous run completed — exit quietly.  (-a is the legacy bash spelling
# of the -e "file exists" test.)
if [[ -a /etc/supervisor/conf.d/supervisord.conf ]]; then
exit 0
fi
#supervisor
# Generate the supervisord config: run in the foreground (container PID 1
# style) and manage postfix (via the wrapper below) and rsyslog.
cat > /etc/supervisor/conf.d/supervisord.conf <<EOF
[supervisord]
nodaemon=true
[program:postfix]
command=/opt/postfix.sh
[program:rsyslog]
command=/usr/sbin/rsyslogd -n -c3
EOF
############
# postfix
############
# Wrapper script that supervisord uses to start the postfix service.
cat >> /opt/postfix.sh <<EOF
#!/bin/bash
service postfix start
EOF
chmod +x /opt/postfix.sh
# Base postfix configuration.  Quoting the myhostname assignment protects
# against word splitting if $maildomain ever contains whitespace; -F takes
# every service in master.cf out of chroot.
postconf -e "myhostname=$maildomain"
postconf -F '*/*/chroot = n'
############
# SASL SUPPORT FOR CLIENTS
# The following options set parameters needed by Postfix to enable
# Cyrus-SASL support for authentication of mail clients.
############
# /etc/postfix/main.cf
postconf -e smtpd_sasl_auth_enable=yes
postconf -e broken_sasl_auth_clients=yes
postconf -e smtpd_recipient_restrictions=permit_sasl_authenticated,reject_unauth_destination
# smtpd.conf
cat >> /etc/postfix/sasl/smtpd.conf <<EOF
pwcheck_method: auxprop
auxprop_plugin: sasldb
mech_list: PLAIN LOGIN CRAM-MD5 DIGEST-MD5 NTLM
EOF
# sasldb2: $smtp_user is a comma-separated list of user:password pairs.
# Split it onto one line per account, then register each in the sasldb.
# printf (not echo) keeps passwords containing backslashes intact, and
# quoted expansions preserve embedded whitespace.
printf '%s\n' "$smtp_user" | tr ',' '\n' > /tmp/passwd
while IFS=':' read -r _user _pwd; do
printf '%s\n' "$_pwd" | saslpasswd2 -p -c -u "$maildomain" "$_user"
done < /tmp/passwd
# user:group is the portable chown syntax; the old "postfix.sasl" dot
# form is deprecated and rejected by some chown implementations.
chown postfix:sasl /etc/sasldb2
############
# Enable TLS
############
# Configure TLS only when both a certificate and a key are present.
# The *.crt / *.key patterns are quoted so the shell passes them to find
# verbatim instead of glob-expanding them against the current directory
# (the unquoted form breaks whenever a matching file exists in $PWD).
# NOTE(review): find may print several paths if multiple cert/key pairs
# are installed; this setup assumes exactly one pair — confirm.
if [[ -n "$(find /etc/postfix/certs -iname '*.crt')" && -n "$(find /etc/postfix/certs -iname '*.key')" ]]; then
# /etc/postfix/main.cf
postconf -e "smtpd_tls_cert_file=$(find /etc/postfix/certs -iname '*.crt')"
postconf -e "smtpd_tls_key_file=$(find /etc/postfix/certs -iname '*.key')"
chmod 400 /etc/postfix/certs/*.*
# /etc/postfix/master.cf: enable the submission (port 587) service with
# mandatory TLS and SASL authentication.
postconf -M submission/inet="submission inet n - n - - smtpd"
postconf -P "submission/inet/syslog_name=postfix/submission"
postconf -P "submission/inet/smtpd_tls_security_level=encrypt"
postconf -P "submission/inet/smtpd_sasl_auth_enable=yes"
postconf -P "submission/inet/milter_macro_daemon_name=ORIGINATING"
postconf -P "submission/inet/smtpd_recipient_restrictions=permit_sasl_authenticated,reject_unauth_destination"
fi
# Opportunistic TLS for outgoing mail.
cat >> /etc/postfix/main.cf <<EOF
smtp_tls_security_level = may
EOF
#############
# opendkim
#############
# Locate the DKIM signing key once instead of re-running find four times;
# the quoted pattern prevents the shell from glob-expanding *.private
# against the current directory.
# NOTE(review): the KeyTable entry below can only reference a single key
# file, so this setup assumes exactly one *.private key exists — confirm.
dkim_key=$(find /etc/opendkim/domainkeys -iname '*.private')
# No signing key installed -> skip DKIM entirely; setup is complete.
if [[ -z "$dkim_key" ]]; then
exit 0
fi
# Let supervisord manage the opendkim milter in the foreground.
cat >> /etc/supervisor/conf.d/supervisord.conf <<EOF
[program:opendkim]
command=/usr/sbin/opendkim -f
EOF
# /etc/postfix/main.cf: route all mail through the opendkim milter.
postconf -e milter_protocol=2
postconf -e milter_default_action=accept
postconf -e smtpd_milters=inet:localhost:12301
postconf -e non_smtpd_milters=inet:localhost:12301
cat >> /etc/opendkim.conf <<EOF
AutoRestart Yes
AutoRestartRate 10/1h
UMask 002
Syslog yes
SyslogSuccess Yes
LogWhy Yes
Canonicalization relaxed/simple
ExternalIgnoreList refile:/etc/opendkim/TrustedHosts
InternalHosts refile:/etc/opendkim/TrustedHosts
KeyTable refile:/etc/opendkim/KeyTable
SigningTable refile:/etc/opendkim/SigningTable
Mode sv
PidFile /var/run/opendkim/opendkim.pid
SignatureAlgorithm rsa-sha256
UserID opendkim:opendkim
Socket inet:12301@localhost
EOF
cat >> /etc/default/opendkim <<EOF
SOCKET="inet:12301@localhost"
EOF
# Hosts whose mail is signed rather than merely verified.
cat >> /etc/opendkim/TrustedHosts <<EOF
127.0.0.1
localhost
192.168.0.1/24
*.$maildomain
EOF
cat >> /etc/opendkim/KeyTable <<EOF
mail._domainkey.$maildomain $maildomain:mail:$dkim_key
EOF
cat >> /etc/opendkim/SigningTable <<EOF
*@$maildomain mail._domainkey.$maildomain
EOF
# Lock the private key down to the opendkim user.
chown opendkim:opendkim "$dkim_key"
chmod 400 "$dkim_key"
|
<filename>lib/climgur/imgurget.rb
# Scrapes imgur's front page and renders previews/full images as coloured
# ASCII art on the terminal.
#
# Each entry of #images is a Hash with keys :description, :preview_url,
# :full_url and :type.
# NOTE(review): relies on Nokogiri, AsciiArt and open-uri's Kernel#open
# being loaded elsewhere in the project.
class Climgur::ImgurGet
  attr_accessor :images

  def initialize
    @images = []
  end

  # Fetch imgur's front page and collect one hash per post, then keep only
  # plain images (galleries/animated gifs are filtered out by :type).
  # Returns self so calls can be chained.
  def scrape_main_page
    mainpage = Nokogiri::HTML(open("http://www.imgur.com"))
    mainpage.css("div.post").each do |preview|
      hash = {}
      hash[:description] = preview.css("div.hover p").text
      hash[:preview_url] = preview.css(".image-list-link img").first["src"]
      # The full-size URL differs from the preview only by a trailing
      # size-suffix letter 'b' before ".jpg".  Use an anchored sub so we
      # strip exactly that suffix; the previous gsub('b.jpg', '.jpg')
      # would also rewrite any "b.jpg" occurring earlier in the URL.
      hash[:full_url] = hash[:preview_url].sub(/b\.jpg\z/, '.jpg')
      hash[:type] = preview.css("div.hover div.post-info").text # because I want to filter out galleries and animated gifs
      @images << hash
    end
    @images = @images.select {|x| x[:type].include?('image')} #filtering out only the images
    self
  end

  # Print every scraped preview as small ASCII art with its 1-based index.
  # Returns self so calls can be chained.
  def display_top_images
    puts "the most popular images on imgur right now: \n --------"
    self.images.each_with_index do |x, index|
      puts self.small_image(x[:preview_url])
      puts "(#{index+1}) "+x[:description]
      puts "\n"
    end
    self
  end

  # Render the full-size version of the image at the given 1-based index
  # (index may arrive as a string from CLI input, hence to_i).
  def display_large_image(index)
    image = self.images[index.to_i-1]
    puts self.large_image(image[:full_url])
    puts "(#{index}) "+image[:description]
  end

  # 44-column coloured ASCII rendering (preview size).  URLs are
  # protocol-relative ("//i.imgur.com/..."), so prefix the scheme.
  def small_image(url)
    AsciiArt.new("http:"+url).to_ascii_art(color: true, width: 44)
  end

  # 122-column coloured ASCII rendering (full size).
  def large_image(url)
    AsciiArt.new("http:"+url).to_ascii_art(color: true, width: 122)
  end
end
|
/**
 * A single feed post record.
 * Shape is inferred from field names — confirm against the producing API.
 */
interface Post {
/** When true the post is excluded from normal display. */
hidden: boolean;
/** Discriminator for the post variant (presumably text vs. link vs. image — confirm). */
type: string;
/** Post title. */
title: string;
/** Body text; present only for text-style posts. */
text?: string;
/** External URL; present only for link-style posts. */
linkUrl?: string;
/** Image reference; present only for image-style posts. */
image?: string;
/** Serialized date (string format not shown here — presumably ISO 8601; confirm). */
date: string;
/** Comment count. */
comments: number;
/** Upvote count. */
upvotes: number;
/** Downvote count. */
downvotes: number;
/** Backend identifier (MongoDB-style `_id` naming — confirm source). */
_id: string;
}
export default Post;
|
def mean_and_sd(lst):
    """Return the mean and population standard deviation of a sequence.

    Args:
        lst: non-empty sequence of numbers.

    Returns:
        Tuple ``(mean, sd)`` where ``sd`` is the *population* standard
        deviation (divides by ``len(lst)``, not ``len(lst) - 1``).

    Raises:
        ValueError: if ``lst`` is empty (previously this surfaced as an
            uninformative ZeroDivisionError).
    """
    if not lst:
        raise ValueError("mean_and_sd() requires a non-empty sequence")
    n = len(lst)
    mean = sum(lst) / n
    # Population variance: average squared deviation from the mean.
    variance = sum((num - mean) ** 2 for num in lst) / n
    sd = variance ** 0.5
    return mean, sd
#!/usr/bin/env bash
# Container entrypoint: bring up PHP-FPM and nginx, then block forever so
# the container's foreground process stays alive.  Abort on first failure.
set -e

for svc in php7.4-fpm nginx; do
    sudo service "$svc" start
done

# Park the shell indefinitely; supervised services keep running.
sleep infinity
|
# Log the invocation, then delegate to the shared intogen setup tool.
echo "Running setup $* ..."
# Forward with "$@" (not $*) so arguments containing whitespace keep
# their boundaries when passed through to the setup tool.
/nfs/software/galaxy_dev/tools/intogen/setup "$@"
|
#!/usr/bin/env bash
# Copyright 2009 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# Generate Go code listing errors and other #defined constant
# values (ENAMETOOLONG etc.), by asking the preprocessor
# about the definitions.
# Force the C locale so preprocessor and sort output is deterministic
# regardless of the host environment.
unset LANG
export LC_ALL=C
export LC_CTYPE=C
if test -z "$GOARCH" -o -z "$GOOS"; then
echo 1>&2 "GOARCH or GOOS not defined in environment"
exit 1
fi
# Check that we are using the new build system if we should
if [[ "$GOOS" = "linux" ]] && [[ "$GOARCH" != "sparc64" ]]; then
if [[ "$GOLANG_SYS_BUILD" != "docker" ]]; then
echo 1>&2 "In the new build system, mkerrors should not be called directly."
echo 1>&2 "See README.md"
exit 1
fi
fi
# Pick a default C compiler: gcc on AIX, the system cc elsewhere;
# $CC from the environment always wins.
if [[ "$GOOS" = "aix" ]]; then
CC=${CC:-gcc}
else
CC=${CC:-cc}
fi
if [[ "$GOOS" = "solaris" ]]; then
# Assumes GNU versions of utilities in PATH.
export PATH=/usr/gnu/bin:$PATH
fi
uname=$(uname)
# Per-OS C include lists.  Each includes_<uname> variable holds the headers
# (and compatibility #defines) fed to the C preprocessor for that platform;
# the generic $includes list at the end is appended for every OS.  The
# variable is selected at runtime via indirect expansion on
# "includes_$(uname)".  The string contents are C source handed to $CC, so
# annotations must stay out here in the shell.
includes_AIX='
#include <net/if.h>
#include <net/netopt.h>
#include <netinet/ip_mroute.h>
#include <sys/protosw.h>
#include <sys/stropts.h>
#include <sys/mman.h>
#include <sys/poll.h>
#include <sys/termio.h>
#include <termios.h>
#include <fcntl.h>
#define AF_LOCAL AF_UNIX
'
includes_Darwin='
#define _DARWIN_C_SOURCE
#define KERNEL
#define _DARWIN_USE_64_BIT_INODE
#include <stdint.h>
#include <sys/attr.h>
#include <sys/types.h>
#include <sys/event.h>
#include <sys/ptrace.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/sysctl.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/utsname.h>
#include <sys/wait.h>
#include <sys/xattr.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/ip.h>
#include <termios.h>
'
includes_DragonFly='
#include <sys/types.h>
#include <sys/event.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/stat.h>
#include <sys/sysctl.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/wait.h>
#include <sys/ioctl.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <termios.h>
#include <netinet/ip.h>
#include <net/ip_mroute/ip_mroute.h>
'
includes_FreeBSD='
#include <sys/capability.h>
#include <sys/param.h>
#include <sys/types.h>
#include <sys/event.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/stat.h>
#include <sys/sysctl.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/wait.h>
#include <sys/ioctl.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <termios.h>
#include <netinet/ip.h>
#include <netinet/ip_mroute.h>
#include <sys/extattr.h>
#if __FreeBSD__ >= 10
#define IFT_CARP	0xf8 // IFT_CARP is deprecated in FreeBSD 10
#undef SIOCAIFADDR
#define SIOCAIFADDR	_IOW(105, 26, struct oifaliasreq) // ifaliasreq contains if_data
#undef SIOCSIFPHYADDR
#define SIOCSIFPHYADDR	_IOW(105, 70, struct oifaliasreq) // ifaliasreq contains if_data
#endif
'
includes_Linux='
#define _LARGEFILE_SOURCE
#define _LARGEFILE64_SOURCE
#ifndef __LP64__
#define _FILE_OFFSET_BITS 64
#endif
#define _GNU_SOURCE
// <sys/ioctl.h> is broken on powerpc64, as it fails to include definitions of
// these structures. We just include them copied from <bits/termios.h>.
#if defined(__powerpc__)
struct sgttyb {
char sg_ispeed;
char sg_ospeed;
char sg_erase;
char sg_kill;
short sg_flags;
};
struct tchars {
char t_intrc;
char t_quitc;
char t_startc;
char t_stopc;
char t_eofc;
char t_brkc;
};
struct ltchars {
char t_suspc;
char t_dsuspc;
char t_rprntc;
char t_flushc;
char t_werasc;
char t_lnextc;
};
#endif
#include <bits/sockaddr.h>
#include <sys/epoll.h>
#include <sys/eventfd.h>
#include <sys/inotify.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/prctl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/socket.h>
#include <sys/xattr.h>
#include <linux/if.h>
#include <linux/if_alg.h>
#include <linux/if_arp.h>
#include <linux/if_ether.h>
#include <linux/if_tun.h>
#include <linux/if_packet.h>
#include <linux/if_addr.h>
#include <linux/falloc.h>
#include <linux/filter.h>
#include <linux/fs.h>
#include <linux/kexec.h>
#include <linux/keyctl.h>
#include <linux/magic.h>
#include <linux/memfd.h>
#include <linux/netfilter/nfnetlink.h>
#include <linux/netlink.h>
#include <linux/net_namespace.h>
#include <linux/perf_event.h>
#include <linux/random.h>
#include <linux/reboot.h>
#include <linux/rtnetlink.h>
#include <linux/ptrace.h>
#include <linux/sched.h>
#include <linux/seccomp.h>
#include <linux/sockios.h>
#include <linux/wait.h>
#include <linux/icmpv6.h>
#include <linux/serial.h>
#include <linux/can.h>
#include <linux/vm_sockets.h>
#include <linux/taskstats.h>
#include <linux/genetlink.h>
#include <linux/watchdog.h>
#include <linux/hdreg.h>
#include <linux/rtc.h>
#include <linux/if_xdp.h>
#include <mtd/ubi-user.h>
#include <net/route.h>
#include <asm/termbits.h>
#ifndef MSG_FASTOPEN
#define MSG_FASTOPEN    0x20000000
#endif
#ifndef PTRACE_GETREGS
#define PTRACE_GETREGS	0xc
#endif
#ifndef PTRACE_SETREGS
#define PTRACE_SETREGS	0xd
#endif
#ifndef SOL_NETLINK
#define SOL_NETLINK	270
#endif
#ifdef SOL_BLUETOOTH
// SPARC includes this in /usr/include/sparc64-linux-gnu/bits/socket.h
// but it is already in bluetooth_linux.go
#undef SOL_BLUETOOTH
#endif
// Certain constants are missing from the fs/crypto UAPI
#define FS_KEY_DESC_PREFIX              "fscrypt:"
#define FS_KEY_DESC_PREFIX_SIZE         8
#define FS_MAX_KEY_SIZE                 64
// XDP socket constants do not appear to be picked up otherwise.
// Copied from samples/bpf/xdpsock_user.c.
#ifndef SOL_XDP
#define SOL_XDP 283
#endif
#ifndef AF_XDP
#define AF_XDP 44
#endif
'
includes_NetBSD='
#include <sys/types.h>
#include <sys/param.h>
#include <sys/event.h>
#include <sys/extattr.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/sysctl.h>
#include <sys/termios.h>
#include <sys/ttycom.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/in_systm.h>
#include <netinet/ip.h>
#include <netinet/ip_mroute.h>
#include <netinet/if_ether.h>
// Needed since <sys/param.h> refers to it...
#define schedppq 1
'
includes_OpenBSD='
#include <sys/types.h>
#include <sys/param.h>
#include <sys/event.h>
#include <sys/mman.h>
#include <sys/mount.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/stat.h>
#include <sys/sysctl.h>
#include <sys/termios.h>
#include <sys/ttycom.h>
#include <sys/unistd.h>
#include <sys/wait.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_types.h>
#include <net/if_var.h>
#include <net/route.h>
#include <netinet/in.h>
#include <netinet/in_systm.h>
#include <netinet/ip.h>
#include <netinet/ip_mroute.h>
#include <netinet/if_ether.h>
#include <net/if_bridge.h>
// We keep some constants not supported in OpenBSD 5.5 and beyond for
// the promise of compatibility.
#define EMUL_ENABLED		0x1
#define EMUL_NATIVE		0x2
#define IPV6_FAITH		0x1d
#define IPV6_OPTIONS		0x1
#define IPV6_RTHDR_STRICT	0x1
#define IPV6_SOCKOPT_RESERVED1	0x3
#define SIOCGIFGENERIC		0xc020693a
#define SIOCSIFGENERIC		0x80206939
#define WALTSIG			0x4
'
includes_SunOS='
#include <limits.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/sockio.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <sys/wait.h>
#include <sys/ioctl.h>
#include <sys/mkdev.h>
#include <net/bpf.h>
#include <net/if.h>
#include <net/if_arp.h>
#include <net/if_types.h>
#include <net/route.h>
#include <netinet/in.h>
#include <termios.h>
#include <netinet/ip.h>
#include <netinet/ip_mroute.h>
'
# Headers common to every platform, appended after the OS-specific list.
includes='
#include <sys/types.h>
#include <sys/file.h>
#include <fcntl.h>
#include <dirent.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/ip.h>
#include <netinet/ip6.h>
#include <netinet/tcp.h>
#include <errno.h>
#include <sys/signal.h>
#include <signal.h>
#include <sys/resource.h>
#include <time.h>
'
# Extra compiler flags passed straight through to $CC and cgo.
# NOTE(review): assigning "$@" to a scalar joins the arguments like "$*";
# preserved as-is since the downstream expansion is unquoted anyway.
ccflags="$@"
# Write go tool cgo -godefs input.
(
echo package unix
echo
# C preamble for cgo: the OS-specific include list, selected via bash
# indirect expansion on "includes_$(uname)", plus the common $includes.
echo '/*'
indirect="includes_$(uname)"
echo "${!indirect} $includes"
echo '*/'
echo 'import "C"'
echo 'import "syscall"'
echo
echo 'const ('
# The gcc command line prints all the #defines
# it encounters while processing the input
echo "${!indirect} $includes" | $CC -x c - -E -dM $ccflags |
# The awk program below is a big name-pattern whitelist: matching #define
# names are emitted as "NAME = C.NAME" const lines for cgo to resolve.
# Leading {next} rules and interleaved !~ terms are carve-outs for names
# that clash, are handled elsewhere, or are not constants.  __W* names
# are emitted with the leading "__" stripped.
awk '
$1 != "#define" || $2 ~ /\(/ || $3 == "" {next}
$2 ~ /^E([ABCD]X|[BIS]P|[SD]I|S|FL)$/ {next}  # 386 registers
$2 ~ /^(SIGEV_|SIGSTKSZ|SIGRT(MIN|MAX))/ {next}
$2 ~ /^(SCM_SRCRT)$/ {next}
$2 ~ /^(MAP_FAILED)$/ {next}
$2 ~ /^ELF_.*$/ {next}# <asm/elf.h> contains ELF_ARCH, etc.
$2 ~ /^EXTATTR_NAMESPACE_NAMES/ ||
$2 ~ /^EXTATTR_NAMESPACE_[A-Z]+_STRING/ {next}
$2 !~ /^ECCAPBITS/ &&
$2 !~ /^ETH_/ &&
$2 !~ /^EPROC_/ &&
$2 !~ /^EQUIV_/ &&
$2 !~ /^EXPR_/ &&
$2 ~ /^E[A-Z0-9_]+$/ ||
$2 ~ /^B[0-9_]+$/ ||
$2 ~ /^(OLD|NEW)DEV$/ ||
$2 == "BOTHER" ||
$2 ~ /^CI?BAUD(EX)?$/ ||
$2 == "IBSHIFT" ||
$2 ~ /^V[A-Z0-9]+$/ ||
$2 ~ /^CS[A-Z0-9]/ ||
$2 ~ /^I(SIG|CANON|CRNL|UCLC|EXTEN|MAXBEL|STRIP|UTF8)$/ ||
$2 ~ /^IGN/ ||
$2 ~ /^IX(ON|ANY|OFF)$/ ||
$2 ~ /^IN(LCR|PCK)$/ ||
$2 !~ "X86_CR3_PCID_NOFLUSH" &&
$2 ~ /(^FLU?SH)|(FLU?SH$)/ ||
$2 ~ /^C(LOCAL|READ|MSPAR|RTSCTS)$/ ||
$2 == "BRKINT" ||
$2 == "HUPCL" ||
$2 == "PENDIN" ||
$2 == "TOSTOP" ||
$2 == "XCASE" ||
$2 == "ALTWERASE" ||
$2 == "NOKERNINFO" ||
$2 ~ /^PAR/ ||
$2 ~ /^SIG[^_]/ ||
$2 ~ /^O[CNPFPL][A-Z]+[^_][A-Z]+$/ ||
$2 ~ /^(NL|CR|TAB|BS|VT|FF)DLY$/ ||
$2 ~ /^(NL|CR|TAB|BS|VT|FF)[0-9]$/ ||
$2 ~ /^O?XTABS$/ ||
$2 ~ /^TC[IO](ON|OFF)$/ ||
$2 ~ /^IN_/ ||
$2 ~ /^LOCK_(SH|EX|NB|UN)$/ ||
$2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|ICMP6|TCP|EVFILT|NOTE|EV|SHUT|PROT|MAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR)_/ ||
$2 ~ /^TP_STATUS_/ ||
$2 ~ /^FALLOC_/ ||
$2 == "ICMPV6_FILTER" ||
$2 == "SOMAXCONN" ||
$2 == "NAME_MAX" ||
$2 == "IFNAMSIZ" ||
$2 ~ /^CTL_(HW|KERN|MAXNAME|NET|QUERY)$/ ||
$2 ~ /^KERN_(HOSTNAME|OS(RELEASE|TYPE)|VERSION)$/ ||
$2 ~ /^HW_MACHINE$/ ||
$2 ~ /^SYSCTL_VERS/ ||
$2 !~ "MNT_BITS" &&
$2 ~ /^(MS|MNT|UMOUNT)_/ ||
$2 ~ /^TUN(SET|GET|ATTACH|DETACH)/ ||
$2 ~ /^(O|F|E?FD|NAME|S|PTRACE|PT)_/ ||
$2 ~ /^KEXEC_/ ||
$2 ~ /^LINUX_REBOOT_CMD_/ ||
$2 ~ /^LINUX_REBOOT_MAGIC[12]$/ ||
$2 !~ "NLA_TYPE_MASK" &&
$2 ~ /^(NETLINK|NLM|NLMSG|NLA|IFA|IFAN|RT|RTC|RTCF|RTN|RTPROT|RTNH|ARPHRD|ETH_P|NETNSA)_/ ||
$2 ~ /^SIOC/ ||
$2 ~ /^TIOC/ ||
$2 ~ /^TCGET/ ||
$2 ~ /^TCSET/ ||
$2 ~ /^TC(FLSH|SBRKP?|XONC)$/ ||
$2 !~ "RTF_BITS" &&
$2 ~ /^(IFF|IFT|NET_RT|RTM|RTF|RTV|RTA|RTAX)_/ ||
$2 ~ /^BIOC/ ||
$2 ~ /^RUSAGE_(SELF|CHILDREN|THREAD)/ ||
$2 ~ /^RLIMIT_(AS|CORE|CPU|DATA|FSIZE|LOCKS|MEMLOCK|MSGQUEUE|NICE|NOFILE|NPROC|RSS|RTPRIO|RTTIME|SIGPENDING|STACK)|RLIM_INFINITY/ ||
$2 ~ /^PRIO_(PROCESS|PGRP|USER)/ ||
$2 ~ /^CLONE_[A-Z_]+/ ||
$2 !~ /^(BPF_TIMEVAL)$/ &&
$2 ~ /^(BPF|DLT)_/ ||
$2 ~ /^CLOCK_/ ||
$2 ~ /^CAN_/ ||
$2 ~ /^CAP_/ ||
$2 ~ /^ALG_/ ||
$2 ~ /^FS_(POLICY_FLAGS|KEY_DESC|ENCRYPTION_MODE|[A-Z0-9_]+_KEY_SIZE|IOC_(GET|SET)_ENCRYPTION)/ ||
$2 ~ /^GRND_/ ||
$2 ~ /^KEY_(SPEC|REQKEY_DEFL)_/ ||
$2 ~ /^KEYCTL_/ ||
$2 ~ /^PERF_EVENT_IOC_/ ||
$2 ~ /^SECCOMP_MODE_/ ||
$2 ~ /^SPLICE_/ ||
$2 ~ /^SYNC_FILE_RANGE_/ ||
$2 !~ /^AUDIT_RECORD_MAGIC/ &&
$2 !~ /IOC_MAGIC/ &&
$2 ~ /^[A-Z][A-Z0-9_]+_MAGIC2?$/ ||
$2 ~ /^(VM|VMADDR)_/ ||
$2 ~ /^IOCTL_VM_SOCKETS_/ ||
$2 ~ /^(TASKSTATS|TS)_/ ||
$2 ~ /^CGROUPSTATS_/ ||
$2 ~ /^GENL_/ ||
$2 ~ /^STATX_/ ||
$2 ~ /^RENAME/ ||
$2 ~ /^UBI_IOC[A-Z]/ ||
$2 ~ /^UTIME_/ ||
$2 ~ /^XATTR_(CREATE|REPLACE|NO(DEFAULT|FOLLOW|SECURITY)|SHOWCOMPRESSION)/ ||
$2 ~ /^ATTR_(BIT_MAP_COUNT|(CMN|VOL|FILE)_)/ ||
$2 ~ /^FSOPT_/ ||
$2 ~ /^WDIOC_/ ||
$2 ~ /^NFN/ ||
$2 ~ /^XDP_/ ||
$2 ~ /^(HDIO|WIN|SMART)_/ ||
$2 !~ "WMESGLEN" &&
$2 ~ /^W[A-Z0-9]+$/ ||
$2 ~ /^BLK[A-Z]*(GET$|SET$|BUF$|PART$|SIZE)/ {printf("\t%s = C.%s\n", $2, $2)}
$2 ~ /^__WCOREFLAG$/ {next}
$2 ~ /^__W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", substr($2,3), $2)}
{next}
' | sort
echo ')'
) >_const.go
# Pull out the error names for later.
errors=$(
echo '#include <errno.h>' | $CC -x c - -E -dM $ccflags |
awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print $2 }' |
sort
)
# Pull out the signal names for later.
# (egrep is the historical spelling of grep -E.)
signals=$(
echo '#include <signal.h>' | $CC -x c - -E -dM $ccflags |
awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print $2 }' |
egrep -v '(SIGSTKSIZE|SIGSTKSZ|SIGRT|SIGMAX64)' |
sort
)
# Again, writing regexps to a file.
# These anchored patterns match the generated "\tNAME = value" const
# lines and are used below to split _error.out into three groups.
echo '#include <errno.h>' | $CC -x c - -E -dM $ccflags |
awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print "^\t" $2 "[ \t]*=" }' |
sort >_error.grep
echo '#include <signal.h>' | $CC -x c - -E -dM $ccflags |
awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print "^\t" $2 "[ \t]*=" }' |
egrep -v '(SIGSTKSIZE|SIGSTKSZ|SIGRT|SIGMAX64)' |
sort >_signal.grep
# Emit the generated-file header and the build tag for this GOOS/GOARCH.
echo '// mkerrors.sh' "$@"
echo '// Code generated by the command above; see README.md. DO NOT EDIT.'
echo
echo "// +build ${GOARCH},${GOOS}"
echo
# Resolve the C.NAME references in _const.go to concrete values, then
# split the output: plain constants pass straight through; E* and SIG*
# names (matched by the grep files above) are re-emitted below wrapped
# in syscall.Errno / syscall.Signal conversions.
go tool cgo -godefs -- "$@" _const.go >_error.out
cat _error.out | grep -vf _error.grep | grep -vf _signal.grep
echo
echo '// Errors'
echo 'const ('
cat _error.out | grep -f _error.grep | sed 's/=\(.*\)/= syscall.Errno(\1)/'
echo ')'
echo
echo '// Signals'
echo 'const ('
cat _error.out | grep -f _signal.grep | sed 's/=\(.*\)/= syscall.Signal(\1)/'
echo ')'
# Run C program to print error and syscall strings.
# The subshell assembles _errors.c: a C program that, when compiled and
# run, prints the Go errorList / signalList tables (name + strerror /
# strsignal description for every constant collected above).
(
echo -E "
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <ctype.h>
#include <string.h>
#include <signal.h>
#define nelem(x) (sizeof(x)/sizeof((x)[0]))
enum { A = 'A', Z = 'Z', a = 'a', z = 'z' }; // avoid need for single quotes below
struct tuple {
int num;
const char *name;
};
struct tuple errors[] = {
"
# One {NUM, "NAME"} initializer per errno constant found earlier.
for i in $errors
do
echo -E '	{'$i', "'$i'" },'
done
echo -E "
};
struct tuple signals[] = {
"
# One {NUM, "NAME"} initializer per signal constant found earlier.
for i in $signals
do
echo -E '	{'$i', "'$i'" },'
done
# Use -E because on some systems bash builtin interprets \n itself.
echo -E '
};
static int
tuplecmp(const void *a, const void *b)
{
return ((struct tuple *)a)->num - ((struct tuple *)b)->num;
}
int
main(void)
{
int i, e;
char buf[1024], *p;
printf("\n\n// Error table\n");
printf("var errorList = [...]struct {\n");
printf("\tnum syscall.Errno\n");
printf("\tname string\n");
printf("\tdesc string\n");
printf("} {\n");
qsort(errors, nelem(errors), sizeof errors[0], tuplecmp);
for(i=0; i<nelem(errors); i++) {
e = errors[i].num;
if(i > 0 && errors[i-1].num == e)
continue;
strcpy(buf, strerror(e));
// lowercase first letter: Bad -> bad, but STREAM -> STREAM.
if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z)
buf[0] += a - A;
printf("\t{ %d, \"%s\", \"%s\" },\n", e, errors[i].name, buf);
}
printf("}\n\n");
printf("\n\n// Signal table\n");
printf("var signalList = [...]struct {\n");
printf("\tnum syscall.Signal\n");
printf("\tname string\n");
printf("\tdesc string\n");
printf("} {\n");
qsort(signals, nelem(signals), sizeof signals[0], tuplecmp);
for(i=0; i<nelem(signals); i++) {
e = signals[i].num;
if(i > 0 && signals[i-1].num == e)
continue;
strcpy(buf, strsignal(e));
// lowercase first letter: Bad -> bad, but STREAM -> STREAM.
if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z)
buf[0] += a - A;
// cut trailing : number.
p = strrchr(buf, ":"[0]);
if(p)
*p = '\0';
printf("\t{ %d, \"%s\", \"%s\" },\n", e, signals[i].name, buf);
}
printf("}\n\n");
return 0;
}
'
) >_errors.c
# Compile and run the table generator, then remove all intermediates.
$CC $ccflags -o _errors _errors.c && $GORUN ./_errors && rm -f _errors.c _errors _const.go _error.grep _signal.grep _error.out
|
#!/bin/sh
# clj - Clojure launcher script
#
# With no arguments: starts an rlwrap-wrapped interactive REPL with
# completion support.  With arguments: runs the first argument as a
# Clojure script.
BREAK_CHARS="\(\){}[],^%$#@\"\";:''|\\"
cljjar='lib/clojure.jar'
cljclass='clojure.lang.Repl'
cljscript='clojure.lang.Script'
cljcompletions='.clj_completions'

# Resolve symlinks so the script can locate its install directory even
# when invoked through a link.
dir=$0
while [ -h "$dir" ]; do
	ls=`ls -ld "$dir"`
	link=`expr "$ls" : '.*-> \(.*\)$'`
	if expr "$link" : '/.*' > /dev/null; then
		dir="$link"
	else
		dir=`dirname "$dir"`"/$link"
	fi
done
# FIX: quote expansions so paths containing spaces do not word-split.
dir=`dirname "$dir"`
dir=`cd "$dir" > /dev/null && pwd`
cljjar="$dir/../$cljjar"
cp="${PWD}:${cljjar}"
cljcompletions="$dir/../$cljcompletions"

# Add extra jars as specified by `.clojure` file
# Borrowed from <http://github.com/mreid/clojure-framework>
if [ -f .clojure ]; then
	cp="$cp:`cat .clojure`"
fi

if [ $# -eq 0 ]; then
	rlwrap --remember -c -b "$BREAK_CHARS" -f "$cljcompletions" java -cp "$cp" "$cljclass"
else
	scriptname=$1
	# NOTE(review): "--$*" glues "--" onto $1 and repeats the script name in
	# the argument list; looks suspicious but is kept as-is — confirm the
	# intended argument contract of clojure.lang.Script before changing.
	exec java -classpath "$cp" "$cljscript" "$scriptname" --$*
fi
package org.ednovo.gooru.core.api.model;
import java.io.Serializable;
import java.util.Date;
/**
 * Serializable association between an {@link Event} and the {@link Template}
 * used for it, together with payload data, status and audit information.
 */
public class EventMapping implements Serializable {

    /** Serialization version marker (unchanged from the original definition). */
    private static final long serialVersionUID = -8945989654492560094L;

    private Event event;
    private Template template;
    // Free-form payload attached to this mapping.
    private String data;
    // User who created the association.
    private User associatedBy;
    // Status value taken from the custom lookup table.
    private CustomTableValue status;
    private Date createdDate;

    public Event getEvent() {
        return event;
    }

    public void setEvent(Event event) {
        this.event = event;
    }

    public Template getTemplate() {
        return template;
    }

    public void setTemplate(Template template) {
        this.template = template;
    }

    public String getData() {
        return data;
    }

    public void setData(String data) {
        this.data = data;
    }

    public User getAssociatedBy() {
        return associatedBy;
    }

    public void setAssociatedBy(User associatedBy) {
        this.associatedBy = associatedBy;
    }

    public CustomTableValue getStatus() {
        return status;
    }

    public void setStatus(CustomTableValue status) {
        this.status = status;
    }

    public Date getCreatedDate() {
        return createdDate;
    }

    public void setCreatedDate(Date createdDate) {
        this.createdDate = createdDate;
    }
}
|
# Deploy script for the staging environment: put the app into maintenance
# mode, sync with the remote repository, refresh dependencies and caches,
# run migrations, then bring the app back up.
php artisan down
git add .
# FIX: use the conventional "-m" flag (the original "--m" relied on git's
# long-option prefix abbreviation). Commits any server-side changes so the
# rebase below can proceed; exits non-zero (harmlessly) when there is
# nothing to commit.
git commit -m commit_from_server
git pull --rebase
git push
composer install
php artisan cache:clear
php artisan config:clear
# --force allows migrations to run non-interactively in a deployed env.
php artisan migrate --force
php artisan up
echo 'Deploy staging finished.'
#!/bin/bash
# Build and publish the waldur-homeport Docker image.
# Usage: <script> [tag] — without a tag the image is tagged "latest".
if [ -z "$1" ]
then
	export VERSION='latest'
else
	# Strip prefix from tag name so that v3.7.5 becomes 3.7.5
	export VERSION=${1#v}
fi
# Non-interactive registry login; --password-stdin keeps the secret out of argv.
echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
# FIX: quote the tag consistently with the push below.  VERSION is exported
# above, so the bare "--build-arg VERSION" picks its value from the env.
docker build -t "opennode/waldur-homeport:$VERSION" . --build-arg VERSION
docker push "opennode/waldur-homeport:$VERSION"
docker images
|
#!/bin/bash
#The admin interface for OpenVPN
#
# CGI script: renders an HTML admin page listing OpenVPN clients and
# handles "add"/"revoke" actions requested via the query string.
echo "Content-type: text/html"
echo ""
echo "<!doctype html>
<html lang=\"en\">
<head>
<meta charset=\"utf-8\">
<meta name=\"viewport\" content=\"width=device-width, initial-scale=1, shrink-to-fit=no\">
<meta name=\"description\" content=\" A simple OpenVPN server with a web-based admin panel..\">
<meta name=\"author\" content=\"Blaize Stewart\">
<title>Simple OpenVPN Server</title>
<!-- Bootstrap core CSS -->
<link rel=\"stylesheet\" href=\"https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css\" >
<meta name=\"theme-color\" content=\"#563d7c\">
<style>
body {
padding-top:100px;
}
.bd-placeholder-img {
font-size: 1.125rem;
text-anchor: middle;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
@media (min-width: 768px) {
.bd-placeholder-img-lg {
font-size: 3.5rem;
}
}
</style>
<!-- Custom styles for this template -->
</head>
<body >
<nav class=\"navbar navbar-expand-md navbar-dark bg-dark fixed-top\">
<a class=\"navbar-brand\" href=\"#\">Simple OpenVPN Server</a>
<button class=\"navbar-toggler\" type=\"button\" data-toggle=\"collapse\" data-target=\"#navbarsExampleDefault\" aria-controls=\"navbarsExampleDefault\" aria-expanded=\"false\" aria-label=\"Toggle navigation\">
<span class=\"navbar-toggler-icon\"></span>
</button>
<div class=\"collapse navbar-collapse\" id=\"navbarsExampleDefault\">
<ul class=\"navbar-nav mr-auto\">
</ul>
</div>
</nav>
<main role=\"main\" class=\"container\">
<div class=\"container\">"
# SECURITY FIX: the original ran `eval` over the raw query string
# (eval `echo "${QUERY_STRING}"|tr '&' ';'`), which allowed any visitor
# to execute arbitrary shell commands as the CGI user.  Parse only the
# two expected parameters instead.
option=""
client=""
saved_ifs=$IFS
IFS='&'
for pair in $QUERY_STRING; do
	case "$pair" in
		option=*) option=${pair#option=} ;;
		client=*) client=${pair#client=} ;;
	esac
done
IFS=$saved_ifs
# Whitelist the characters allowed in a client name; stripping "/" also
# blocks path traversal before the name is used to build file paths below.
client=$(printf '%s' "$client" | tr -cd 'A-Za-z0-9._-')
# Public IPv4 address of this host (best effort).
# NOTE(review): $IP is not referenced later in this script — confirm
# whether companion scripts rely on it before removing.
IP=$(wget -4qO- "http://whatismyip.akamai.com/")
newclient () {
	# Generates the custom client .ovpn profile for client "$1": the shared
	# client template followed by the CA cert, the client's certificate and
	# private key, and the TLS auth key, each in its inline tag.
	# FIX: quote "$1" everywhere so unusual client names cannot word-split
	# or glob when the paths are built.
	local ovpn="/etc/openvpn/clients/$1.ovpn"
	cp /etc/openvpn/client-common.txt "$ovpn"
	{
		echo "<ca>"
		cat /etc/openvpn/easy-rsa/pki/ca.crt
		echo "</ca>"
		echo "<cert>"
		cat "/etc/openvpn/easy-rsa/pki/issued/$1.crt"
		echo "</cert>"
		echo "<key>"
		cat "/etc/openvpn/easy-rsa/pki/private/$1.key"
		echo "</key>"
		echo "<tls-auth>"
		cat /etc/openvpn/ta.key
		echo "</tls-auth>"
	} >> "$ovpn"
}
cd /etc/openvpn/easy-rsa/
# Dispatch the requested admin action; $option and $client come from the
# query string parsed earlier in this script.
case $option in
"add") #Add a client
	# FIX: quote "$client" so it is passed as a single, literal argument.
	./easyrsa build-client-full "$client" nopass
	# Generates the custom client.ovpn
	newclient "$client"
	echo "<h3>Certificate for client <span style='color:red'>$client</span> added.</h3>"
	;;
"revoke") #Revoke a client
	# Hide easyrsa's verbose output from the rendered page.
	echo "<span style='display:none'>"
	./easyrsa --batch revoke "$client"
	./easyrsa gen-crl
	echo "</span>"
	# Drop the client's PKI artifacts and publish the fresh CRL.
	rm -rf "pki/reqs/$client.req"
	rm -rf "pki/private/$client.key"
	rm -rf "pki/issued/$client.crt"
	rm -rf /etc/openvpn/crl.pem
	cp /etc/openvpn/easy-rsa/pki/crl.pem /etc/openvpn/crl.pem
	# CRL is read with each client connection, when OpenVPN is dropped to nobody
	echo "<h3>Certificate for client <span style='color:red'>$client</span> revoked.</h3>"
	;;
esac
# Count valid ("V") certificates; the first index.txt entry is skipped
# (presumably the server's own certificate — see the "server" filter below).
NUMBEROFCLIENTS=$(tail -n +2 /etc/openvpn/easy-rsa/pki/index.txt | grep -c "^V")
if [[ "$NUMBEROFCLIENTS" = '0' ]]; then
	# FIX: the original emitted "<h3>" as the closing tag (invalid HTML).
	echo "<h3>You have no existing clients.</h3>"
else
	echo "<div class=\"container\">"
	# One row per valid certificate, with revoke/download links.
	while read c; do
		if [[ $(echo $c | grep -c "^V") = '1' ]]; then
			clientName=$(echo $c | cut -d '=' -f 2)
			if [[ "$clientName" != "server" ]] ; then
				echo "<div class=\"row\"><div class=\"col-md-4\">$clientName</div>"
				echo "<div class=\"col-md-2\"><a href='index.sh?option=revoke&client=$clientName'>🗑️ Revoke</a></div>"
				echo "<div class=\"col-md-2\"><a target='_blank' href='download.sh?client=$clientName'>📥 Download</a></div></div>"
			fi
		fi
	done </etc/openvpn/easy-rsa/pki/index.txt
	echo "</div>"
fi
# Form for adding a new client.
echo "
<div class=\"container\">
<form action='index.sh' method='get'>
<input type='hidden' name='option' value='add'>
New Client: <input type='text' name='client'><input type='submit' value='Add'>
</form>
</div>
"
echo "</div>
</main>
<script src=\"https://code.jquery.com/jquery-3.4.1.slim.min.js\" integrity=\"sha384-J6qa4849blE2+poT4WnyKhv5vZF5SrPo0iEjwBvKU7imGFAV0wwj1yYfoRSJoZ+n\" crossorigin=\"anonymous\"></script>
<script src=\"https://cdn.jsdelivr.net/npm/popper.js@1.16.0/dist/umd/popper.min.js\" integrity=\"sha384-Q6E9RHvbIyZFJoft+2mJbHaEWldlvI9IOYy5n3zV9zzTtmI3UksdQRVvoxMfooAo\" crossorigin=\"anonymous\"></script>
<script src=\"https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/js/bootstrap.min.js\" integrity=\"sha384-wfSDF2E50Y2D1uUdj0O3uMBJnjuUD4Ih7YwaYd1iqfktj0Uod8GCExl3Og8ifwB6\" crossorigin=\"anonymous\"></script>
</body>
</html>"
exit 0
<filename>artifacts/rest-webapp/src/main/java/com/springsource/insight/samples/rest/model/RestfulServiceImpl.java
/**
* Copyright 2009-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.springsource.insight.samples.rest.model;
import java.util.Collection;
import javax.inject.Inject;
import org.springframework.stereotype.Service;
/**
* @author lgoldstein
*/
@Service
public class RestfulServiceImpl implements RestfulService {
    private final RestfulRepository _repo;

    @Inject
    public RestfulServiceImpl(final RestfulRepository repo) {
        _repo = repo;
    }

    /*
     * @see com.springsource.insight.samples.rest.RestfulService#findAll()
     */
    @Override
    public RestfulDataList findAll() {
        final Collection<? extends RestfulData> values = _repo.findAll();
        if ((values == null) || values.isEmpty()) {
            return new RestfulDataList();
        }

        final RestfulDataList result = new RestfulDataList(values);
        // Replace every entry with a clone so callers cannot mutate the
        // repository's internal objects through the returned list.
        for (int index = 0; index < result.size(); index++) {
            result.set(index, result.get(index).clone());
        }
        return result;
    }

    /*
     * @see com.springsource.insight.samples.rest.RestfulService#getData(long)
     */
    @Override
    public RestfulData getData(long id) {
        return cloneOrNull(_repo.getData(id));
    }

    /*
     * @see com.springsource.insight.samples.rest.RestfulService#create(int)
     */
    @Override
    public RestfulData create(int balance) {
        return cloneOrNull(_repo.create(balance));
    }

    /*
     * @see com.springsource.insight.samples.rest.RestfulService#setBalance(long, int)
     */
    @Override
    public RestfulData setBalance(long id, int balance) {
        return cloneOrNull(_repo.setBalance(id, balance));
    }

    /*
     * @see com.springsource.insight.samples.rest.RestfulService#removeData(long)
     */
    @Override
    public RestfulData removeData(long id) {
        // The removed entity leaves the repository, so no defensive copy is made.
        return _repo.removeData(id);
    }

    // Returns a defensive clone of the given value, or null when it is null.
    private static RestfulData cloneOrNull(final RestfulData value) {
        return (value == null) ? null : value.clone();
    }
}
|
#!/bin/bash
#SBATCH --time=24:00:00
#SBATCH --nodes=1 --ntasks-per-node=2 --cpus-per-task=1
#SBATCH --mem=40G
# Slurm batch wrapper for the adVAE experiment.
# Positional arguments:
#   $1 - maximum random seed
#   $2 - dataset name
#   $3 - hyper-parameter sampling strategy
#   $4 - contamination rate
MAX_SEED=$1
DATASET=$2
HP_SAMPLING=$3
CONTAMINATION=$4
module load Julia/1.5.3-linux-x86_64
module load Python/3.8.2-GCCcore-9.3.0
# load virtualenv
source "${HOME}/AD/bin/activate"
export PYTHON="${HOME}/AD/bin/python"
# Make sure all Julia project dependencies are installed before running.
julia --project -e 'using Pkg; Pkg.instantiate();'
# FIX: quote all arguments consistently (the original mixed quoted and
# unquoted forms, so values with spaces would word-split).
julia ./adVAE.jl "${MAX_SEED}" "${DATASET}" "${HP_SAMPLING}" "${CONTAMINATION}"
|
// Code generated by go-swagger; DO NOT EDIT.
package networks
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/chmurakrajowa/terraform-provider-ochk/ochk/sdk/gen/models"
)
// VcsVirtualMachineGroupGetUsingGETReader is a Reader for the VcsVirtualMachineGroupGetUsingGET structure.
type VcsVirtualMachineGroupGetUsingGETReader struct {
// formats is the strfmt registry used when decoding response payloads.
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
// It dispatches on the HTTP status code: 200 returns the decoded success
// result, 400/404 are returned as typed error values, and any other code
// becomes a generic runtime.APIError.
func (o *VcsVirtualMachineGroupGetUsingGETReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewVcsVirtualMachineGroupGetUsingGETOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
case 400:
result := NewVcsVirtualMachineGroupGetUsingGETBadRequest()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
// Returned in the error position: a 400 is a failure from the caller's view.
return nil, result
case 404:
result := NewVcsVirtualMachineGroupGetUsingGETNotFound()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
default:
return nil, runtime.NewAPIError("unknown error", response, response.Code())
}
}
// NewVcsVirtualMachineGroupGetUsingGETOK creates a VcsVirtualMachineGroupGetUsingGETOK with default headers values
func NewVcsVirtualMachineGroupGetUsingGETOK() *VcsVirtualMachineGroupGetUsingGETOK {
return &VcsVirtualMachineGroupGetUsingGETOK{}
}
/*VcsVirtualMachineGroupGetUsingGETOK handles this case with default header values.
OK
*/
type VcsVirtualMachineGroupGetUsingGETOK struct {
// Payload holds the decoded 200-response body.
Payload *models.NetworkGetResponse
}
// Error renders the response as a string for Go's error interface.
func (o *VcsVirtualMachineGroupGetUsingGETOK) Error() string {
return fmt.Sprintf("[GET /vcs/networks/{networkId}][%d] vcsVirtualMachineGroupGetUsingGETOK %+v", 200, o.Payload)
}
// GetPayload returns the decoded response body.
func (o *VcsVirtualMachineGroupGetUsingGETOK) GetPayload() *models.NetworkGetResponse {
return o.Payload
}
// readResponse decodes the HTTP response body into Payload.
func (o *VcsVirtualMachineGroupGetUsingGETOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.NetworkGetResponse)
// response payload
// io.EOF from the consumer means an empty body, which is tolerated here.
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewVcsVirtualMachineGroupGetUsingGETBadRequest creates a VcsVirtualMachineGroupGetUsingGETBadRequest with default headers values
func NewVcsVirtualMachineGroupGetUsingGETBadRequest() *VcsVirtualMachineGroupGetUsingGETBadRequest {
return &VcsVirtualMachineGroupGetUsingGETBadRequest{}
}
/*VcsVirtualMachineGroupGetUsingGETBadRequest handles this case with default header values.
Bad request, error occurred. For more details see log messages.
*/
type VcsVirtualMachineGroupGetUsingGETBadRequest struct {
}
// Error renders the response as a string for Go's error interface.
func (o *VcsVirtualMachineGroupGetUsingGETBadRequest) Error() string {
return fmt.Sprintf("[GET /vcs/networks/{networkId}][%d] vcsVirtualMachineGroupGetUsingGETBadRequest ", 400)
}
// readResponse is a no-op: the 400 response carries no body to decode.
func (o *VcsVirtualMachineGroupGetUsingGETBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
return nil
}
// NewVcsVirtualMachineGroupGetUsingGETNotFound creates a VcsVirtualMachineGroupGetUsingGETNotFound with default headers values
func NewVcsVirtualMachineGroupGetUsingGETNotFound() *VcsVirtualMachineGroupGetUsingGETNotFound {
return &VcsVirtualMachineGroupGetUsingGETNotFound{}
}
/*VcsVirtualMachineGroupGetUsingGETNotFound handles this case with default header values.
Entity not found.
*/
type VcsVirtualMachineGroupGetUsingGETNotFound struct {
}
// Error renders the response as a string for Go's error interface.
func (o *VcsVirtualMachineGroupGetUsingGETNotFound) Error() string {
return fmt.Sprintf("[GET /vcs/networks/{networkId}][%d] vcsVirtualMachineGroupGetUsingGETNotFound ", 404)
}
// readResponse is a no-op: the 404 response carries no body to decode.
func (o *VcsVirtualMachineGroupGetUsingGETNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
return nil
}
|
using System;
/// <summary>
/// A comment ("comentario") attached to a post.
/// </summary>
public class Comentario
{
/// <summary>Unique identifier of the comment.</summary>
public long Id { get; set; }
/// <summary>Body text of the comment.</summary>
public string Texto { get; set; }
/// <summary>Creation timestamp of the comment.</summary>
public DateTime FechaCreacion { get; set; }
/// <summary>Identifier of the post this comment belongs to.</summary>
public long PostId { get; set; }
}
/// <summary>
/// Manages the lifecycle of <see cref="Comentario"/> instances.
/// </summary>
public class CommentManager
{
    // Last identifier handed out; assumed to be seeded from the data store.
    private long lastCommentId = 0;

    /// <summary>
    /// Persists the given comment (associating it with its post) and
    /// returns the newly assigned identifier.
    /// </summary>
    /// <param name="model">The comment to add.</param>
    /// <returns>The unique identifier assigned to the comment.</returns>
    public long AddComentario(Comentario model)
    {
        // Assign a fresh identifier before handing the entity to the store.
        // (Actual persistence / post association is assumed to be handled by
        // the surrounding data-access layer.)
        model.Id = GenerateUniqueId();
        return model.Id;
    }

    // Produces the next sequential comment identifier.
    private long GenerateUniqueId()
    {
        lastCommentId = lastCommentId + 1;
        return lastCommentId;
    }
}
/*
*
*/
package net.community.chest.jfree.jfreechart.chart.renderer;
import net.community.chest.jfree.jfreechart.ChartReflectiveAttributesProxy;
import org.jfree.chart.renderer.AbstractRenderer;
/**
* <P>Copyright GPLv2</P>
*
* @param <R> Type of {@link AbstractRenderer} being reflected
* @author <NAME>.
* @since Jun 8, 2009 12:02:13 PM
*/
public class AbstractRendererReflectiveProxy<R extends AbstractRenderer> extends ChartReflectiveAttributesProxy<R> {
/**
 * @param objClass the concrete {@link AbstractRenderer}-derived class being reflected
 * @param registerAsDefault presumably registers this instance as the default
 * proxy for the class — behavior is delegated entirely to the superclass
 * constructor; confirm against {@code ChartReflectiveAttributesProxy}
 * @throws IllegalArgumentException propagated from the superclass constructor
 * @throws IllegalStateException propagated from the superclass constructor
 */
protected AbstractRendererReflectiveProxy (Class<R> objClass, boolean registerAsDefault)
throws IllegalArgumentException, IllegalStateException
{
super(objClass, registerAsDefault);
}
}
|
# shellcheck shell=bats
load "${MAIN_BASH_IT_DIR?}/test/test_helper.bash"
# One-time, file-level setup: pull in the colors library and the base plugin.
function local_setup_file() {
setup_libs "colors"
load "${BASH_IT?}/plugins/available/base.plugin.bash"
}
# Per-test setup: each test works against a disposable copy of the tree.
function local_setup() {
# Copy the test fixture to the Bash-it folder
cp -RP "$BASH_IT/test/fixtures/bash_it"/* "$BASH_IT/"
}
# TODO Create global __is_enabled function
# TODO Create global __get_base_name function
# TODO Create global __get_enabled_name function
# Sanity check that local_setup copied the fixture profiles into place.
@test "bash-it: verify that the test fixture is available" {
assert_file_exist "$BASH_IT/profiles/test-bad-component.bash_it"
assert_file_exist "$BASH_IT/profiles/test-bad-type.bash_it"
}
# --- _command_exists / _binary_exists helper checks ---------------------
@test "helpers: _command_exists function exists" {
run type -a _command_exists &> /dev/null
assert_success
}
@test "helpers: _command_exists function positive test ls" {
run _command_exists ls
assert_success
}
@test "helpers: _command_exists function positive test bash-it" {
run _command_exists bash-it
assert_success
}
@test "helpers: _command_exists function negative test" {
run _command_exists __addfkds_dfdsjdf
assert_failure
}
@test "helpers: _binary_exists function exists" {
run type -a _binary_exists &> /dev/null
assert_success
}
@test "helpers: _binary_exists function positive test ls" {
run _binary_exists ls
assert_success
}
# _binary_exists must reject shell functions, unlike _command_exists.
@test "helpers: _binary_exists function negative test function" {
run _binary_exists _binary_exists
assert_failure
}
@test "helpers: _binary_exists function negative test" {
run _binary_exists __addfkds_dfdsjdf
assert_failure
}
# --- "bash-it help" output for aliases and plugins ----------------------
@test "helpers: bash-it help aliases ag" {
run bash-it help aliases "ag"
assert_line -n 0 "ag='ag --smart-case --pager=\"less -MIRFX'"
}
@test "helpers: bash-it help aliases without any aliases enabled" {
run bash-it help aliases
assert_line -n 0 ""
}
@test "helpers: bash-it help plugins" {
run bash-it help plugins
assert_line -n 1 "base:"
}
@test "helpers: bash-it help list aliases without any aliases enabled" {
run _help-list-aliases "$BASH_IT/aliases/available/ag.aliases.bash"
assert_line -n 0 "ag:"
}
@test "helpers: bash-it help list aliases with ag aliases enabled" {
ln -s $BASH_IT/aliases/available/ag.aliases.bash $BASH_IT/aliases/enabled/150---ag.aliases.bash
assert_link_exist "$BASH_IT/aliases/enabled/150---ag.aliases.bash"
run _help-list-aliases "$BASH_IT/aliases/enabled/150---ag.aliases.bash"
assert_line -n 0 "ag:"
}
# Component names containing dots must be handled correctly.
@test "helpers: bash-it help list aliases with todo.txt-cli aliases enabled" {
ln -s $BASH_IT/aliases/available/todo.txt-cli.aliases.bash $BASH_IT/aliases/enabled/150---todo.txt-cli.aliases.bash
assert_link_exist "$BASH_IT/aliases/enabled/150---todo.txt-cli.aliases.bash"
run _help-list-aliases "$BASH_IT/aliases/enabled/150---todo.txt-cli.aliases.bash"
assert_line -n 0 "todo.txt-cli:"
}
# Component names containing dashes must be handled correctly.
@test "helpers: bash-it help list aliases with docker-compose aliases enabled" {
ln -s $BASH_IT/aliases/available/docker-compose.aliases.bash $BASH_IT/aliases/enabled/150---docker-compose.aliases.bash
assert_link_exist "$BASH_IT/aliases/enabled/150---docker-compose.aliases.bash"
run _help-list-aliases "$BASH_IT/aliases/enabled/150---docker-compose.aliases.bash"
assert_line -n 0 "docker-compose:"
}
@test "helpers: bash-it help list aliases with ag aliases enabled in global directory" {
ln -s $BASH_IT/aliases/available/ag.aliases.bash $BASH_IT/enabled/150---ag.aliases.bash
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
run _help-list-aliases "$BASH_IT/enabled/150---ag.aliases.bash"
assert_line -n 0 "ag:"
}
# Legacy layout: components enabled under aliases/enabled/ rather than enabled/.
@test "helpers: bash-it help aliases one alias enabled in the old directory" {
ln -s $BASH_IT/aliases/available/ag.aliases.bash $BASH_IT/aliases/enabled/150---ag.aliases.bash
assert_link_exist "$BASH_IT/aliases/enabled/150---ag.aliases.bash"
run bash-it help aliases
assert_line -n 0 "ag:"
}
@test "helpers: bash-it help aliases one alias enabled in global directory" {
run bash-it enable alias "ag"
assert_line -n 0 'ag enabled with priority 150.'
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
run bash-it enable plugin "aws"
assert_line -n 0 'aws enabled with priority 250.'
assert_link_exist "$BASH_IT/enabled/250---aws.plugin.bash"
run bash-it help aliases
assert_line -n 0 "ag:"
assert_line -n 1 "ag='ag --smart-case --pager=\"less -MIRFX'"
}
# --- Enabling aliases, completions and plugins --------------------------
@test "helpers: enable the todo.txt-cli aliases through the bash-it function" {
run bash-it enable alias "todo.txt-cli"
assert_line -n 0 'todo.txt-cli enabled with priority 150.'
assert_link_exist "$BASH_IT/enabled/150---todo.txt-cli.aliases.bash"
}
@test "helpers: enable the curl aliases" {
run _enable-alias "curl"
assert_line -n 0 'curl enabled with priority 150.'
assert_link_exist "$BASH_IT/enabled/150---curl.aliases.bash"
}
@test "helpers: enable the apm completion through the bash-it function" {
run bash-it enable completion "apm"
assert_line -n 0 'apm enabled with priority 350.'
assert_link_exist "$BASH_IT/enabled/350---apm.completion.bash"
}
@test "helpers: enable the brew completion" {
run _enable-completion "brew"
assert_line -n 0 'brew enabled with priority 375.'
assert_link_exist "$BASH_IT/enabled/375---brew.completion.bash"
}
@test "helpers: enable the node plugin" {
run _enable-plugin "node"
assert_line -n 0 'node enabled with priority 250.'
assert_link_exist "$BASH_IT/enabled/250---node.plugin.bash" "../plugins/available/node.plugin.bash"
}
@test "helpers: enable the node plugin through the bash-it function" {
run bash-it enable plugin "node"
assert_line -n 0 'node enabled with priority 250.'
assert_link_exist "$BASH_IT/enabled/250---node.plugin.bash"
}
# Multiple components may be enabled in a single invocation.
@test "helpers: enable the node and nvm plugins through the bash-it function" {
run bash-it enable plugin "node" "nvm"
assert_line -n 0 'node enabled with priority 250.'
assert_line -n 1 'nvm enabled with priority 225.'
assert_link_exist "$BASH_IT/enabled/250---node.plugin.bash"
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
}
# An unknown component must not abort processing of the remaining ones.
@test "helpers: enable the foo-unkown and nvm plugins through the bash-it function" {
run bash-it enable plugin "foo-unknown" "nvm"
assert_line -n 0 'sorry, foo-unknown does not appear to be an available plugin.'
assert_line -n 1 'nvm enabled with priority 225.'
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
}
@test "helpers: enable the nvm plugin" {
run _enable-plugin "nvm"
assert_line -n 0 'nvm enabled with priority 225.'
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
}
@test "helpers: enable an unknown plugin" {
run _enable-plugin "unknown-foo"
assert_line -n 0 'sorry, unknown-foo does not appear to be an available plugin.'
# Check for both old and new structure
assert [ ! -L "$BASH_IT/plugins/enabled/250---unknown-foo.plugin.bash" ]
assert [ ! -L "$BASH_IT/plugins/enabled/unknown-foo.plugin.bash" ]
assert [ ! -L "$BASH_IT/enabled/250---unknown-foo.plugin.bash" ]
assert [ ! -L "$BASH_IT/enabled/unknown-foo.plugin.bash" ]
}
@test "helpers: enable an unknown plugin through the bash-it function" {
run bash-it enable plugin "unknown-foo"
echo "${lines[@]}"
assert_line -n 0 'sorry, unknown-foo does not appear to be an available plugin.'
# Check for both old and new structure
assert [ ! -L "$BASH_IT/plugins/enabled/250---unknown-foo.plugin.bash" ]
assert [ ! -L "$BASH_IT/plugins/enabled/unknown-foo.plugin.bash" ]
assert [ ! -L "$BASH_IT/enabled/250---unknown-foo.plugin.bash" ]
assert [ ! -L "$BASH_IT/enabled/unknown-foo.plugin.bash" ]
}
@test "helpers: disable a plugin that is not enabled" {
run _disable-plugin "sdkman"
assert_line -n 0 'sorry, sdkman does not appear to be an enabled plugin.'
}
# --- Enable/disable interactions across old and new directory layouts ---
@test "helpers: enable and disable the nvm plugin" {
run _enable-plugin "nvm"
assert_line -n 0 'nvm enabled with priority 225.'
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
assert [ ! -L "$BASH_IT/plugins/enabled/225---nvm.plugin.bash" ]
run _disable-plugin "nvm"
assert_line -n 0 'nvm disabled.'
assert [ ! -L "$BASH_IT/enabled/225---nvm.plugin.bash" ]
}
@test "helpers: disable the nvm plugin if it was enabled with a priority, but in the component-specific directory" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/225---nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/225---nvm.plugin.bash"
assert [ ! -L "$BASH_IT/enabled/225---nvm.plugin.bash" ]
run _disable-plugin "nvm"
assert_line -n 0 'nvm disabled.'
assert [ ! -L "$BASH_IT/plugins/enabled/225---nvm.plugin.bash" ]
assert [ ! -L "$BASH_IT/enabled/225---nvm.plugin.bash" ]
}
@test "helpers: disable the nvm plugin if it was enabled without a priority" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/nvm.plugin.bash"
run _disable-plugin "nvm"
assert_line -n 0 'nvm disabled.'
assert [ ! -L "$BASH_IT/plugins/enabled/nvm.plugin.bash" ]
}
# Enabling an already-enabled component must not create a duplicate link.
@test "helpers: enable the nvm plugin if it was enabled without a priority" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/nvm.plugin.bash"
run _enable-plugin "nvm"
assert_line -n 0 'nvm is already enabled.'
assert_link_exist "$BASH_IT/plugins/enabled/nvm.plugin.bash"
assert [ ! -L "$BASH_IT/plugins/enabled/225---nvm.plugin.bash" ]
assert [ ! -L "$BASH_IT/enabled/225---nvm.plugin.bash" ]
}
@test "helpers: enable the nvm plugin if it was enabled with a priority, but in the component-specific directory" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/225---nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/225---nvm.plugin.bash"
run _enable-plugin "nvm"
assert_line -n 0 'nvm is already enabled.'
assert [ ! -L "$BASH_IT/plugins/enabled/nvm.plugin.bash" ]
assert_link_exist "$BASH_IT/plugins/enabled/225---nvm.plugin.bash"
assert [ ! -L "$BASH_IT/enabled/225---nvm.plugin.bash" ]
}
@test "helpers: enable the nvm plugin twice" {
run _enable-plugin "nvm"
assert_line -n 0 'nvm enabled with priority 225.'
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
run _enable-plugin "nvm"
assert_line -n 0 'nvm is already enabled.'
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
}
# --- Profile save/load/rm/list ------------------------------------------
@test "helpers: profile load command sanity" {
run _bash-it-profile-load "default"
assert_success
assert_link_exist "$BASH_IT/enabled/150---general.aliases.bash"
assert_link_exist "$BASH_IT/enabled/250---base.plugin.bash"
assert_link_exist "$BASH_IT/enabled/800---aliases.completion.bash"
assert_link_exist "$BASH_IT/enabled/350---bash-it.completion.bash"
assert_link_exist "$BASH_IT/enabled/325---system.completion.bash"
}
@test "helpers: profile save command sanity" {
run _enable-plugin "nvm"
run _bash-it-profile-save "test"
assert_line -n 0 "Saving plugins configuration..."
assert_line -n 1 "Saving completion configuration..."
assert_line -n 2 "Saving aliases configuration..."
assert_line -n 3 "All done!"
assert_file_exist "$BASH_IT/profiles/test.bash_it"
}
@test "helpers: profile save creates valid file with only plugin enabled" {
run _enable-plugin "nvm"
run _bash-it-profile-save "test"
run cat "$BASH_IT/profiles/test.bash_it"
assert_line -n 0 "# This file is auto generated by Bash-it. Do not edit manually!"
assert_line -n 1 "# plugins"
assert_line -n 2 "plugins nvm"
}
@test "helpers: profile save creates valid file with only completion enabled" {
run _enable-completion "bash-it"
run _bash-it-profile-save "test"
run cat "$BASH_IT/profiles/test.bash_it"
assert_line -n 0 "# This file is auto generated by Bash-it. Do not edit manually!"
assert_line -n 1 "# completion"
assert_line -n 2 "completion bash-it"
}
@test "helpers: profile save creates valid file with only aliases enabled" {
run _enable-alias "general"
run _bash-it-profile-save "test"
run cat "$BASH_IT/profiles/test.bash_it"
assert_line -n 0 "# This file is auto generated by Bash-it. Do not edit manually!"
assert_line -n 1 "# aliases"
assert_line -n 2 "aliases general"
}
# Loading an empty profile must disable everything that was enabled later.
@test "helpers: profile edge case, empty configuration" {
run _bash-it-profile-save "test"
assert_line -n 3 "It seems like no configuration was enabled.."
assert_line -n 4 "Make sure to double check that this is the wanted behavior."
run _enable-alias "general"
run _enable-plugin "base"
run _enable-plugin "alias-completion"
run _enable-completion "bash-it"
run _enable-completion "system"
run _bash-it-profile-load "test"
assert_link_not_exist "$BASH_IT/enabled/150---general.aliases.bash"
assert_link_not_exist "$BASH_IT/enabled/250---base.plugin.bash"
assert_link_not_exist "$BASH_IT/enabled/800---aliases.completion.bash"
assert_link_not_exist "$BASH_IT/enabled/350---bash-it.completion.bash"
assert_link_not_exist "$BASH_IT/enabled/325---system.completion.bash"
}
# Round trip: save, disable something, load — the component must come back.
@test "helpers: profile save and load" {
run _enable-alias "general"
run _enable-plugin "base"
run _enable-plugin "alias-completion"
run _enable-completion "bash-it"
run _enable-completion "system"
run _bash-it-profile-save "test"
assert_success
run _disable-alias "general"
assert_link_not_exist "$BASH_IT/enabled/150---general.aliases.bash"
run _bash-it-profile-load "test"
assert_link_exist "$BASH_IT/enabled/150---general.aliases.bash"
}
@test "helpers: profile load corrupted profile file: bad component" {
run _bash-it-profile-load "test-bad-component"
assert_line -n 1 -p "Bad line(#12) in profile, aborting load..."
}
@test "helpers: profile load corrupted profile file: bad subdirectory" {
run _bash-it-profile-load "test-bad-type"
assert_line -n 1 -p "Bad line(#4) in profile, aborting load..."
}
@test "helpers: profile rm sanity" {
run _bash-it-profile-save "test"
assert_file_exist "$BASH_IT/profiles/test.bash_it"
run _bash-it-profile-rm "test"
assert_line -n 0 "Removed profile 'test' successfully!"
assert_file_not_exist "$BASH_IT/profiles/test.bash_it"
}
@test "helpers: profile rm no params" {
run _bash-it-profile-rm ""
assert_line -n 0 -p "Please specify profile name to remove..."
}
@test "helpers: profile load no params" {
run _bash-it-profile-load ""
assert_line -n 0 -p "Please specify profile name to load, not changing configuration..."
}
# The built-in "default" profile must be protected from removal.
@test "helpers: profile rm default" {
run _bash-it-profile-rm "default"
assert_line -n 0 -p "Can not remove the default profile..."
assert_file_exist "$BASH_IT/profiles/default.bash_it"
}
@test "helpers: profile rm bad profile name" {
run _bash-it-profile-rm "notexisting"
assert_line -n 0 -p "Could not find profile 'notexisting'..."
}
@test "helpers: profile list sanity" {
run _bash-it-profile-list
assert_line -n 0 "Available profiles:"
assert_line -n 1 "default"
}
# Listing is expected to be alphabetically sorted (hence the line indices).
@test "helpers: profile list more profiles" {
run _bash-it-profile-save "cactus"
run _bash-it-profile-save "another"
run _bash-it-profile-save "brother"
run _bash-it-profile-list
assert_line -n 0 "Available profiles:"
assert_line -n 4 "default"
assert_line -n 3 "cactus"
assert_line -n 1 "another"
assert_line -n 2 "brother"
}
@test "helpers: migrate plugins and completions that share the same name" {
ln -s $BASH_IT/completion/available/dirs.completion.bash $BASH_IT/completion/enabled/350---dirs.completion.bash
assert_link_exist "$BASH_IT/completion/enabled/350---dirs.completion.bash"
ln -s $BASH_IT/plugins/available/dirs.plugin.bash $BASH_IT/plugins/enabled/250---dirs.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/250---dirs.plugin.bash"
run _bash-it-migrate
assert_line -n 0 'Migrating plugin dirs.'
assert_line -n 1 'dirs disabled.'
assert_line -n 2 'dirs enabled with priority 250.'
assert_line -n 3 'Migrating completion dirs.'
assert_line -n 4 'dirs disabled.'
assert_line -n 5 'dirs enabled with priority 350.'
assert_line -n 6 'If any migration errors were reported, please try the following: reload && bash-it migrate'
assert_link_exist "$BASH_IT/enabled/350---dirs.completion.bash"
assert_link_exist "$BASH_IT/enabled/250---dirs.plugin.bash"
assert [ ! -L "$BASH_IT/completion/enabled/350----dirs.completion.bash" ]
assert [ ! -L "$BASH_IT/plugins/enabled/250----dirs.plugin.bash" ]
}
@test "helpers: migrate enabled plugins that don't use the new priority-based configuration" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/nvm.plugin.bash"
ln -s $BASH_IT/plugins/available/node.plugin.bash $BASH_IT/plugins/enabled/node.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/node.plugin.bash"
ln -s $BASH_IT/aliases/available/todo.txt-cli.aliases.bash $BASH_IT/aliases/enabled/todo.txt-cli.aliases.bash
assert_link_exist "$BASH_IT/aliases/enabled/todo.txt-cli.aliases.bash"
run _enable-plugin "ssh"
assert_link_exist "$BASH_IT/enabled/250---ssh.plugin.bash"
run _bash-it-migrate
assert_line -n 0 'Migrating alias todo.txt-cli.'
assert_line -n 1 'todo.txt-cli disabled.'
assert_line -n 2 'todo.txt-cli enabled with priority 150.'
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
assert_link_exist "$BASH_IT/enabled/250---node.plugin.bash"
assert_link_exist "$BASH_IT/enabled/250---ssh.plugin.bash"
assert_link_exist "$BASH_IT/enabled/150---todo.txt-cli.aliases.bash"
assert [ ! -L "$BASH_IT/plugins/enabled/node.plugin.bash" ]
assert [ ! -L "$BASH_IT/plugins/enabled/nvm.plugin.bash" ]
assert [ ! -L "$BASH_IT/aliases/enabled/todo.txt-cli.aliases.bash" ]
}
@test "helpers: migrate enabled plugins that use the new priority-based configuration in the individual directories" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/225---nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/225---nvm.plugin.bash"
ln -s $BASH_IT/plugins/available/node.plugin.bash $BASH_IT/plugins/enabled/250---node.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/250---node.plugin.bash"
ln -s $BASH_IT/aliases/available/todo.txt-cli.aliases.bash $BASH_IT/aliases/enabled/250---todo.txt-cli.aliases.bash
assert_link_exist "$BASH_IT/aliases/enabled/250---todo.txt-cli.aliases.bash"
run _enable-plugin "ssh"
assert_link_exist "$BASH_IT/enabled/250---ssh.plugin.bash"
run _bash-it-migrate
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
assert_link_exist "$BASH_IT/enabled/250---node.plugin.bash"
assert_link_exist "$BASH_IT/enabled/250---ssh.plugin.bash"
assert_link_exist "$BASH_IT/enabled/150---todo.txt-cli.aliases.bash"
assert [ ! -L "$BASH_IT/plugins/enabled/225----node.plugin.bash" ]
assert [ ! -L "$BASH_IT/plugins/enabled/250----nvm.plugin.bash" ]
assert [ ! -L "$BASH_IT/aliases/enabled/250----todo.txt-cli.aliases.bash" ]
}
@test "helpers: run the migrate command without anything to migrate and nothing enabled" {
run _bash-it-migrate
}
@test "helpers: run the migrate command without anything to migrate" {
run _enable-plugin "ssh"
assert_link_exist "$BASH_IT/enabled/250---ssh.plugin.bash"
run _bash-it-migrate
assert_link_exist "$BASH_IT/enabled/250---ssh.plugin.bash"
}
# Helper shared by the "migrate all ..." tests below.
#
# Links every available component of one type into the OLD per-type enabled
# directory (optionally with a legacy priority prefix), then runs
# `bash-it migrate` and asserts that every link moved to the new global
# enabled directory.
#
# $1 - subdirectory under $BASH_IT ("plugins", "aliases", "completion")
# $2 - component type used in filenames ("plugin", "aliases", "completion")
# $3 - optional legacy priority prefix (e.g. "100"); empty for none
function __migrate_all_components() {
local subdirectory="$1"
local one_type="$2"
local priority="$3"
local f to_enable
for f in "${BASH_IT}/${subdirectory}/available/"*.bash
do
# Quote "$f": unquoted it would word-split on filenames with spaces.
to_enable="$(basename "$f")"
if [ -z "$priority" ]; then
ln -s "../available/$to_enable" "${BASH_IT}/${subdirectory}/enabled/$to_enable"
else
ln -s "../available/$to_enable" "${BASH_IT}/${subdirectory}/enabled/$priority---$to_enable"
fi
done
# Debug aid: show what was linked before migrating.
ls "${BASH_IT}/${subdirectory}/enabled"
local all_available all_enabled_old all_enabled_old_after all_enabled_new_after
all_available=$(compgen -G "${BASH_IT}/${subdirectory}/available/*.$one_type.bash" | wc -l | xargs)
all_enabled_old=$(compgen -G "${BASH_IT}/${subdirectory}/enabled/*.$one_type.bash" | wc -l | xargs)
assert_equal "$all_available" "$all_enabled_old"
run bash-it migrate
# After migration the old per-type directory must be empty and the global
# enabled directory must hold every previously enabled component.
all_enabled_old_after=$(compgen -G "${BASH_IT}/${subdirectory}/enabled/*.$one_type.bash" | wc -l | xargs)
assert_equal "0" "$all_enabled_old_after"
all_enabled_new_after=$(compgen -G "${BASH_IT}/enabled/*.$one_type.bash" | wc -l | xargs)
assert_equal "$all_enabled_old" "$all_enabled_new_after"
}
@test "helpers: migrate all plugins" {
subdirectory="plugins"
one_type="plugin"
__migrate_all_components "$subdirectory" "$one_type"
}
@test "helpers: migrate all aliases" {
subdirectory="aliases"
one_type="aliases"
__migrate_all_components "$subdirectory" "$one_type"
}
@test "helpers: migrate all completions" {
subdirectory="completion"
one_type="completion"
__migrate_all_components "$subdirectory" "$one_type"
}
@test "helpers: migrate all plugins with previous priority" {
subdirectory="plugins"
one_type="plugin"
__migrate_all_components "$subdirectory" "$one_type" "100"
}
@test "helpers: migrate all aliases with previous priority" {
subdirectory="aliases"
one_type="aliases"
__migrate_all_components "$subdirectory" "$one_type" "100"
}
@test "helpers: migrate all completions with previous priority" {
subdirectory="completion"
one_type="completion"
__migrate_all_components "$subdirectory" "$one_type" "100"
}
@test "helpers: verify that existing components are automatically migrated when something is enabled" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/nvm.plugin.bash"
run bash-it enable plugin "node"
assert_line -n 0 'Migrating plugin nvm.'
assert_line -n 1 'nvm disabled.'
assert_line -n 2 'nvm enabled with priority 225.'
assert_line -n 3 'If any migration errors were reported, please try the following: reload && bash-it migrate'
assert_line -n 4 'node enabled with priority 250.'
assert [ ! -L "$BASH_IT/plugins/enabled/nvm.plugin.bash" ]
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
assert_link_exist "$BASH_IT/enabled/250---node.plugin.bash"
}
@test "helpers: verify that existing components are automatically migrated when something is disabled" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/nvm.plugin.bash"
ln -s $BASH_IT/plugins/available/node.plugin.bash $BASH_IT/plugins/enabled/250---node.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/250---node.plugin.bash"
run bash-it disable plugin "node"
assert_line -n 0 'Migrating plugin node.'
assert_line -n 1 'node disabled.'
assert_line -n 2 'node enabled with priority 250.'
assert_line -n 3 'Migrating plugin nvm.'
assert_line -n 4 'nvm disabled.'
assert_line -n 5 'nvm enabled with priority 225.'
assert_line -n 6 'If any migration errors were reported, please try the following: reload && bash-it migrate'
assert_line -n 7 'node disabled.'
assert [ ! -L "$BASH_IT/plugins/enabled/nvm.plugin.bash" ]
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
assert [ ! -L "$BASH_IT/plugins/enabled/250---node.plugin.bash" ]
assert [ ! -L "$BASH_IT/enabled/250---node.plugin.bash" ]
}
@test "helpers: enable all plugins" {
run _enable-plugin "all"
local available=$(find $BASH_IT/plugins/available -name *.plugin.bash | wc -l | xargs)
local enabled=$(find $BASH_IT/enabled -name [0-9]*.plugin.bash | wc -l | xargs)
assert_equal "$available" "$enabled"
}
@test "helpers: disable all plugins" {
run _enable-plugin "all"
local available=$(find $BASH_IT/plugins/available -name *.plugin.bash | wc -l | xargs)
local enabled=$(find $BASH_IT/enabled -name [0-9]*.plugin.bash | wc -l | xargs)
assert_equal "$available" "$enabled"
run _enable-alias "ag"
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
run _disable-plugin "all"
local enabled2=$(find $BASH_IT/enabled -name [0-9]*.plugin.bash | wc -l | xargs)
assert_equal "0" "$enabled2"
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
}
@test "helpers: disable all plugins in the old directory structure" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/nvm.plugin.bash"
ln -s $BASH_IT/plugins/available/node.plugin.bash $BASH_IT/plugins/enabled/node.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/node.plugin.bash"
local enabled=$(find $BASH_IT/plugins/enabled -name *.plugin.bash | wc -l | xargs)
assert_equal "2" "$enabled"
run _enable-alias "ag"
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
run _disable-plugin "all"
local enabled2=$(find $BASH_IT/plugins/enabled -name *.plugin.bash | wc -l | xargs)
assert_equal "0" "$enabled2"
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
}
@test "helpers: disable all plugins in the old directory structure with priority" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/250---nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/250---nvm.plugin.bash"
ln -s $BASH_IT/plugins/available/node.plugin.bash $BASH_IT/plugins/enabled/250---node.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/250---node.plugin.bash"
local enabled=$(find $BASH_IT/plugins/enabled -name *.plugin.bash | wc -l | xargs)
assert_equal "2" "$enabled"
run _enable-alias "ag"
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
run _disable-plugin "all"
local enabled2=$(find $BASH_IT/plugins/enabled -name *.plugin.bash | wc -l | xargs)
assert_equal "0" "$enabled2"
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
}
@test "helpers: disable all plugins without anything enabled" {
local enabled=$(find $BASH_IT/enabled -name [0-9]*.plugin.bash | wc -l | xargs)
assert_equal "0" "$enabled"
run _enable-alias "ag"
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
run _disable-plugin "all"
local enabled2=$(find $BASH_IT/enabled -name [0-9]*.plugin.bash | wc -l | xargs)
assert_equal "0" "$enabled2"
assert_link_exist "$BASH_IT/enabled/150---ag.aliases.bash"
}
@test "helpers: enable the ansible aliases through the bash-it function" {
run bash-it enable alias "ansible"
assert_line -n 0 'ansible enabled with priority 150.'
assert_link_exist "$BASH_IT/enabled/150---ansible.aliases.bash"
}
@test "helpers: describe the nvm plugin without enabling it" {
_bash-it-plugins | grep "nvm" | grep "\[ \]"
}
@test "helpers: describe the nvm plugin after enabling it" {
run _enable-plugin "nvm"
assert_line -n 0 'nvm enabled with priority 225.'
assert_link_exist "$BASH_IT/enabled/225---nvm.plugin.bash"
_bash-it-plugins | grep "nvm" | grep "\[x\]"
}
@test "helpers: describe the nvm plugin after enabling it in the old directory" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/nvm.plugin.bash"
_bash-it-plugins | grep "nvm" | grep "\[x\]"
}
@test "helpers: describe the nvm plugin after enabling it in the old directory with priority" {
ln -s $BASH_IT/plugins/available/nvm.plugin.bash $BASH_IT/plugins/enabled/225---nvm.plugin.bash
assert_link_exist "$BASH_IT/plugins/enabled/225---nvm.plugin.bash"
_bash-it-plugins | grep "nvm" | grep "\[x\]"
}
@test "helpers: describe the todo.txt-cli aliases without enabling them" {
run _bash-it-aliases
assert_line "todo.txt-cli [ ] todo.txt-cli abbreviations"
}
|
#!/bin/bash
#
# Copyright (C) 2016 The CyanogenMod Project
# Copyright (C) 2017-2020 The LineageOS Project
#
# SPDX-License-Identifier: Apache-2.0
#
# Abort immediately if any step fails.
set -e
# Device identifiers consumed by the shared setup-makefiles script.
export DEVICE=starlte
export DEVICE_COMMON=universal9810-common
export VENDOR=samsung
# Delegate to the common device's makefile generator, forwarding all args.
# NOTE(review): the relative path assumes this script runs from a device
# directory two levels below the vendor tree -- confirm before moving it.
"./../../${VENDOR}/${DEVICE_COMMON}/setup-makefiles.sh" "$@"
|
const num: number = 24
console.log(num) |
<filename>Pods/MPITextKit/Sources/MPITextGeometryHelpers.h
//
// MPITextGeometryHelpers.h
// MeituMV
//
// Created by Tpphha on 2019/3/24.
// Copyright © 2019 美图网. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <QuartzCore/QuartzCore.h>
/// Fit `rect` inside `size` according to a UIViewContentMode
/// (implementation in the .m file -- behavior not visible here).
FOUNDATION_EXTERN CGRect MPITextCGRectFitWithContentMode(CGRect rect, CGSize size, UIViewContentMode mode);
/// Get main screen's scale.
FOUNDATION_EXTERN CGFloat MPITextScreenScale(void);
/// Get main screen's size. Height is always larger than width.
FOUNDATION_EXTERN CGSize MPITextScreenSize(void);
/// Get one pixel.
FOUNDATION_EXTERN CGFloat MPITextOnePixel(void);
/// Transform mapping coordinates between two views -- presumably via a
/// common ancestor; confirm in the implementation.
FOUNDATION_EXTERN CGAffineTransform MPITextCGAffineTransformGetFromViews(UIView *from, UIView *to);
/// Transform that maps the three `before` points onto the three `after` points.
FOUNDATION_EXTERN CGAffineTransform MPITextCGAffineTransformGetFromPoints(CGPoint before[3], CGPoint after[3]);
/// Convert degrees to radians.
static inline CGFloat MPITextDegreesToRadians(CGFloat degrees) {
    return degrees * M_PI / 180;
}
/// Convert radians to degrees.
static inline CGFloat MPITextRadiansToDegrees(CGFloat radians) {
    return radians * 180 / M_PI;
}
/// Get the transform rotation.
/// @return the rotation in radians [-PI,PI] ([-180°,180°])
static inline CGFloat MPITextCGAffineTransformGetRotation(CGAffineTransform transform) {
    // atan2 of the (b, a) matrix entries yields the rotation angle.
    return atan2(transform.b, transform.a);
}
/// Convert point to pixel (multiply by the screen scale).
static inline CGFloat MPITextCGFloatToPixel(CGFloat value) {
    return value * MPITextScreenScale();
}
/// Convert pixel to point (divide by the screen scale).
static inline CGFloat MPITextCGFloatFromPixel(CGFloat value) {
    return value / MPITextScreenScale();
}
/// floor point value for pixel-aligned
static inline CGFloat MPITextCGFloatPixelFloor(CGFloat value) {
    CGFloat scale = MPITextScreenScale();
    return floor(value * scale) / scale;
}
/// round point value for pixel-aligned
static inline CGFloat MPITextCGFloatPixelRound(CGFloat value) {
    CGFloat scale = MPITextScreenScale();
    return round(value * scale) / scale;
}
/// ceil point value for pixel-aligned
static inline CGFloat MPITextCGFloatPixelCeil(CGFloat value) {
    CGFloat scale = MPITextScreenScale();
    // FLT_EPSILON guards against values that are a hair above an exact
    // pixel boundary due to floating-point error being ceiled up a pixel.
    return ceil((value - FLT_EPSILON) * scale) / scale;
}
/// round point value to .5 pixel for path stroke (odd pixel line width pixel-aligned)
static inline CGFloat MPITextCGFloatPixelHalf(CGFloat value) {
    CGFloat scale = MPITextScreenScale();
    return (floor(value * scale) + 0.5) / scale;
}
/// floor point value for pixel-aligned
static inline CGPoint MPITextCGPointPixelFloor(CGPoint point) {
    CGFloat scale = MPITextScreenScale();
    return CGPointMake(floor(point.x * scale) / scale,
                       floor(point.y * scale) / scale);
}
/// round point value for pixel-aligned
static inline CGPoint MPITextCGPointPixelRound(CGPoint point) {
    CGFloat scale = MPITextScreenScale();
    return CGPointMake(round(point.x * scale) / scale,
                       round(point.y * scale) / scale);
}
/// ceil point value for pixel-aligned
static inline CGPoint MPITextCGPointPixelCeil(CGPoint point) {
    CGFloat scale = MPITextScreenScale();
    return CGPointMake(ceil(point.x * scale) / scale,
                       ceil(point.y * scale) / scale);
}
/// round point value to .5 pixel for path stroke (odd pixel line width pixel-aligned)
static inline CGPoint MPITextCGPointPixelHalf(CGPoint point) {
    CGFloat scale = MPITextScreenScale();
    return CGPointMake((floor(point.x * scale) + 0.5) / scale,
                       (floor(point.y * scale) + 0.5) / scale);
}
/// Returns the (Euclidean) distance between two points.
static inline CGFloat MPITextCGPointGetDistanceToPoint(CGPoint p1, CGPoint p2) {
    return sqrt((p1.x - p2.x) * (p1.x - p2.x) + (p1.y - p2.y) * (p1.y - p2.y));
}
/// floor point value for pixel-aligned
static inline CGSize MPITextCGSizePixelFloor(CGSize size) {
    CGFloat scale = MPITextScreenScale();
    return CGSizeMake(floor(size.width * scale) / scale,
                      floor(size.height * scale) / scale);
}
/// round point value for pixel-aligned
static inline CGSize MPITextCGSizePixelRound(CGSize size) {
    CGFloat scale = MPITextScreenScale();
    return CGSizeMake(round(size.width * scale) / scale,
                      round(size.height * scale) / scale);
}
/// ceil point value for pixel-aligned
static inline CGSize MPITextCGSizePixelCeil(CGSize size) {
    CGFloat scale = MPITextScreenScale();
    return CGSizeMake(ceil(size.width * scale) / scale,
                      ceil(size.height * scale) / scale);
}
/// round point value to .5 pixel for path stroke (odd pixel line width pixel-aligned)
static inline CGSize MPITextCGSizePixelHalf(CGSize size) {
    CGFloat scale = MPITextScreenScale();
    return CGSizeMake((floor(size.width * scale) + 0.5) / scale,
                      (floor(size.height * scale) + 0.5) / scale);
}
/// Returns the area of the rectangle (0 for the null rect).
static inline CGFloat MPITextCGRectGetArea(CGRect rect) {
    if (CGRectIsNull(rect)) return 0;
    rect = CGRectStandardize(rect);
    return rect.size.width * rect.size.height;
}
/// Returns the minimum distance between a point and a rectangle
/// (0 when the point lies inside).
static inline CGFloat MPITextCGPointGetDistanceToRect(CGPoint p, CGRect r) {
    r = CGRectStandardize(r);
    if (CGRectContainsPoint(r, p)) return 0;
    CGFloat distV, distH;
    // Distance along each axis independently; one of them is 0 when the
    // point is within that axis's span.
    if (CGRectGetMinY(r) <= p.y && p.y <= CGRectGetMaxY(r)) {
        distV = 0;
    } else {
        distV = p.y < CGRectGetMinY(r) ? CGRectGetMinY(r) - p.y : p.y - CGRectGetMaxY(r);
    }
    if (CGRectGetMinX(r) <= p.x && p.x <= CGRectGetMaxX(r)) {
        distH = 0;
    } else {
        distH = p.x < CGRectGetMinX(r) ? CGRectGetMinX(r) - p.x : p.x - CGRectGetMaxX(r);
    }
    // NOTE(review): this is the Chebyshev (max-axis) distance, not the
    // Euclidean distance to the nearest edge -- confirm callers expect that.
    return MAX(distV, distH);
}
/// floor point value for pixel-aligned
/// (origin is CEILed and the far corner FLOORed, so the result is the
/// largest pixel-aligned rect contained in the input; degenerate sizes clamp to 0)
static inline CGRect MPITextCGRectPixelFloor(CGRect rect) {
    CGPoint origin = MPITextCGPointPixelCeil(rect.origin);
    CGPoint corner = MPITextCGPointPixelFloor(CGPointMake(rect.origin.x + rect.size.width,
                                                          rect.origin.y + rect.size.height));
    CGRect ret = CGRectMake(origin.x, origin.y, corner.x - origin.x, corner.y - origin.y);
    if (ret.size.width < 0) ret.size.width = 0;
    if (ret.size.height < 0) ret.size.height = 0;
    return ret;
}
/// round point value for pixel-aligned
static inline CGRect MPITextCGRectPixelRound(CGRect rect) {
    CGPoint origin = MPITextCGPointPixelRound(rect.origin);
    CGPoint corner = MPITextCGPointPixelRound(CGPointMake(rect.origin.x + rect.size.width,
                                                          rect.origin.y + rect.size.height));
    return CGRectMake(origin.x, origin.y, corner.x - origin.x, corner.y - origin.y);
}
/// ceil point value for pixel-aligned
/// (origin FLOORed, far corner CEILed: smallest pixel-aligned rect containing the input)
static inline CGRect MPITextCGRectPixelCeil(CGRect rect) {
    CGPoint origin = MPITextCGPointPixelFloor(rect.origin);
    CGPoint corner = MPITextCGPointPixelCeil(CGPointMake(rect.origin.x + rect.size.width,
                                                         rect.origin.y + rect.size.height));
    return CGRectMake(origin.x, origin.y, corner.x - origin.x, corner.y - origin.y);
}
/// round point value to .5 pixel for path stroke (odd pixel line width pixel-aligned)
static inline CGRect MPITextCGRectPixelHalf(CGRect rect) {
    CGPoint origin = MPITextCGPointPixelHalf(rect.origin);
    CGPoint corner = MPITextCGPointPixelHalf(CGPointMake(rect.origin.x + rect.size.width,
                                                         rect.origin.y + rect.size.height));
    return CGRectMake(origin.x, origin.y, corner.x - origin.x, corner.y - origin.y);
}
/// Returns the center for the rectangle.
static inline CGPoint MPITextCGRectGetCenter(CGRect rect) {
    return CGPointMake(CGRectGetMidX(rect), CGRectGetMidY(rect));
}
/// floor UIEdgeInset for pixel-aligned
static inline UIEdgeInsets MPITextUIEdgeInsetPixelFloor(UIEdgeInsets insets) {
    insets.top = MPITextCGFloatPixelFloor(insets.top);
    insets.left = MPITextCGFloatPixelFloor(insets.left);
    insets.bottom = MPITextCGFloatPixelFloor(insets.bottom);
    insets.right = MPITextCGFloatPixelFloor(insets.right);
    return insets;
}
/// ceil UIEdgeInset for pixel-aligned
static inline UIEdgeInsets MPITextUIEdgeInsetPixelCeil(UIEdgeInsets insets) {
    insets.top = MPITextCGFloatPixelCeil(insets.top);
    insets.left = MPITextCGFloatPixelCeil(insets.left);
    insets.bottom = MPITextCGFloatPixelCeil(insets.bottom);
    insets.right = MPITextCGFloatPixelCeil(insets.right);
    return insets;
}
/// UIEdgeInsets horizontal value (left + right).
static inline CGFloat MPITextUIEdgeInsetsGetHorizontalValue(UIEdgeInsets insets) {
    return insets.left + insets.right;
}
/// UIEdgeInsets vertical value (top + bottom).
static inline CGFloat MPITextUIEdgeInsetsGetVerticalValue(UIEdgeInsets insets) {
    return insets.top + insets.bottom;
}
|
#!/bin/bash
#
# MIT License
#
# Copyright (c) 2020 engineer365.org
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Fail fast on any error and echo each command for easier debugging
# (equivalent to the separate `set -e; set -x`).
set -ex

# Stage the pre-provisioned files into /opt, then bring the stack up,
# removing containers for services no longer defined.
cp -r /home/vagrant/files/opt/* /opt/
cd /opt/
docker-compose up -d --remove-orphans
|
from config import JobManager

import sys


def _main(config_path):
    """Run the full job pipeline: configure output, write scripts, submit."""
    manager = JobManager(config_path)
    manager.set_output_dir()
    manager.write()
    manager.write_run_scripts()
    manager.write_job_scripts()
    manager.submit()


if __name__ == '__main__':
    # First CLI argument is the configuration passed to JobManager.
    _main(sys.argv[1])
|
<filename>lib/include/basic_math.h
#ifndef BASIC_MATH_H
#define BASIC_MATH_H
#include <types.h>
/* Unsigned 32-bit division: returns v / by.
 * Behavior for by == 0 is defined in the implementation -- confirm there. */
uint32_t div_u32(uint32_t v, uint32_t by);
/* Unsigned 32-bit remainder: returns v % by (same by == 0 caveat). */
uint32_t mod_u32(uint32_t v, uint32_t by);
#endif
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.entity.database.derby;
import java.util.Collection;
import java.util.Map;
import javax.management.ObjectName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.config.ConfigKey;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.ConfigKeys;
import brooklyn.entity.basic.SoftwareProcess;
import brooklyn.entity.basic.SoftwareProcessImpl;
import brooklyn.entity.database.Database;
import brooklyn.entity.database.Schema;
import brooklyn.entity.java.UsesJava;
import brooklyn.entity.java.UsesJmx;
import brooklyn.event.basic.BasicAttributeSensorAndConfigKey;
import brooklyn.event.basic.BasicConfigKey;
import brooklyn.event.basic.PortAttributeSensorAndConfigKey;
import brooklyn.event.feed.jmx.JmxHelper;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.flags.SetFromFlag;
import com.google.common.base.Objects.ToStringHelper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
* An {@link Entity} that represents a single Derby SQL database server instance.
*
* TODO work in progress
*/
public class DerbyDatabase extends SoftwareProcessImpl implements Database, UsesJava, UsesJmx {
    private static final Logger log = LoggerFactory.getLogger(DerbyDatabase.class);

    // Default Derby version; overridable via the "version" flag.
    @SetFromFlag("version")
    public static final ConfigKey<String> SUGGESTED_VERSION =
            ConfigKeys.newConfigKeyWithDefault(SoftwareProcess.SUGGESTED_VERSION, "10.8.1.2");

    public static final PortAttributeSensorAndConfigKey JDBC_PORT = new PortAttributeSensorAndConfigKey(
            "derby.jdbcPort", "Suggested JDBC port");

    public static final ConfigKey<String> VIRTUAL_HOST_NAME = new BasicConfigKey<String>(
            String.class, "derby.virtualHost", "Derby virtual host name", "localhost");

    public static final BasicAttributeSensorAndConfigKey<String> JMX_USER = new BasicAttributeSensorAndConfigKey<String>(
            UsesJmx.JMX_USER, "admin");

    // NOTE(review): "<PASSWORD>" looks like a redacted placeholder, not a
    // real default -- confirm the intended default credential handling.
    public static final BasicAttributeSensorAndConfigKey<String> JMX_PASSWORD = new BasicAttributeSensorAndConfigKey<String>(
            UsesJmx.JMX_PASSWORD, "<PASSWORD>");

    // Names of schemas to create automatically in postStart().
    @SetFromFlag
    protected Collection<String> schemaNames;
    // Live schema objects keyed by name; populated by createSchema/addSchema.
    @SetFromFlag
    protected Map<String, DerbySchema> schemas;

    protected transient JmxHelper jmxHelper;

    public DerbyDatabase() {
        this(MutableMap.of(), null);
    }

    public DerbyDatabase(Map properties) {
        this(properties, null);
    }

    public DerbyDatabase(Entity parent) {
        this(MutableMap.of(), parent);
    }

    public DerbyDatabase(Map properties, Entity parent) {
        super(properties, parent);
        // Guard against flags not being set: fall back to empty collections.
        if (schemaNames == null) schemaNames = Lists.newArrayList();
        if (schemas == null) schemas = Maps.newLinkedHashMap();
    }

    @Override
    public Class<? extends DerbyDatabaseDriver> getDriverInterface() {
        return DerbyDatabaseDriver.class;
    }

    @Override
    public void connectSensors() {
        super.connectSensors();
        connectServiceUpIsRunning();
    }

    @Override
    public void disconnectSensors() {
        super.disconnectSensors();
        disconnectServiceUpIsRunning();
    }

    // After the process starts, create each configured schema.
    @Override
    public void postStart() {
        super.postStart();
        for (String name : schemaNames) {
            createSchema(name);
        }
    }

    // Before stopping, destroy all schemas and release the JMX connection.
    @Override
    public void preStop() {
        super.preStop();
        for (DerbySchema schema : schemas.values()) {
            schema.destroy();
        }
        if (jmxHelper != null) jmxHelper.terminate();
    }

    public void createSchema(String name) {
        createSchema(name, ImmutableMap.of());
    }

    // Creates, initializes and registers a schema; "name" overrides any
    // name supplied in properties.
    public void createSchema(String name, Map properties) {
        Map allprops = MutableMap.builder().putAll(properties).put("name", name).build();
        DerbySchema schema = new DerbySchema(allprops);
        schema.init();
        schema.create();
        schemas.put(name, schema);
    }

    public Collection<Schema> getSchemas() {
        return ImmutableList.<Schema>copyOf(schemas.values());
    }

    public void addSchema(Schema schema) {
        schemas.put(schema.getName(), (DerbySchema) schema);
    }

    public void removeSchema(String schemaName) {
        schemas.remove(schemaName);
    }

    @Override
    protected ToStringHelper toStringHelper() {
        return super.toStringHelper().add("jdbcPort", getAttribute(JDBC_PORT));
    }

    // Liveness probe: the node is considered up if Derby's ServerInformation
    // MBean reports a product version over JMX; any failure means "down".
    protected boolean computeNodeUp() {
        // FIXME Use the JmxAdapter.reachable() stuff instead of getAttribute
        try {
            ObjectName serverInfoObjectName = ObjectName.getInstance("org.apache.derby:type=ServerInformation,name=ServerInformation");
            String productVersion = (String) jmxHelper.getAttribute(serverInfoObjectName, "ProductVersion");
            return (productVersion != null);
        } catch (Exception e) {
            return false;
        }
    }
}
|
<reponame>bxyoung89/xorberax-visualizer<filename>js/data/palettes/helpers/gameboy-gradient.js
// GLSL snippet: maps a gradient stop in [0, 1] onto a four-tone Game Boy
// green palette using piecewise smoothstep blends between adjacent tones.
// Exported as a raw string for inclusion in a shader source.
export default `
vec4 gameboyGradient(float stop) {
vec4 color1 = vec4(0.058823529411764705, 0.2196078431372549, 0.058823529411764705, 0.0);
vec4 color2 = vec4(0.18823529411764706, 0.3843137254901961, 0.18823529411764706, 0.0);
vec4 color3 = vec4(0.5450980392156862, 0.6745098039215687, 0.058823529411764705, 0.0);
vec4 color4 = vec4(0.6078431372549019, 0.7372549019607844, 0.058823529411764705, 0.0);
if(stop == 0.0){
return color1;
}
vec4 color = mix(color1, color2, smoothstep(0.0, .33, stop));
color = mix(color, color3, smoothstep(.33, .66, stop));
color = mix(color, color4, smoothstep(.66, 1.0, stop));
return color;
}
`;
<filename>pkg/image/util_unix.go
//go:build !windows
// +build !windows
package image
import (
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/rancher/wharfie/pkg/extract"
)
// extractFiles was extracted as method to assist with differences in Windows.
// On non-Windows builds it simply delegates to wharfie's extract helper,
// unpacking the contents of img into dir.
func extractFiles(img v1.Image, dir string) error {
	return extract.Extract(img, dir)
}
|
#!/bin/bash
# SLURM batch job: single-core run of the meta.py experiment driver.
# The #SBATCH lines below are scheduler directives -- do not edit casually.
#SBATCH -J Act_maxout-4_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args: activation, seed, optimizer, and hyperparameters --
# exact meanings are defined by meta.py, not visible here.
python3 /home/se55gyhe/Act_func/progs/meta.py maxout-4 1 Adagrad 3 0.4218117983099172 271 0.014031103438449315 he_normal PE-infersent
|
package utils
import play.api.libs.json.{JsError, Reads}
import play.api.mvc.{PlayBodyParsers, Results}
import slick.util.Logging
import scala.concurrent.ExecutionContext
trait ValidateJson extends Logging {
  /** Play body parsers supplied by the mixing-in controller. */
  implicit def parse: PlayBodyParsers

  /**
   * Body parser that parses the request as JSON and validates it against
   * the implicit Reads[A]; validation failures short-circuit with a
   * 400 BadRequest carrying the JsError details as JSON.
   */
  def validateJson[A: Reads](implicit ec: ExecutionContext) = {
    parse.json.validate(_.validate[A].asEither.left.map(e => Results.BadRequest(JsError.toJson(e))))
  }
}
|
#pragma once
#include <clean-core/string_view.hh>
/**
* converts strings to primitive types
*
* returns true if the parsing was successful AND the whole string was consumed
*/
namespace cc
{
// Boolean: accepts exactly "true" or "false".
[[nodiscard]] bool from_string(cc::string_view s, bool& v); // "true" or "false"
[[nodiscard]] bool from_string(cc::string_view s, char& v);
// Signed integer overloads, one per builtin width.
[[nodiscard]] bool from_string(cc::string_view s, signed char& v);
[[nodiscard]] bool from_string(cc::string_view s, signed short& v);
[[nodiscard]] bool from_string(cc::string_view s, signed int& v);
[[nodiscard]] bool from_string(cc::string_view s, signed long& v);
[[nodiscard]] bool from_string(cc::string_view s, signed long long& v);
// Unsigned integer overloads.
[[nodiscard]] bool from_string(cc::string_view s, unsigned char& v);
[[nodiscard]] bool from_string(cc::string_view s, unsigned short& v);
[[nodiscard]] bool from_string(cc::string_view s, unsigned int& v);
[[nodiscard]] bool from_string(cc::string_view s, unsigned long& v);
[[nodiscard]] bool from_string(cc::string_view s, unsigned long long& v);
// Floating-point overloads.
[[nodiscard]] bool from_string(cc::string_view s, float& v);
[[nodiscard]] bool from_string(cc::string_view s, double& v);
}
|
package frc.robot.commands;
//import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj2.command.CommandBase;
import frc.robot.RobotContainer;
import frc.robot.misc.ControlChooser;
import frc.robot.subsystems.DriveTrain;
import frc.robot.Constants;
// Command that continuously feeds the left/right joystick Y axes into the
// drive train's tank-drive control. Never finishes on its own; intended as
// the drive train's default command.
public class DriveTrain_TankDrive extends CommandBase{
    // this looks useful but I literally do not think its used anywhere else
    // double driveSpeed = 10; //maximum percentage of motor to use

    /**
     * Creates a new DriveTrain_TankDrive.
     * NOTE(review): the "subsystem" parameter is ignored; the command
     * requires the static RobotContainer.m_driveTrain instead -- confirm
     * whether the parameter should be used.
     */
    public DriveTrain_TankDrive(DriveTrain subsystem){
        addRequirements(RobotContainer.m_driveTrain);
    }

    // called when the command is initially scheduled
    @Override
    public void initialize() {
        //driveSpeed = SmartDashboard.getNumber("Speed Percentage", 100);
    }

    // called when the scheduler runs while the command is scheduled;
    // axes are negated so pushing the stick forward drives forward.
    // NOTE(review): axis indices come from Constants.CANBusID -- the naming
    // suggests CAN IDs, not joystick axes; verify the constants.
    @Override
    public void execute() {
        RobotContainer.m_driveTrain.TankDrive(
            -ControlChooser.stick.getRawAxis(Constants.CANBusID.leftYAxis),
            -ControlChooser.stick.getRawAxis(Constants.CANBusID.rightYAxis)
        );
    }

    // called when the command ends or is interrupted
    @Override
    public void end(boolean interrupted){
    }

    // returns false: a default drive command runs until interrupted
    @Override
    public boolean isFinished() {
        return false;
    }
}
|
<filename>staging/src/k8s.io/code-generator/cmd/applyconfiguration-gen/generators/types.go
/*
Copyright 2021 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package generators
import "k8s.io/gengo/types"
// Type references used by the apply-configuration generator. Declaring
// them once here keeps the import paths for apimachinery and
// structured-merge-diff types out of the template bodies.
var (
	applyConfiguration = types.Ref("k8s.io/apimachinery/pkg/runtime", "ApplyConfiguration")
	groupVersionKind   = types.Ref("k8s.io/apimachinery/pkg/runtime/schema", "GroupVersionKind")
	objectMeta         = types.Ref("k8s.io/apimachinery/pkg/apis/meta/v1", "ObjectMeta")
	rawExtension       = types.Ref("k8s.io/apimachinery/pkg/runtime", "RawExtension")
	unknown            = types.Ref("k8s.io/apimachinery/pkg/runtime", "Unknown")
	extractInto        = types.Ref("k8s.io/apimachinery/pkg/util/managedfields", "ExtractInto")
	smdNewParser       = types.Ref("sigs.k8s.io/structured-merge-diff/v4/typed", "NewParser")
	smdParser          = types.Ref("sigs.k8s.io/structured-merge-diff/v4/typed", "Parser")
	yamlObject         = types.Ref("sigs.k8s.io/structured-merge-diff/v4/typed", "YAMLObject")
	yamlUnmarshal      = types.Ref("gopkg.in/yaml.v2", "Unmarshal")
)
|
# Installer for Record (rcd). Runs under both bash and zsh: the two shells
# differ in echo escape handling and read prompt syntax, so the running
# shell is detected first and EEXT/REDP parameterize those differences.
BLD="$(tput bold)"
NRM="$(tput sgr0)"
UND="$(tput smul)"
MACRO_HOME_DIR="${HOME}/rcd-home"
CREATE_MACRO_HOME_DIR="d"
HOME_DIR_CREATED=0
SET_LOCAL_BIN_PATH="NO"
LOCAL_SHELL="BASH"
# Shell detection MUST precede any use of EEXT (the original script read
# EEXT before it was assigned).
if [[ $(ps -p $$) =~ "bash" ]]; then
    EEXT="-e"
    REDP="-p"
else
    EEXT=""
    REDP=""
fi
# Snapshot of PATH before this script modifies it (referenced by the
# commented-out /usr/local/bin check below).
EXISTING_PATH="${PATH}"
# Print a banner around a step description.
function steps(){
    echo ${EEXT} "============================================================================================"
    echo ${EEXT} ${1}
    echo ${EEXT} "============================================================================================\n"
}
steps "STEP 1\t➸\t Create home directory for ${BLD}Record(rcd)${NRM}"
echo ${EEXT} "Default is ${BLD}${MACRO_HOME_DIR}${NRM}"
if [[ $(ps -p $$) =~ "bash" ]]; then
    read -p "Set Manually/Set Default [m/d] : ${BLD}" CREATE_MACRO_HOME_DIR
else
    # zsh read syntax: variable?prompt
    read "CREATE_MACRO_HOME_DIR?Set Manually/Set Default [m/d] : ${BLD}"
fi
while [[ "${HOME_DIR_CREATED}" = 0 ]]; do
    case "${CREATE_MACRO_HOME_DIR}" in
    m)
        echo ${EEXT} "${NRM}What is the home directory path? : ${BLD}" && read MACRO_HOME_DIR
        HOME_DIR_CREATED=1
        ;;
    d)
        echo ${EEXT} "${NRM}Setting the home directory as ${MACRO_HOME_DIR}"
        HOME_DIR_CREATED=1
        ;;
    *)
        read -p "${NRM}Invalid Option. Either Set manually/Set Default [m/d] : " CREATE_MACRO_HOME_DIR
        HOME_DIR_CREATED=0
        ;;
    esac
done
if [[ ! -d "${MACRO_HOME_DIR}" ]]; then
    # -p creates missing parents and tolerates a concurrent create.
    mkdir -p "${MACRO_HOME_DIR}"
    echo ${EEXT} "${NRM}Creating Macro's home directory ${MACRO_HOME_DIR}"
fi
echo ${EEXT} "${NRM}Macro's home directory has been set as ${BLD}${MACRO_HOME_DIR}${NRM}\n"
echo ${EEXT} "============================================================================================"
echo ${EEXT} "STEP 2\t➸\t Copying Record Files"
echo ${EEXT} "============================================================================================\n"
# Split the original "test || mkdir && cp" chains: with `a || b && c` the
# copy was gated on the mkdir branch by precedence, not by intent.
[ -d "${MACRO_HOME_DIR}/bin" ] || mkdir -p "${MACRO_HOME_DIR}/bin"
cp -f "$(pwd)/src/record.sh" "${MACRO_HOME_DIR}/bin/"
[ -d "${MACRO_HOME_DIR}/bin/cmd" ] || mkdir -p "${MACRO_HOME_DIR}/bin/cmd"
ln -s "${MACRO_HOME_DIR}/bin/record.sh" "${MACRO_HOME_DIR}/bin/cmd/rcd" 2>/dev/null
echo ${EEXT} "Record files has been copied"
touch "${MACRO_HOME_DIR}/bin/record.config"
echo ${EEXT} "Record config file has been copied\n"
echo ${EEXT} "============================================================================================"
echo ${EEXT} "STEP 3\t➸\t Adding Record (rcd) command to PATH"
echo ${EEXT} "============================================================================================\n"
# if [[ ! ${EXISTING_PATH} =~ "/usr/local/bin" ]]; then
# echo ${EEXT} -e "${BLD}usr/local/bin${NRM} not available in PATH."
# read -p "Shall I add the /usr/local/bin to PATH?" USER_ADD_LOCAL_BIN_TO_PATH
# fi
LOCAL_SHELL=$(echo ${EEXT} ${0})
ADDED_TO_BASH=0
ADDED_TO_ZSH=0
if test -n "$BASH_VERSION"; then
    echo ${EEXT} "You are using ${BLD}bash ${BASH_VERSION}${NRM}"
    while [[ ${ADDED_TO_BASH} = 0 ]]; do
        if [[ ! $(echo ${EEXT} ${PATH}) =~ "${MACRO_HOME_DIR}/bin/cmd" ]]; then
            # Escape $PATH so the profile line expands at login time instead
            # of baking today's PATH into .bash_profile permanently.
            echo "export PATH=\$PATH:${MACRO_HOME_DIR}/bin/cmd" >>"${HOME}/.bash_profile"
            echo ${EEXT} "Added to PATH"
            source "${HOME}/.bash_profile"
            echo ${EEXT} "BASH profile sourced\n"
            # Terminate the loop here too: previously the flag was only set
            # in the else branch, which could loop (and keep appending) if
            # sourcing the profile did not update PATH.
            ADDED_TO_BASH=1
        else
            echo ${EEXT} "Already available in PATH. Only BASH profile sourced\n"
            ADDED_TO_BASH=1
        fi
    done
    echo ${EEXT} "============================================================================================"
    echo ${EEXT} "Well done! Installation completed. To know what ${BLD}Record(rcd)${NRM} can do, type ${BLD}rcd -h${NRM}"
    echo ${EEXT} "============================================================================================\n"
    if [[ ! $(echo ${EEXT} $PATH) =~ ${MACRO_HOME_DIR}/bin/cmd ]]; then
        echo ${EEXT} "Type ${BLD} source ~/.bash_profile ${NRM}to activate the command\n"
    fi
elif test -n "$ZSH_VERSION"; then
    echo ${EEXT} "You are using ${BLD}zsh ${ZSH_VERSION}${NRM}"
    while [[ ${ADDED_TO_ZSH} = 0 ]]; do
        if [[ ! $(grep "PATH" ${HOME}/.zshrc) =~ "${MACRO_HOME_DIR}/bin/cmd" ]]; then
            echo "export PATH=\$PATH:${MACRO_HOME_DIR}/bin/cmd" >>"${HOME}/.zshrc"
            echo ${EEXT} "Added to PATH\n"
            source "${HOME}/.zshrc"
            # echo ${EEXT} "ZSH Profile sourced"
            ADDED_TO_ZSH=1
        else
            echo ${EEXT} "Already available at export PATH in your zsh profile\n"
            ADDED_TO_ZSH=1
        fi
    done
    echo ${EEXT} "============================================================================================"
    echo ${EEXT} "Well done! Installation completed. To know what ${BLD}Record(rcd)${NRM} can do, type ${BLD}rcd -h${NRM}"
    echo ${EEXT} "============================================================================================\n"
    if [[ ! $(echo ${EEXT} $PATH) =~ ${MACRO_HOME_DIR}/bin/cmd ]]; then
        echo ${EEXT} "Type ${BLD} source ~/.zshrc ${NRM}to activate the command\n"
    fi
fi
|
-- Total amount across all orders placed by customer 21.
SELECT SUM(amount) FROM orders WHERE customer_id = 21
<gh_stars>0
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "otbSoilDataBase.h"
#include <fstream>
#include <sstream>
#include <algorithm>
#include <boost/algorithm/string.hpp>
#include "itkMacro.h"
namespace otb
{
// Construct the soil database: remember the file name and the wavelength
// scale factor, then parse the file eagerly so later lookups never touch
// the disk. Throws (via ParseSoilFile) if the file cannot be read.
SoilDataBase::SoilDataBase(const std::string& SoilFileName, double wlfactor) :
  m_SoilFileName(SoilFileName), m_WlFactor(wlfactor)
{
  ParseSoilFile();
} // fixed: removed stray ';' after the function body
const typename SoilDataBase::SoilDataVector& SoilDataBase::GetDB() const
{
return m_SoilDataVector;
}
// Reflectance of soil SoilIndex at wavelength wl.
// Exact matches return the stored value; wavelengths outside the measured
// range clamp to the nearest endpoint; anything in between is linearly
// interpolated from the two surrounding measurements.
// Throws std::range_error for an out-of-range soil index.
double SoilDataBase::GetReflectance(size_t SoilIndex, WavelenghtType wl) const
{
  if(SoilIndex >= m_SoilDataVector.size())
    {
    std::stringstream errmessg;
    errmessg << "Soil index must be >= 0 and < " << m_SoilDataVector.size() <<
      ". " << SoilIndex <<" was passed.\n";
    throw std::range_error( errmessg.str() );
    }
  // wl not in the set of measured ones
  if(m_SoilDataVector[SoilIndex].find(wl)==m_SoilDataVector[SoilIndex].end())
    {
    // m_Wavelengths is sorted ascending by ParseSoilFile.
    const auto wlmin = m_Wavelengths[0];
    const auto wlmax = m_Wavelengths[m_Wavelengths.size()-1];
    if(wl<wlmin) return (*m_SoilDataVector[SoilIndex].find(wlmin)).second;
    if(wl>wlmax) return (*m_SoilDataVector[SoilIndex].find(wlmax)).second;
    // First measured wavelength >= wl; wl is strictly inside the range here,
    // so p is neither begin nor end.
    const auto p = std::partition_point(m_Wavelengths.cbegin(), m_Wavelengths.cend(),
                                        [&](WavelenghtType w){ return w<wl;}
                                        );
    const auto wlinf = *(p-1);
    const auto wlsup = *p;
    const auto factinf = wl-wlinf;   // distance to the lower neighbour
    const auto factsup = wlsup-wl;   // distance to the upper neighbour
    const auto vinf = (*m_SoilDataVector[SoilIndex].find(wlinf)).second;
    const auto vsup = (*m_SoilDataVector[SoilIndex].find(wlsup)).second;
    // Linear interpolation: the nearer sample gets the larger weight.
    // Fixed: the previous code used (vinf*factinf+vsup*factsup), which
    // inverted the weights and returned ~vsup as wl approached wlinf.
    return (vinf*factsup+vsup*factinf)/(factinf+factsup);
    }
  else
    {
    return (*m_SoilDataVector[SoilIndex].find(wl)).second;
    }
}
// Number of space-separated columns in the FIRST line of fileName.
// Runs of consecutive spaces count as a single separator; the line is
// trimmed first so leading/trailing blanks do not add columns.
// NOTE(review): only ' ' is recognized — tab-separated files would be
// counted as one column; confirm the expected file format.
// Throws (via itkGenericExceptionMacro) if the file cannot be opened or
// the first line cannot be read.
size_t SoilDataBase::CountColumns(std::string fileName) const
{
  std::ifstream ifile(fileName);
  std::string line;
  if (ifile.is_open())
    {
    size_t nbSpaces = 0;
    if(!getline(ifile,line))
      {
      itkGenericExceptionMacro(<< "Error reading file " << fileName);
      }
    ifile.close();
    boost::trim(line);
    auto found = line.find(' ');
    while(found!=std::string::npos)
      {
      ++nbSpaces;
      // Skip over a run of spaces so it counts as one separator.
      while(line[found+1] == ' ') ++found;
      found = line.find(' ', found+1);
      }
    // Columns = separators + 1.
    return nbSpaces+1;
    }
  else
    {
    itkGenericExceptionMacro(<< "Could not open file " << fileName);
    }
}
// Parse m_SoilFileName into m_SoilDataVector and m_Wavelengths.
// Expected format: one line per wavelength; first column is the wavelength
// (scaled by m_WlFactor), each remaining column is the reflectance of one
// soil. Lines of 3 characters or fewer are skipped as blanks/noise.
// Wavelengths are sorted ascending at the end, which GetReflectance
// relies on for clamping and interpolation.
void SoilDataBase::ParseSoilFile()
{
  // One soil per data column (first column is the wavelength).
  auto number_of_soils = CountColumns(m_SoilFileName) - 1;
  m_SoilDataVector.resize(number_of_soils);
  std::ifstream sdb(m_SoilFileName);
  if(! sdb.is_open())
    {
    itkGenericExceptionMacro(<< "Could not open file " << m_SoilFileName);
    }
  std::string line;
  while(std::getline(sdb, line))
    {
    if(line.size() > 3)
      {
      std::stringstream ss(line);
      double tmpwl;
      ss >> tmpwl;
      // Store wavelengths in scaled integer form (e.g. nm).
      WavelenghtType wl = static_cast<WavelenghtType>(m_WlFactor*tmpwl);
      m_Wavelengths.push_back(wl);
      // NOTE(review): assumes every data line has number_of_soils values;
      // a short line would silently store stale/zero reflectances.
      for(size_t i=0; i< number_of_soils; ++i)
        {
        double refl;
        ss >> refl;
        m_SoilDataVector[i][wl] = refl;
        }
      }
    }
  std::sort(m_Wavelengths.begin(), m_Wavelengths.end());
}
}// namespace otb
|
def update_theme_property(theme, property, value):
    """Set ``property`` on the ``theme`` mapping to ``value``, in place.

    Returns None; the caller's dict is mutated directly.
    """
    theme.update({property: value})
# Demo: mutate a sample theme dict and show the result.
theme = {
    'primary_color': 'blue',
    'font_family': 'Arial',
    'font_size': 14,
    'layout': 'grid'
}
update_theme_property(theme, 'font_size', 16)
print(theme)  # Output: {'primary_color': 'blue', 'font_family': 'Arial', 'font_size': 16, 'layout': 'grid'}
// Define the authGuard middleware function
/**
 * Authentication + authorization guard middleware (Slim-style signature).
 *
 * NOTE(review): $userAuthenticated and $userHasPermissions are never
 * defined in this function nor passed in — as written they are undefined
 * (falsy), so every request would get the 401 response. They are
 * presumably meant to be derived from $request (session/token); confirm
 * and wire them up before using this guard.
 */
function authGuard($request, $response, $next)
{
    // Check if the user is authenticated (e.g., using session or token-based authentication)
    if ($userAuthenticated) {
        // Check if the user has the necessary permissions to access the route
        if ($userHasPermissions) {
            // If authenticated and authorized, allow the request to proceed
            return $next($request, $response);
        } else {
            // If not authorized, return a 403 Forbidden error response
            return $response->withStatus(403)->withJson(['error' => 'Unauthorized access']);
        }
    } else {
        // If not authenticated, return a 401 Unauthorized error response
        return $response->withStatus(401)->withJson(['error' => 'Authentication required']);
    }
}
#!/bin/bash
#
# Scripts which download checkpoints for provided models, and combine them.
#
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Any first argument selects plain docker with the CPU (rc2) image; no
# argument means GPU via nvidia-docker. NOTE(review): confirm this
# inversion — and the 1.1.0-rc2 tag on the CPU path — is intended.
if [ -z "$1" ]; then
  docker_img=tensorflow/tensorflow:1.1.0-gpu-py3
  docker_bin=nvidia-docker
else
  docker_img=tensorflow/tensorflow:1.1.0-rc2-py3
  docker_bin=docker
fi
# On macOS mktemp returns a path under /var (a symlink); prefixing /private
# yields the real path so Docker can mount it.
if [[ "${OSTYPE}" == "darwin"* ]]; then
  TMP_DIR="/private"$(mktemp -d)
else
  TMP_DIR=$(mktemp -d)
fi
echo "${docker_bin}, docker image: ${docker_img}, temp directory: ${TMP_DIR}"
cp "$SCRIPT_DIR"/*.py "$TMP_DIR"/.
cd "$TMP_DIR"
echo "Downloading tensorflow checkpoints..."
# Download inception v3 checkpoint
wget http://download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz -q
tar -xvzf inception_v3_2016_08_28.tar.gz
rm inception_v3_2016_08_28.tar.gz
# Download adversarially trained inception v3 checkpoint
wget http://download.tensorflow.org/models/adv_inception_v3_2017_08_18.tar.gz -q
tar -xvzf adv_inception_v3_2017_08_18.tar.gz
rm adv_inception_v3_2017_08_18.tar.gz
# Download ensemble adversarially trained inception resnet v2 checkpoint
wget http://download.tensorflow.org/models/ens_adv_inception_resnet_v2_2017_08_18.tar.gz -q
tar -xvzf ens_adv_inception_resnet_v2_2017_08_18.tar.gz
rm ens_adv_inception_resnet_v2_2017_08_18.tar.gz
# Combine the checkpoints inside the container, then copy the result back.
"$docker_bin" run -v "$TMP_DIR":/code -w /code "$docker_img" python ensemble_models_v0.py
cp "$TMP_DIR"/*.ckpt* "$SCRIPT_DIR"/.
"$docker_bin" run -v "$TMP_DIR":/code -w /code "$docker_img" ls -l
# Files were created as root inside the container, so remove them the same
# way before deleting the (now-empty) temp dir.
"$docker_bin" run -v "$TMP_DIR":/code -w /code "$docker_img" rm -r *
rm -rf "$TMP_DIR"
cd "$SCRIPT_DIR"
# link all checkpoints for provided models.
"$SCRIPT_DIR"/link_checkpoints.sh
|
<gh_stars>1-10
package arouter.dawn.zju.edu.module_account.util;
import android.text.TextUtils;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import arouter.dawn.zju.edu.module_account.R;
import baselib.App;
/**
* @Auther: Dawn
* @Date: 2018/11/22 22:01
* @Description:
* 验证密码以及验证码的正确性
*/
public class VerificationUtil {
    /**
     * Validates a mobile number against a Chinese mobile-phone pattern
     * (13x, 15x except 154, 180/185-189, followed by 8 digits).
     *
     * NOTE(review): this returns the INVERSE of a match — {@code true}
     * means the number is NOT well formed. That matches the convention of
     * {@link #checkCodeCorrect(String)} below (true == error), so the
     * behavior is documented here rather than flipped; the original
     * Chinese javadoc claimed it returned "validity".
     *
     * @param mobiles phone number to check
     * @return true if the number does NOT match the pattern (invalid)
     */
    public static boolean checkPhoneNumber(String mobiles){
        Pattern p = Pattern.compile("^((13[0-9])|(15[^4,\\D])|(18[0,5-9]))\\d{8}$");
        Matcher m = p.matcher(mobiles);
        return !m.matches();
    }
    /**
     * Validates a new password and its confirmation.
     * Checks, in order: non-empty, both equal, length in [8, 32], and
     * complexity (see {@link #checkPasswordComplexity(String)}).
     *
     * @param password   the password
     * @param repassword the confirmation re-entry of the password
     * @return an empty string when the password is acceptable, otherwise a
     *         localized error message describing the first failed check
     */
    public static String checkPasswordCorrect(String password, String repassword) {
        if (TextUtils.isEmpty(password) || TextUtils.isEmpty(repassword)) {
            return App.getAppalication().getString(R.string.set_password_password_not_null);
        }
        if (!password.equals(repassword)) {
            return App.getAppalication().getString(R.string.set_password_password_diff);
        }
        if (password.length() < 8) {
            return App.getAppalication().getString(R.string.set_password_password_too_short);
        }
        if (password.length() > 32) {
            return App.getAppalication().getString(R.string.set_password_password_too_long);
        }
        if (!checkPasswordComplexity(password)) {
            return App.getAppalication().getString(R.string.set_password_password_too_simple);
        }
        return "";
    }
    /**
     * Validates a 6-digit numeric verification code.
     *
     * @param code the code entered by the user
     * @return true if the code is INVALID (null, not 6 characters, or not
     *         all digits); false when it is well formed — same inverted
     *         convention as {@link #checkPhoneNumber(String)}
     */
    public static boolean checkCodeCorrect(String code) {
        if (code == null || code.length() != 6) {
            return true;
        }
        Pattern pattern = Pattern.compile("[0-9]*");
        return !pattern.matcher(code).matches();
    }
    /**
     * Checks password complexity.
     *
     * @param password the password
     * @return true if it contains at least two of: digits, letters, symbols
     */
    private static boolean checkPasswordComplexity(String password) {
        boolean hasNumber = false;
        boolean hasChar = false;
        boolean hasSymbol = false;
        for (int i = 0; i < password.length(); i++) {
            char ch = password.charAt(i);
            if (Character.isDigit(ch)) {
                hasNumber = true;
            } else if (Character.isLowerCase(ch) || Character.isUpperCase(ch)) {
                hasChar = true;
            } else {
                hasSymbol = true;
            }
            // Two distinct character classes are enough; stop early.
            if ((hasNumber && hasChar) || (hasNumber && hasSymbol) || (hasChar && hasSymbol)) {
                return true;
            }
        }
        return false;
    }
}
|
#!/bin/bash
# Script for downloading ICU4C from upstream.
# Fail fast: without -e a failed download left a corrupt/missing `files`
# tree behind and the script still exited 0.
set -e
pushd "$(dirname "$0")" >/dev/null
rm -rf files
# -f: treat HTTP errors as failures; -L: follow redirects (the old
# download.icu-project.org URLs redirect to GitHub releases).
curl -fL http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz -o icu.tgz
tar zxf icu.tgz
mv icu files
rm icu.tgz
popd >/dev/null
|
#!/usr/bin/env bash
# Create a sudo-enabled user and hand it root's SSH credentials.
set -eu
# Abort with a usage message when no username was supplied (previously an
# empty $1 would run `adduser` interactively with no name and chown /home/).
user="${1:?usage: $0 <username>}"
adduser "$user"
usermod -aG sudo "$user"
cp -R /root/.ssh "/home/$user/"
cp ./ssh_config "/home/$user/.ssh/config"
# The copied files are root-owned; give them to the new user.
chown -R "$user:$user" "/home/$user/.ssh"
|
<filename>node_modules/react-icons-kit/md/ic_how_to_reg.js
"use strict";

// SVG path data for the Material Design "how to reg" icon, in the
// react-icons-kit data shape ({viewBox, children tree}). Generated data —
// the duplicated inner paths mirror the upstream SVG structure; do not
// hand-edit the coordinates.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_how_to_reg = void 0;
var ic_how_to_reg = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M0 0h24v24H0z",
      "fill": "none",
      "fill-rule": "evenodd"
    },
    "children": []
  }, {
    "name": "g",
    "attribs": {
      "fill-rule": "evenodd"
    },
    "children": [{
      "name": "path",
      "attribs": {
        "d": "M9 17l3-2.94c-.39-.04-.68-.06-1-.06-2.67 0-8 1.34-8 4v2h9l-3-3zm2-5c2.21 0 4-1.79 4-4s-1.79-4-4-4-4 1.79-4 4 1.79 4 4 4"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M9 17l3-2.94c-.39-.04-.68-.06-1-.06-2.67 0-8 1.34-8 4v2h9l-3-3zm2-5c2.21 0 4-1.79 4-4s-1.79-4-4-4-4 1.79-4 4 1.79 4 4 4"
        },
        "children": []
      }]
    }, {
      "name": "path",
      "attribs": {
        "d": "M15.47 20.5L12 17l1.4-1.41 2.07 2.08 5.13-5.17 1.4 1.41z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M15.47 20.5L12 17l1.4-1.41 2.07 2.08 5.13-5.17 1.4 1.41z"
        },
        "children": []
      }]
    }]
  }]
};
exports.ic_how_to_reg = ic_how_to_reg;
<filename>core/src/main/java/org/radargun/stages/cache/background/BackgroundStatisticsStartStage.java<gh_stars>1-10
package org.radargun.stages.cache.background;
import org.radargun.DistStageAck;
import org.radargun.config.Property;
import org.radargun.config.Stage;
import org.radargun.config.TimeConverter;
import org.radargun.stages.AbstractDistStage;
/**
* @author <NAME> <<EMAIL>>
*/
@Stage(doc = "Starts collection of statistics from background threads and cache size.")
public class BackgroundStatisticsStartStage extends AbstractDistStage {

    @Property(converter = TimeConverter.class, doc = "Delay between statistics snapshots. Default is 5 seconds.")
    private long statsIterationDuration = 5000; // milliseconds

    /**
     * Runs on each slave: obtains (or lazily creates) the shared
     * BackgroundOpsManager for this slave and starts its statistics
     * threads. Any failure is reported back as an error ack rather than
     * thrown.
     */
    @Override
    public DistStageAck executeOnSlave() {
        try {
            BackgroundOpsManager instance = BackgroundOpsManager.getOrCreateInstance(slaveState, statsIterationDuration);
            log.info("Starting statistics threads");
            instance.startStats();
            return successfulResponse();
        } catch (Exception e) {
            return errorResponse("Error while starting background stats", e);
        }
    }
}
|
#! /bin/sh
# --------------------------------------------------------------------
# -- *****************************
# -- * Trenz Electronic GmbH *
# -- * Holzweg 19A *
# -- * 32257 Bünde *
# -- * Germany *
# -- *****************************
# --------------------------------------------------------------------
# --$Autor: Hartfiel, John $
# --$Email: j.hartfiel@trenz-electronic.de $
# --$Create Date:2017/04/12 $
# --$Modify Date: 2018/05/25 $
# --$Version: 1.4 $
# -- check xilinx base install path
# --$Version: 1.3 $
# -- change Xilinx Setup files to support normal and SDX installation
# --$Version: 1.2 $
# -- export variables to be visible by Xilinx tools
# --------------------------------------------------------------------
# --------------------------------------------------------------------
# Run the main Vivado TCL script, logging from a dedicated v_log folder.
function vivado_start {
  echo ----------------------Change to log folder--------------------------
  # vlog folder
  vlog_folder=${bashfile_path}/v_log
  echo ${vlog_folder}
  if ! [ -d "$vlog_folder" ]; then
    mkdir ${vlog_folder}
  fi
  cd ${vlog_folder}
  # mkdir tmp
  # setenv XILINX_TCLSTORE_USERAREA tmp
  echo --------------------------------------------------------------------
  echo -------------------------Start VIVADO scripts -----------------------
  vivado -source ../scripts/script_main.tcl -mode batch -notrace -tclargs --gui 1
  echo -------------------------scripts finished----------------------------
  echo --------------------------------------------------------------------
  echo --------------------Change to design folder-------------------------
  cd ..
  echo ------------------------Design finished-----------------------------
  exit
}
echo ------------------------Set design paths----------------------------
# get paths
bashfile_name=${0##*/}
# bashfile_path=${0%/*}
bashfile_path=`pwd`
echo -- Run Design with: ${bashfile_name}
echo -- Use Design Path: ${bashfile_path}
echo ---------------------Load basic design settings---------------------
source $bashfile_path/design_basic_settings.sh
echo --------------------------------------------------------------------
echo ------------------Set Xilinx environment variables------------------
VIVADO_XSETTINGS=${XILDIR}/Vivado/${VIVADO_VERSION}/.settings64-Vivado.sh
SDK_XSETTINGS=${XILDIR}/SDK/${VIVADO_VERSION}/.settings64-SDK_Core_Tools.sh
LABTOOL_XSETTINGS=${XILDIR}/Vivado_Lab/${VIVADO_VERSION}/settings64.sh
if [ "${ENABLE_SDSOC}" == "" ]; then ENABLE_SDSOC=0; fi
if [ ${ENABLE_SDSOC} == 1 ]; then
  # SDx installations ship their own settings scripts; switch paths.
  # (fixed: this info line was printed twice)
  echo --Info: SDSOC use Vivado and SDK from SDx installation --
  SDSOC_XSETTINGS=${XILDIR}/SDx/${VIVADO_VERSION}/settings64.sh
  VIVADO_XSETTINGS=${XILDIR}/Vivado/${VIVADO_VERSION}/settings64.sh
  SDK_XSETTINGS=${XILDIR}/SDK/${VIVADO_VERSION}/settings64.sh
fi
# # --------------------
if [ "${VIVADO_AVAILABLE}" == "" ]; then export VIVADO_AVAILABLE=0; fi
if [ "${SDK_AVAILABLE}" == "" ]; then export SDK_AVAILABLE=0; fi
if [ "${LABTOOL_AVAILABLE}" == "" ]; then export LABTOOL_AVAILABLE=0; fi
if [ "${SDSOC_AVAILABLE}" == "" ]; then export SDSOC_AVAILABLE=0; fi
# # --------------------
echo -- Use Xilinx Version: ${VIVADO_VERSION} --
# Each tool is sourced at most once; *_ISDONE guards allow this script to
# be re-sourced safely in the same shell.
if [ "${VIVADO_XSETTINGS_ISDONE}" == "" ]; then echo --Info: Configure Xilinx Vivado Settings --
  if ! [ -e "${VIVADO_XSETTINGS}" ]; then
    echo -- Info: ${VIVADO_XSETTINGS} not found --
  else
    source ${VIVADO_XSETTINGS}
    export VIVADO_AVAILABLE=1
  fi
  VIVADO_XSETTINGS_ISDONE=1
fi
if [ "${SDK_XSETTINGS_ISDONE}" == "" ]; then echo --Info: Configure Xilinx SDK Settings --
  if ! [ -e "${SDK_XSETTINGS}" ]; then
    echo -- Info: ${SDK_XSETTINGS} not found --
  else
    source ${SDK_XSETTINGS}
    export SDK_AVAILABLE=1
  fi
  SDK_XSETTINGS_ISDONE=1
fi
if [ "${LABTOOL_XSETTINGS_ISDONE}" == "" ]; then echo --Info: Configure Xilinx LAbTools Settings --
  if ! [ -e "${LABTOOL_XSETTINGS}" ]; then
    echo -- Info: ${LABTOOL_XSETTINGS} not found --
  else
    source ${LABTOOL_XSETTINGS}
    export LABTOOL_AVAILABLE=1
  fi
  LABTOOL_XSETTINGS_ISDONE=1
fi
if [ "${SDSOC_XSETTINGS_ISDONE}" == "" ] && [ ${ENABLE_SDSOC} == 1 ]; then echo --Info: Configure Xilinx SDSoC Settings --
  if ! [ -e "${SDSOC_XSETTINGS}" ]; then
    echo -- Info: ${SDSOC_XSETTINGS} not found --
  else
    source ${SDSOC_XSETTINGS}
    export SDSOC_AVAILABLE=1
  fi
  SDSOC_XSETTINGS_ISDONE=1
fi
echo --------------------------------------------------------------------
# check important settings
if [ ${VIVADO_AVAILABLE} == 1 ]; then
  vivado_start
else
  echo -- Error: Need Vivado to run. --
  if ! [ -e "${XILDIR}" ]; then
    echo "-- Error: ${XILDIR} not found. Check path of XILDIR variable on design_basic_settings.sh (upper and lower case is important)"
  fi
  echo ---------------------------Error occurs-----------------------------
  echo --------------------------------------------------------------------
fi
|
<reponame>rootedbox/dancing-bananas-front-end<filename>packages/client/src/components/positions/range-status.tsx<gh_stars>1-10
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faCircle } from '@fortawesome/free-solid-svg-icons';
import classNames from 'classnames';
import { V3PositionData } from '@sommelier/shared-types/src/api';
import BigNumber from 'bignumber.js';
import './positions.scss';
// Renders a coloured in-range / out-of-range status pill for a Uniswap v3
// position, based on whether the pool's current token0 price lies inside
// the position's [lower, upper] price bounds.
export const RangeStatus = ({
    position,
}: {
    // position: Pick<V3PositionData, 'position'>;
    position: any;
}): JSX.Element => {
    // FixMe: flip tickUpper and tickLower once sub graph is synced
    // (deliberate swap below: `lower` reads tickUpper.price0 and `upper`
    // reads tickLower.price0 until the subgraph data is corrected).
    let isInRange = false;
    const lower = new BigNumber(position?.tickUpper?.price0);
    const upper = new BigNumber(position?.tickLower?.price0);
    const current = new BigNumber(position?.pool?.token0Price);
    // Missing data yields NaN BigNumbers; NaN comparisons are false, so the
    // status safely falls through to out-of-range.
    if (current.gte(lower) && current.lte(upper)) {
        isInRange = true;
    }
    return (
        <div
            className={classNames('range', {
                'in-range': isInRange,
                'out-range': !isInRange,
            })}
        >
            <FontAwesomeIcon icon={faCircle} />
            {isInRange ? 'In-range' : 'Out-of-Range'}
        </div>
    );
};
|
<filename>src/main/generated/com/globalcollect/gateway/sdk/java/gc/token/definitions/CustomerTokenWithContactDetails.java<gh_stars>0
package com.globalcollect.gateway.sdk.java.gc.token.definitions;
import com.globalcollect.gateway.sdk.java.gc.token.definitions.ContactDetailsToken;
import com.globalcollect.gateway.sdk.java.gc.token.definitions.CustomerToken;
/**
 * A stored customer token extended with the customer's contact details.
 * Lives under src/main/generated — presumably produced by the SDK
 * generator; prefer regenerating over hand-editing.
 */
public class CustomerTokenWithContactDetails extends CustomerToken {

    // Contact details attached to this token; null until set.
    private ContactDetailsToken contactDetails = null;

    /** @return the contact details, or null if none have been set */
    public ContactDetailsToken getContactDetails() {
        return contactDetails;
    }

    /** @param value the contact details to attach to this token */
    public void setContactDetails(ContactDetailsToken value) {
        this.contactDetails = value;
    }
}
|
import projects from './modules/projects'
import tasks from './modules/tasks'
import {updateProgressBar} from './modules/functions/progressbar'
// Initialize the progress bar once the initial DOM is parsed.
document.addEventListener('DOMContentLoaded', () =>{
    updateProgressBar();
});
import sys, time, argparse
from matalg.parsers.markov import MarkovParser
from matalg.parsers.turing import TuringParser
class ErrorCode:
    """Process exit codes returned by :func:`main`.

    Negative values are usage/configuration errors; positive values are
    runtime failures.
    """
    BAD_TIMEOUT_VALUE = -3   # -i given with a non-positive timeout
    BAD_MODEL = -2           # neither or both of -T/-M given
    HELP_SHOWN = -1          # no source file; help text was printed
    SUCCESS = 0
    TIMEOUT_EXCEED = 1       # execution did not finish within the timeout
def main(argv=None) -> int:
    """Command-line entry point for matalg.

    Parses arguments, builds the requested algorithm model (Markov or
    Turing) from the source file, and runs it on the input string —
    step-by-step with output when --trace is given, otherwise in one shot
    with a timeout.

    :param argv: argument list; defaults to ``sys.argv[1:]``.
    :return: one of the :class:`ErrorCode` constants.
    """
    arg_parser = argparse.ArgumentParser("matalg", description="Executes algorithm models.")
    arg_parser.add_argument("-f", "--file", help="algorithm source file", nargs=1)
    arg_parser.add_argument("-t", "--trace", help="trace execution by steps", action="store_true")
    arg_parser.add_argument("-i", "--timeout", help="set timeout [default=1 sec]", nargs=1, type=float)
    arg_parser.add_argument("-T", help="parse as turing machine", action="store_true")
    # fixed typo in help text: "algorifm" -> "algorithm"
    arg_parser.add_argument("-M", help="parse as markov algorithm", action="store_true")
    arg_parser.add_argument("input_string", metavar="input", type=str, nargs="?", help="input string")
    if argv is None:
        argv = sys.argv[1:]
    namespace = arg_parser.parse_args(argv)
    if namespace.file is None:
        arg_parser.print_help()
        return ErrorCode.HELP_SHOWN
    # Exactly one of -T / -M must be chosen.
    if namespace.T == namespace.M:
        print("Choose either Turing or Markov model.")
        return ErrorCode.BAD_MODEL
    if namespace.timeout is not None and namespace.timeout[0] <= 0:
        print("Bad timeout value")
        return ErrorCode.BAD_TIMEOUT_VALUE
    timeout = 1.0 if namespace.timeout is None else namespace.timeout[0]
    # Close the source file promptly instead of leaking the handle
    # (previously: open(...).read() with no close).
    with open(namespace.file[0], "r") as source_file:
        source = source_file.read()
    algo = (MarkovParser() if namespace.M else TuringParser()).parse_source(source)
    # Fall back to reading the input string from stdin when not given.
    string = input() if namespace.input_string is None else namespace.input_string
    conf = algo.init_configuration(string)
    if namespace.trace:
        print("start: ", conf.representation())
        start_time = time.time()
        while time.time() - start_time < timeout:
            conf = algo.make_step_into(conf)
            if conf.is_final:
                break
            print("next: ", conf.representation())
        else:
            # while/else: the loop ran out of time without reaching break.
            print("timeout exceed")
            return ErrorCode.TIMEOUT_EXCEED
        print("result:", conf.representation())
    else:
        try:
            conf = algo.run(conf, timeout=timeout)
            print(conf)
        except TimeoutError:
            print("timeout exceed")
            return ErrorCode.TIMEOUT_EXCEED
    return ErrorCode.SUCCESS
|
<reponame>Iolaum/podcust
"""Console script for podcust.
Useful documentation at:
https://click.palletsprojects.com/en/7.x/quickstart/#nesting-commands
https://click.palletsprojects.com/en/7.x/complex/
https://dev.to/drcloudycoder/develop-python-cli-with-subcommands-using-click-4892
"""
import click
from podcust.commands import demo, transmission
from podcust import __version__ as cver
@click.group()
def main(args=None):
    """Podcust commands provide a wrapper around lower level utilities
    from podman, the Operating System and the container you are managing."""
    # Group callback: runs before any subcommand and just greets the user.
    click.echo("Welcome to Podman Custodian!")
@click.command()
def version():
    """Show podcust version."""
    # Use click.echo for consistency with main() and for correct behavior
    # under redirected/broken output streams.
    click.echo(f"Current podcust version is: {cver}")
# Register subcommands on the top-level group.
main.add_command(demo.demo)
main.add_command(transmission.transmission)
main.add_command(version)
|
#!/usr/bin/env bash
# Install and wire up shellinabox behind Apache2 with a self-signed cert.
set -euo pipefail

echo "SHELLINABOX - Start Installation"

echo "SHELLINABOX - Readying entropy..."
touch ~/.rnd
systemctl enable --now haveged

echo "SHELLINABOX - Generating certifactes..."
# Self-signed certificate, valid for one year.
openssl req \
  -new \
  -newkey rsa:4096 \
  -days 365 \
  -nodes \
  -x509 \
  -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" \
  -keyout /etc/ssl/private/bastion-selfsigned.key \
  -out /etc/ssl/certs/bastion-selfsigned.cert

echo "SHELLINABOX - Generating Diffie-Hellman parameters..."
openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048

echo "SHELLINABOX - Enabling Apache2 modules and configs..."
modules=(
  ssl
  headers
  rewrite
  proxy
  proxy_http
  proxy_balancer
  lbmethod_byrequests
)
# Quote the expansions so word splitting / set -u cannot break the loop.
for mod in "${modules[@]}"; do
  a2enmod "${mod}"
done
a2ensite default-ssl
a2enconf ssl-params

echo "SHELLINABOX - Restarting services..."
systemctl enable apache2 shellinabox
systemctl reload-or-restart apache2 shellinabox

echo "SHELLINABOX - Installation Complete"
|
#! /usr/bin/env bash
set -e
# Derive image coordinates from GitHub Actions environment variables
# (GITHUB_REPOSITORY is "org/repo"; SHA, PR, DOCKER_* come from the workflow).
export MY_ORG=$(echo "${GITHUB_REPOSITORY}" | awk -F / '{print tolower($1)}')
export DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
export BUILD_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
# Short (7-character) commit hash for labels/build args.
export COMMIT=$(echo "$SHA" | cut -c -7)
echo "Source branch is $GITHUB_HEAD_REF"
echo "Target branch is $GITHUB_BASE_REF"
# PRs targeting develop publish a "-develop"-suffixed image; PRs targeting
# main/master use the bare repository name.
if [ "$GITHUB_BASE_REF" = "develop" ]; then
  export MY_APP=$(echo $(echo "${GITHUB_REPOSITORY}" | awk -F / '{print $2}')"-develop")
elif [ "$GITHUB_BASE_REF" = "main" ] || [ "$GITHUB_BASE_REF" = "master" ]; then
  export MY_APP=$(echo "${GITHUB_REPOSITORY}" | awk -F / '{print $2}')
else
  echo "Target branch must be develop, main, or master";
  exit 1
fi
# Enforce the develop -> main/master promotion flow.
if [[ ( $GITHUB_BASE_REF = "main" || $GITHUB_BASE_REF = "master" ) && $GITHUB_HEAD_REF != "develop" ]]; then
  echo "Must merge PRs to develop before merging to main/master";
  exit 1
else
  echo "$DOCKER_TOKEN" | docker login ghcr.io -u "$DOCKER_ACTOR" --password-stdin
  # Build and push an image tagged with the pull request number.
  docker build --build-arg BUILD_DATE="$DATE" \
    --build-arg COMMIT="$COMMIT" \
    --build-arg BRANCH="$GITHUB_HEAD_REF" \
    --build-arg PULL_REQUEST="$PR" \
    --label us.kbase.vcs-commit="$COMMIT" \
    --label us.kbase.vcs-pull-req="$PR" \
    -t ghcr.io/"$MY_ORG"/"$MY_APP":"pr-""$PR" .
  docker push ghcr.io/"$MY_ORG"/"$MY_APP":"pr-""$PR"
fi
|
#!/usr/bin/env bash
# Install ZSH (building from source on RHEL/CentOS 7) plus oh-my-zsh,
# using the dotfiles repo's pacman-style package-manager wrapper.
trap 'rm -rf "${WORKDIR}"' EXIT
[[ -z "${WORKDIR}" || "${WORKDIR}" != "/tmp/"* || ! -d "${WORKDIR}" ]] && WORKDIR="$(mktemp -d)"
[[ -z "${CURRENT_DIR}" || ! -d "${CURRENT_DIR}" ]] && CURRENT_DIR=$(pwd)
[[ -z "${MY_SHELL_SCRIPTS}" ]] && MY_SHELL_SCRIPTS="$HOME/.dotfiles"
# Load custom functions
if type 'colorEcho' 2>/dev/null | grep -q 'function'; then
    :
else
    if [[ -s "${MY_SHELL_SCRIPTS}/custom_functions.sh" ]]; then
        source "${MY_SHELL_SCRIPTS}/custom_functions.sh"
    else
        echo "${MY_SHELL_SCRIPTS}/custom_functions.sh does not exist!"
        # NOTE(review): this is an error path but exits with status 0 —
        # confirm whether callers rely on the success code here.
        exit 0
    fi
fi
# Package managers with pacman-style command syntax
if [[ ! -x "$(command -v pacman)" ]]; then
    [[ -s "${MY_SHELL_SCRIPTS}/installer/pacman_installer.sh" ]] && source "${MY_SHELL_SCRIPTS}/installer/pacman_installer.sh"
fi
if [[ ! -x "$(command -v pacman)" ]]; then
    colorEcho "${FUCHSIA}pacapt or pacaptr${RED} is not installed!"
    exit 1
fi
[[ -z "${CURL_CHECK_OPTS[*]}" ]] && Get_Installer_CURL_Options
# Install ZSH Shell
if [[ -x "$(command -v pacman)" ]]; then
    colorEcho "${BLUE}Updating ${FUCHSIA}installed packages${BLUE}..."
    sudo pacman --noconfirm -Syu
    colorEcho "${BLUE}Installing ${FUCHSIA}pre-requisite packages${BLUE}..."
    ## Install Latest Git ( Git 2.x ) on CentOS 7
    ## https://computingforgeeks.com/how-to-install-latest-version-of-git-git-2-x-on-centos-7/
    # sudo dnf -y remove git
    # sudo dnf -y install https://packages.endpoint.com/rhel/7/os/x86_64/endpoint-repo-1.7-1.x86_64.rpm
    # sudo dnf -y install git
    # GeoIP binary and database
    # http://kbeezie.com/geoiplookup-command-line/
    # autojump
    # https://github.com/wting/autojump
    # jq
    # https://stedolan.github.io/jq/
    # Pre-requisite packages — the list mixes package names from several
    # distros; checkPackageNeedInstall filters out the inapplicable ones.
    PackagesList=(
        curl
        wget
        git
        zip
        unzip
        # autojump
        # autojump-zsh
        bind-utils
        binutils
        build-essential
        connect-proxy
        coreutils
        dnsutils
        autoconf
        g++
        gcc
        geoip
        geoip-bin
        geoip-data
        geoip-database
        GeoIP
        GeoIP-data
        glibc-langpack-en
        glibc-locale-source
        bc
        dstat
        htop
        inxi
        jq
        lsof
        make
        man
        man-pages-zh-CN
        man-pages-zh_cn
        manpages-zh
        mtr
        multitail
        netcat-openbsd
        nmap
        python3
        recode
        rlwrap
        rsync
        screen
        strace
        telnet
        # tmux
        xsel
        xmlstarlet
        aria2
        axel
    )
    for TargetPackage in "${PackagesList[@]}"; do
        if checkPackageNeedInstall "${TargetPackage}"; then
            colorEcho "${BLUE}  Installing ${FUCHSIA}${TargetPackage}${BLUE}..."
            sudo pacman --noconfirm -S "${TargetPackage}"
        fi
    done
fi
## https://github.com/man-pages-zh/manpages-zh
sudo localedef -i zh_CN -c -f UTF-8 -A /usr/share/locale/locale.alias zh_CN.UTF-8
# alias man="LC_MESSAGES=zh_CN.UTF-8 man"
# alias man="man -Lzh_CN"
## keep SSH env when using `sudo -i`
## https://mwl.io/archives/1000
## sudo visudo -f /etc/sudoers.d/keep_env_via_ssh
# echo 'Defaults env_keep += "SSH_CLIENT SSH_CONNECTION SSH_TTY SSH_AUTH_SOCK"' \
#     | sudo tee "/etc/sudoers.d/keep_env_via_ssh" >/dev/null
colorEcho "${BLUE}Installing ${FUCHSIA}ZSH ${BLUE}Shell..."
# http://zsh.sourceforge.net/
if [[ ! -x "$(command -v zsh)" ]]; then
    RHEL_VERSION=$(cat /etc/os-release | grep "^VERSION=" | cut -d'"' -f2)
    if [[ "${RHEL_VERSION}" == "7" ]]; then
        ## install latest zsh for readhat 7 & centos 7
        ## sudo dnf -y remove zsh
        # if checkPackageInstalled "zsh"; then
        #     CURRENT_VERSION=$(zsh --version | grep -Eo -m1 '([0-9]{1,}\.)+[0-9]{1,}' | head -n1)
        #     colorEcho "${BLUE}  Removing ${FUCHSIA}zsh ${YELLOW}${CURRENT_VERSION}${BLUE}..."
        #     sudo pacman --noconfirm -R zsh
        # fi
        # sudo dnf -y update && sudo dnf -y install ncurses-devel gcc make
        # Build dependencies for compiling zsh from source.
        PackagesList=(
            ncurses-devel
            gcc
            make
        )
        for TargetPackage in "${PackagesList[@]}"; do
            if checkPackageNeedInstall "${TargetPackage}"; then
                colorEcho "${BLUE}  Installing ${FUCHSIA}${TargetPackage}${BLUE}..."
                sudo pacman --noconfirm -S "${TargetPackage}"
            fi
        done
        # ZSH_REPO_VERSION=$(dnf info zsh | grep -E "[Vv]ersion" | grep -Eo '([0-9]{1,}\.)+[0-9]{1,}')
        # Scrape the latest release number from the zsh news page, then
        # download, build and install that version from SourceForge.
        REMOTE_VERSION=$(curl "${CURL_CHECK_OPTS[@]}" http://zsh.sourceforge.net/News/ \
            | grep -Eo -m1 'Release ([0-9]{1,}\.)+[0-9]{1,}' | head -n1)
        REMOTE_VERSION=$(echo "$REMOTE_VERSION" | grep -Eo '([0-9]{1,}\.)+[0-9]{1,}')
        if [[ -n "$REMOTE_VERSION" ]]; then
            DOWNLOAD_URL="https://nchc.dl.sourceforge.net/project/zsh/zsh/${REMOTE_VERSION}/zsh-${REMOTE_VERSION}.tar.xz"
            colorEcho "${BLUE}  From ${ORANGE}${DOWNLOAD_URL}"
            curl "${CURL_DOWNLOAD_OPTS[@]}" -o "${WORKDIR}/zsh.tar.xz" "${DOWNLOAD_URL}" && \
                tar -xJf "${WORKDIR}/zsh.tar.xz" -C "${WORKDIR}" && \
                mv "${WORKDIR}"/zsh-* "${WORKDIR}/zsh" && \
                cd "${WORKDIR}/zsh" && \
                sudo ./configure >/dev/null && \
                sudo make >/dev/null && \
                sudo make install >/dev/null
        fi
        # Register the freshly built zsh as a login shell.
        if [[ ! -x "$(command -v zsh)" ]] && [[ -s "/usr/local/bin/zsh" ]]; then
            sudo ln -sv /usr/local/bin/zsh /bin/zsh
        fi
        if [[ -x "$(command -v zsh)" ]]; then
            if [[ ! -f "/bin/zsh" ]]; then
                sudo ln -sv "$(command -v zsh)" /bin/zsh
            fi
            command -v zsh | sudo tee -a /etc/shells
        fi
    else
        if checkPackageNeedInstall "zsh"; then
            sudo pacman --noconfirm -S zsh
        fi
    fi
fi
if [[ ! -x "$(command -v zsh)" ]]; then
    colorEcho "${FUCHSIA}ZSH${RED} is not installed! Please manual install ${FUCHSIA}ZSH${RED}!"
    exit
fi
if [[ -s "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/git/git_global_config.sh" ]]; then
    source "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/git/git_global_config.sh"
fi
# change default shell to zsh
# chsh -s $(which zsh)
sudo mkdir -p "/usr/local/share/zsh/site-functions"
# Launch ZSH in BASH
OS_INFO_WSL=$(uname -r)
# WSL1 & WSL2: chsh is unreliable there, so exec zsh from .bashrc instead.
if [[ "${OS_INFO_WSL}" =~ "Microsoft" || "${OS_INFO_WSL}" =~ "microsoft" ]]; then
    if ! grep -q "exec zsh" "$HOME/.bashrc" 2>/dev/null; then
        tee -a ~/.bashrc >/dev/null <<-'EOF'
# Launch ZSH
if [[ "${ZSH_VERSION:-unset}" = "unset" ]]; then
export SHELL=$(which zsh)
exec zsh
fi
EOF
    fi
fi
## Install oh-my-zsh
if [[ -d "$HOME/.oh-my-zsh" ]]; then
    colorEcho "${BLUE}Updating ${FUCHSIA}oh-my-zsh${BLUE}..."
    cd "$HOME/.oh-my-zsh" && git pull
else
    colorEcho "${BLUE}Installing ${FUCHSIA}oh-my-zsh${BLUE}..."
    bash -c "$(curl -fsSL https://raw.githubusercontent.com/robbyrussell/oh-my-zsh/master/tools/install.sh)"
fi
cd "${CURRENT_DIR}" || exit
<gh_stars>1-10
// Code generated by go run zgen.go. DO NOT EDIT.

//go:build darwin
// +build darwin

// Package types declares an opaque Pointer reference type and the marker
// interfaces for the darwin Core Foundation / FSEvents object kinds used
// by this module. Pointer satisfies every interface below.
package types

// Pointer is an opaque reference that satisfies all interfaces in this package.
type Pointer uintptr

// Pointer returns the underlying opaque pointer value.
func (p Pointer) Pointer() uintptr { return uintptr(p) }

// privateAnyObject implements the AnyObject interface.
func (p Pointer) privateAnyObject() {}

// privateCFType implements the CFType interface.
func (p Pointer) privateCFType() {}

// privateCFAllocator implements the CFAllocator interface.
func (p Pointer) privateCFAllocator() {}

// privateCFArray implements the CFArray interface.
func (p Pointer) privateCFArray() {}

// privateCFData implements the CFData interface.
func (p Pointer) privateCFData() {}

// privateCFDictionary implements the CFDictionary interface.
func (p Pointer) privateCFDictionary() {}

// privateCFMutableArray implements the CFMutableArray interface.
func (p Pointer) privateCFMutableArray() {}

// privateCFRunLoop implements the CFRunLoop interface.
func (p Pointer) privateCFRunLoop() {}

// privateCFString implements the CFString interface.
func (p Pointer) privateCFString() {}

// privateConstFSEventStreamRef implements the ConstFSEventStreamRef interface.
func (p Pointer) privateConstFSEventStreamRef() {}

// privateFSEventStreamRef implements the FSEventStreamRef interface.
func (p Pointer) privateFSEventStreamRef() {}

// AnyObject is an opaque reference to AnyObject type.
type AnyObject interface {
	Pointer() uintptr
	privateAnyObject()
}

// CFType is an opaque reference to CFType type.
type CFType interface {
	Pointer() uintptr
	privateCFType()
}

// CFAllocator is an opaque reference to CFAllocator type.
type CFAllocator interface {
	CFType
	privateCFAllocator()
}

// CFArray is an opaque reference to CFArray type.
type CFArray interface {
	AnyObject
	CFType
	privateCFArray()
}

// CFData is an opaque reference to CFData type.
type CFData interface {
	AnyObject
	CFType
	privateCFData()
}

// CFDictionary is an opaque reference to CFDictionary type.
type CFDictionary interface {
	AnyObject
	CFType
	privateCFDictionary()
}

// CFMutableArray is an opaque reference to CFMutableArray type.
type CFMutableArray interface {
	AnyObject
	CFArray
	privateCFMutableArray()
}

// CFRunLoop is an opaque reference to CFRunLoop type.
type CFRunLoop interface {
	CFType
	privateCFRunLoop()
}

// CFString is an opaque reference to CFString type.
type CFString interface {
	AnyObject
	CFType
	privateCFString()
}

// ConstFSEventStreamRef is an opaque reference to ConstFSEventStreamRef type.
type ConstFSEventStreamRef interface {
	Pointer() uintptr
	privateConstFSEventStreamRef()
}

// FSEventStreamRef is an opaque reference to FSEventStreamRef type.
type FSEventStreamRef interface {
	ConstFSEventStreamRef
	privateFSEventStreamRef()
}
|
#
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: EPL-2.0
#
"""
Converts the Medical Decathlon raw 3D Nifti files into
2D NumPy files for easier use in TensorFlow/Keras 2D models.
You'll need to download the raw dataset from
the Medical Decathlon website (http://medicaldecathlon.com),
extract the data (untar), and run this script.
The raw dataset has the CC-BY-SA 4.0 license.
https://creativecommons.org/licenses/by-sa/4.0/
For BraTS (Task 1):
INPUT CHANNELS: "modality": {
"0": "FLAIR",
"1": "T1w",
"2": "t1gd",
"3": "T2w"
},
LABEL_CHANNELS: "labels": {
"0": "background",
"1": "edema",
"2": "non-enhancing tumor",
"3": "enhancing tumour"
}
"""
import os
import nibabel as nib # pip install nibabel
import numpy as np
from tqdm import tqdm # pip install tqdm
import json
import settings
def crop_center(img, cropx, cropy, cropz):
    """
    Return a centered crop of a 4D (x, y, z, channel) image.

    The crop window is clamped to the image bounds on every spatial
    axis, so requesting a crop larger than the image returns the
    whole axis unchanged. The channel axis is never cropped.
    """
    dims = img.shape[:3]
    crops = (cropx, cropy, cropz)
    # Clamp the start of each window at 0 and its end at the axis length.
    starts = [max(dim // 2 - crop // 2, 0) for dim, crop in zip(dims, crops)]
    ends = [min(start + crop, dim) for start, crop, dim in zip(starts, crops, dims)]
    (sx, ex), (sy, ey), (sz, ez) = zip(starts, ends)
    return img[sx:ex, sy:ey, sz:ez, :]
def normalize_img(img):
    """
    Normalize each channel to zero mean and unit standard deviation.

    Standardizing the inputs is one of the most important preprocessing
    steps: it helps the model train faster and more accurately.

    Note: mutates ``img`` in place (and also returns it). ``img`` must be
    a 4D float array shaped (x, y, z, channel).
    """
    for channel in range(img.shape[3]):
        channel_data = img[:, :, :, channel]
        std = np.std(channel_data)
        # Guard against constant-valued channels: dividing by a zero std
        # would fill the channel with NaN/inf. Mean-center only instead.
        if std == 0:
            std = 1
        img[:, :, :, channel] = (channel_data - np.mean(channel_data)) / std
    return img
def preprocess_inputs(img, resize):
    """
    Prepare one input scan: promote to 4D, optionally center-crop,
    normalize per channel, and stack the z dimension first.

    For the BraTS subset the channels are:
        0: FLAIR (T2-weighted Fluid-Attenuated Inversion Recovery MRI)
        1: T1w   (T1-weighted MRI)
        2: t1gd  (T1 gadolinium-contrast MRI)
        3: T2w   (T2-weighted MRI)
    """
    # Promote to 4D by appending trailing channel axes as needed.
    while img.ndim < 4:
        img = np.expand_dims(img, -1)
    if resize != -1:
        img = crop_center(img, resize, resize, resize)
    # Move the z axis to the front so 2D slices stack along axis 0.
    return np.swapaxes(np.array(normalize_img(img)), 0, -2)
def preprocess_labels(msk, num_labels, resize):
    """
    Prepare one ground-truth mask: promote to 4D, optionally
    center-crop, and stack the z dimension first.

    For the BraTS subset the labels are:
        0: background (no tumor)
        1: edema (swelling around tumor)
        2: non-enhancing tumor
        3: enhancing tumour (gadolinium-contrast enhanced regions)

    ``num_labels`` is accepted for interface compatibility but is not
    used by the current implementation.
    """
    # Promote to 4D by appending trailing channel axes as needed.
    while msk.ndim < 4:
        msk = np.expand_dims(msk, -1)
    if resize != -1:
        msk = crop_center(msk, resize, resize, resize)
    # Move the z axis to the front so 2D slices stack along axis 0.
    return np.swapaxes(np.array(msk), 0, -2)
def save_img_msk(idx_array, name, fileIdx, save_dir, dataDir, num_labels, resize):
    """
    Convert each listed 3D scan/mask pair into per-slice .npz files.

    For every index in ``idx_array``, loads the Nifti image and label
    named by ``fileIdx``, preprocesses both, and writes one compressed
    file per 2D slice into ``save_dir/name`` with keys ``img`` and ``msk``.
    """
    for idx in tqdm(idx_array):
        entry = fileIdx[idx]
        # Strip both extensions, e.g. "BRATS_001.nii.gz" -> "BRATS_001".
        base = os.path.basename(entry["image"])
        base = os.path.splitext(os.path.splitext(base)[0])[0]
        img = np.array(nib.load(os.path.join(dataDir, entry["image"])).dataobj)
        img = preprocess_inputs(img, resize)
        msk = np.array(nib.load(os.path.join(dataDir, entry["label"])).dataobj)
        msk = preprocess_labels(msk, num_labels, resize)
        # One .npz per 2D slice along the stacked first axis.
        for idy in range(img.shape[0]):
            np.savez(os.path.join(save_dir, name,
                                  "{}_{:03d}".format(base, idy)),
                     img=img[idy],
                     msk=msk[idy])
def convert_raw_data_to_numpy(trainIdx, validateIdx, testIdx,
                              dataDir, json_data, save_dir, resize=-1):
    """
    Go through the Decathlon dataset.json file and convert the 3D
    Nifti images and masks into stacks of 2D NumPy slices, written
    under ``save_dir`` in train/testing/validation subdirectories.

    The train/validation/test split has already been decided by the
    caller via the three index arrays.
    """
    num_labels = len(json_data["labels"])
    fileIdx = json_data["training"]

    # exist_ok=True replaces the old try/except OSError pattern. The old
    # code also re-checked the wrong path (save_dir instead of the split
    # subdirectory), which could mask a genuine makedirs failure.
    os.makedirs(save_dir, exist_ok=True)

    # (human-readable label, subdirectory name, file indices) per split,
    # processed in the original order: train, test, validation.
    splits = (("training", "train", trainIdx),
              ("testing", "testing", testIdx),
              ("validation", "validation", validateIdx))
    for step, (label, dirname, indices) in enumerate(splits, start=1):
        print("Step {} of 3. Save {} set 3D scans to 2D slices.".format(step, label))
        os.makedirs(os.path.join(save_dir, dirname), exist_ok=True)
        save_img_msk(indices, dirname, fileIdx, save_dir, dataDir,
                     num_labels, resize)

    print("Finished processing.")
if __name__ == "__main__":

    from argparser import args
    print(args)

    """
    Get the training file names from the data directory.
    Decathlon should always have a dataset.json file in the
    subdirectory which lists the experiment information including
    the input and label filenames.
    """
    json_filename = os.path.join(args.original_data_path, "dataset.json")
    try:
        with open(json_filename, "r") as fp:
            experiment_data = json.load(fp)
    except IOError as e:
        # NOTE(review): this handler prints a message but does not exit or
        # re-raise, so the next access to `experiment_data` raises NameError.
        # Presumably a `raise` or `sys.exit(1)` was intended — confirm.
        print("File {} doesn't exist. It should be part of the "
              "Decathlon directory".format(json_filename))

    # Print information about the Decathlon experiment data
    print("*" * 30)
    print("=" * 30)
    print("Dataset name: ", experiment_data["name"])
    print("Dataset description: ", experiment_data["description"])
    print("Tensor image size: ", experiment_data["tensorImageSize"])
    print("Dataset release: ", experiment_data["release"])
    print("Dataset reference: ", experiment_data["reference"])
    # "licence" is the (mis)spelled key used by the Decathlon JSON itself.
    print("Dataset license: ", experiment_data["licence"])  # sic
    print("=" * 30)
    print("*" * 30)

    """
    Randomize the file list. Then separate into training and
    validation lists. We won't use the testing set since we
    don't have ground truth masks for this; instead we'll
    split the validation set into separate test and validation
    sets.
    """
    # Set the random seed so that always get same random mix
    np.random.seed(args.seed)
    numFiles = experiment_data["numTraining"]
    idxList = np.arange(numFiles)  # List of file indices
    randomList = np.random.random(numFiles)  # List of random numbers
    # Random number go from 0 to 1. So anything above
    # args.train_split is in the validation list.
    # NOTE(review): the comment above says args.train_split but the code
    # reads args.split — confirm which attribute argparser defines.
    trainList = idxList[randomList < args.split]
    otherList = idxList[randomList >= args.split]
    # Split the held-out files roughly 50/50 into validation and test sets.
    randomList = np.random.random(len(otherList))  # List of random numbers
    validateList = otherList[randomList >= 0.5]
    testList = otherList[randomList < 0.5]

    convert_raw_data_to_numpy(trainList, validateList, testList,
                              args.original_data_path,
                              experiment_data,
                              args.data_path,
                              args.resize)
|
import * as NS from '../../namespace';
export * from './data';
export * from './edit';
/** Build the action that resets the order-book document title to its default. */
export function setDefaultDocumentTitle(): NS.ISetDefaultDocumentTitle {
  const action: NS.ISetDefaultDocumentTitle = {
    type: 'ORDER_BOOK:SET_DEFAULT_DOCUMENT_TITLE',
  };
  return action;
}
|
<gh_stars>0
/*
* Copyright 2018 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.abris.avro.parsing
import java.lang.{Boolean, Double, Float, Long}
import java.nio.ByteBuffer
import java.util.{ArrayList, Arrays, HashMap}
import org.apache.avro.generic.GenericRecord
import org.apache.spark.sql.catalyst.expressions.GenericRow
import org.scalatest.FlatSpec
import za.co.absa.abris.avro.format.ScalaAvroRecord
import za.co.absa.abris.examples.data.generation.{AvroDataUtils, FixedString, TestSchemas}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection._
import scala.collection.immutable.Map
/**
 * Round-trip tests for AvroToSparkParser: build a GenericRecord from a map of
 * raw values and assert the parsed Spark Row carries equivalent values.
 */
class AvroToSparkParserSpec extends FlatSpec {

  private val avroParser = new AvroToSparkParser()

  behavior of "AvroToSparkParser"

  it should "support native types" in {
    val testData = Map[String, Object](
      "string" -> "A Test String",
      "float" -> new Float(Float.MAX_VALUE),
      "int" -> new Integer(Integer.MAX_VALUE),
      "long" -> new Long(Long.MAX_VALUE),
      "double" -> new Double(Double.MAX_VALUE),
      "boolean" -> new Boolean(true))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.NATIVE_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support union with NULL in native types" in {
    val testData = Map[String, Object](
      "string" -> null,
      "float" -> null,
      "int" -> null,
      "long" -> null,
      "double" -> null,
      "boolean" -> null)
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.NATIVE_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (i <- 0 until resultRow.size) {
      assert(resultRow.get(i) == null)
    }
  }

  it should "support array type" in {
    val testData = Map[String, Object](
      "array" -> new ArrayList(Arrays.asList("elem1", "elem2")))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.ARRAY_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support map type" in {
    val map = new HashMap[String, java.util.ArrayList[Long]]()
    map.put("entry1", new ArrayList(java.util.Arrays.asList(new Long(1), new Long(2))))
    map.put("entry2", new ArrayList(java.util.Arrays.asList(new Long(3), new Long(4))))
    val testData = Map[String, Object](
      "map" -> map)
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.MAP_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support bytes type" in {
    val testData = Map[String, Object](
      "bytes" -> ByteBuffer.wrap("ASimpleString".getBytes))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.BYTES_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support fixed type" in {
    val testData = Map[String, Object](
      "fixed" -> new FixedString("ASimpleString"))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.FIXED_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support decimal type" in {
    val testData = Map[String, Object](
      "decimal" -> ByteBuffer.wrap("1".getBytes))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.DECIMAL_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support date type as Integer" in {
    val testData = Map[String, Object](
      "date" -> new Integer(Integer.MAX_VALUE))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.DATE_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support millisecond type" in {
    val testData = Map[String, Object](
      "millisecond" -> new Integer(Integer.MAX_VALUE))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.MILLISECOND_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support microsecond type" in {
    val testData = Map[String, Object](
      "microsecond" -> new Long(Long.MAX_VALUE))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.MICROSECOND_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support timestamp millis type" in {
    val testData = Map[String, Object](
      "timestampMillis" -> new Long(Long.MAX_VALUE))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.TIMESTAMP_MILLIS_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support timestamp micros type" in {
    val testData = Map[String, Object](
      "timestampMicros" -> new Long(Long.MAX_VALUE))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.TIMESTAMP_MICROS_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "support duration type" in {
    val testData = Map[String, Object](
      "duration" -> new FixedString("111111111111"))
    val avroRecord = AvroDataUtils.mapToGenericRecord(testData, TestSchemas.DURATION_MICROS_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    for (entry <- testData) {
      assert(assertEquals(entry._2, resultRow.getAs(entry._1)), s"${entry._1} did not match")
    }
  }

  it should "convert Avro's GenericRecord to Row using informed NESTED Schema" in {
    case class Street(name: String, zip: String)
    case class Neighborhood(name: String, streets: List[Street])
    case class City(name: String, neighborhoods: Array[Neighborhood])
    case class State(name: String, regions: Map[String, List[City]])
    val street1 = Map("name" -> "first street name", "zip" -> "140 000-00")
    val street2 = Map("name" -> "second street name", "zip" -> "240 100-00")
    val street3 = Map("name" -> "third street name", "zip" -> "340 000-00")
    val street4 = Map("name" -> "fourth street name", "zip" -> "480 100-00")
    val street5 = Map("name" -> "fifth street name", "zip" -> "580 100-00")
    val street6 = Map("name" -> "sixth street name", "zip" -> "680 100-00")
    val street7 = Map("name" -> "seventh street name", "zip" -> "780 100-00")
    val street8 = Map("name" -> "eigth street name", "zip" -> "880 100-00")
    val neighborhood1 = Map(
      "name" -> "A neighborhood",
      "streets" -> new ArrayList(java.util.Arrays.asList(
        AvroDataUtils.mapToGenericRecord(street1, TestSchemas.COMPLEX_SCHEMA_STREET_SPEC),
        AvroDataUtils.mapToGenericRecord(street2, TestSchemas.COMPLEX_SCHEMA_STREET_SPEC))))
    val neighborhood2 = Map(
      "name" -> "B neighborhood",
      "streets" -> new ArrayList(java.util.Arrays.asList(
        AvroDataUtils.mapToGenericRecord(street3, TestSchemas.COMPLEX_SCHEMA_STREET_SPEC),
        AvroDataUtils.mapToGenericRecord(street4, TestSchemas.COMPLEX_SCHEMA_STREET_SPEC))))
    val neighborhood3 = Map(
      "name" -> "C neighborhood",
      "streets" -> new ArrayList(java.util.Arrays.asList(
        AvroDataUtils.mapToGenericRecord(street5, TestSchemas.COMPLEX_SCHEMA_STREET_SPEC),
        AvroDataUtils.mapToGenericRecord(street6, TestSchemas.COMPLEX_SCHEMA_STREET_SPEC))))
    val neighborhood4 = Map(
      "name" -> "D neighborhood",
      "streets" -> new ArrayList(java.util.Arrays.asList(
        AvroDataUtils.mapToGenericRecord(street7, TestSchemas.COMPLEX_SCHEMA_STREET_SPEC),
        AvroDataUtils.mapToGenericRecord(street8, TestSchemas.COMPLEX_SCHEMA_STREET_SPEC))))
    val city1 = Map(
      "name" -> "first city",
      "neighborhoods" -> new ArrayList(java.util.Arrays.asList(
        AvroDataUtils.mapToGenericRecord(neighborhood1, TestSchemas.COMPLEX_SCHEMA_NEIGHBORHOOD_SPEC),
        AvroDataUtils.mapToGenericRecord(neighborhood2, TestSchemas.COMPLEX_SCHEMA_NEIGHBORHOOD_SPEC))))
    val city2 = Map(
      "name" -> "second city",
      "neighborhoods" -> new ArrayList(java.util.Arrays.asList(
        AvroDataUtils.mapToGenericRecord(neighborhood3, TestSchemas.COMPLEX_SCHEMA_NEIGHBORHOOD_SPEC),
        AvroDataUtils.mapToGenericRecord(neighborhood4, TestSchemas.COMPLEX_SCHEMA_NEIGHBORHOOD_SPEC))))
    val cityList = new ArrayList(java.util.Arrays.asList(
      AvroDataUtils.mapToGenericRecord(city1, TestSchemas.COMPLEX_SCHEMA_CITY_SPEC),
      AvroDataUtils.mapToGenericRecord(city2, TestSchemas.COMPLEX_SCHEMA_CITY_SPEC)
    ))
    val cities = new HashMap[String, Object]()
    cities.put("cities", cityList)
    val state = Map(
      "name" -> "A State",
      "regions" -> cities)
    val avroRecord = AvroDataUtils.mapToGenericRecord(state, TestSchemas.COMPLEX_SCHEMA_SPEC)
    val resultRow = avroParser.parse(avroRecord)
    val cityMap: scala.collection.mutable.HashMap[String, Object] = resultRow.getAs("regions")
    for (i <- 0 until 2) {
      val record = cityMap.get("cities").get.asInstanceOf[scala.collection.mutable.ListBuffer[GenericRecord]].get(i)
      assert(cityList.get(i).toString() == record.toString())
    }
  }

  /**
   * Structural equality between a raw input value and the value retrieved
   * from the parsed Row. Returns true when they are equivalent.
   *
   * Fixed: the previous implementation evaluated `false` as a discarded
   * expression inside the HashMap and ScalaAvroRecord loops and then fell
   * through to an unconditional `true`, so those branches could never fail.
   * Arrays were also compared with `!=`, which is reference inequality.
   */
  private def assertEquals(original: Any, retrieved: Any) = {
    original match {
      case value: java.util.ArrayList[Object] => retrieved.asInstanceOf[mutable.ListBuffer[Any]].toList == original.asInstanceOf[java.util.ArrayList[Any]].toList
      case value: java.util.HashSet[Object] => retrieved.asInstanceOf[mutable.ListBuffer[Any]].toList == original.asInstanceOf[java.util.Set[Any]].toList
      case value: java.util.HashMap[String, Object] =>
        val retrievedMap = retrieved.asInstanceOf[mutable.HashMap[Any, Any]]
        val scalaMap = original.asInstanceOf[java.util.HashMap[Any, java.util.ArrayList[Any]]].asScala
        // Every key's sequence must match element-wise.
        scalaMap.forall { case (key, expected) =>
          retrievedMap(key).asInstanceOf[Seq[Any]].toArray.sameElements(expected.toArray())
        }
      case value: FixedString =>
        val str1 = new String(original.asInstanceOf[FixedString].bytes())
        val str2 = new String(retrieved.asInstanceOf[Array[Byte]])
        str1 == str2
      case value: ScalaAvroRecord =>
        val originalRecord = original.asInstanceOf[ScalaAvroRecord]
        val retrievedRecord = retrieved.asInstanceOf[GenericRow]
        // Field-by-field comparison; any mismatch now yields false.
        (0 until originalRecord.getValues().length).forall { i =>
          originalRecord.get(i) == retrievedRecord.get(i)
        }
      case value: ByteBuffer =>
        val str1 = new String(original.asInstanceOf[ByteBuffer].array())
        val str2 = new String(retrieved.asInstanceOf[Array[Byte]])
        str1 == str2
      case _ => original == retrieved
    }
  }
}
// Barrel module for the IK solver collection: re-exports every solver so
// consumers can import any of them from this directory in one statement.
//import SwingTwistChainSolver from './SwingTwistChainSolver';
import SwingTwistEndsSolver from './SwingTwistEndsSolver';
import SwingTwistSolver from './SwingTwistSolver';
import HipSolver from './HipSolver';
import LimbSolver from './LimbSolver';
import ZSolver from './ZSolver';
import ArcSolver from './ArcSolver';
import ArcSinSolver from './ArcSinSolver';
import PistonSolver from './PistonSolver';
import SpringSolver from './SpringSolver';
import TrapezoidSolver from './TrapezoidSolver';
import FabrikSolver from './FabrikSolver';
import CatenarySolver from './CatenarySolver';
import NaturalCCDSolver from './NaturalCCDSolver';
export {
//SwingTwistChainSolver,
SwingTwistEndsSolver,
SwingTwistSolver,
HipSolver,
LimbSolver,
ZSolver,
ArcSolver,
ArcSinSolver,
PistonSolver,
SpringSolver,
TrapezoidSolver,
FabrikSolver,
CatenarySolver,
NaturalCCDSolver,
};
package cyclops.stream.operator;
import static cyclops.reactive.companion.Spouts.of;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItems;
import static org.junit.Assert.assertThat;
import cyclops.container.immutable.impl.Seq;
import cyclops.container.immutable.impl.Vector;
import cyclops.reactive.ReactiveSeq;
import cyclops.reactive.companion.Spouts;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
/**
 * Push-based (Spouts) tests for the windowing/grouping/sliding operators
 * on ReactiveSeq: groupedWhile, groupedUntil (stateless and stateful),
 * sliding, and grouped — including empty-stream and infinite-stream cases.
 */
public class WindowingPushTest {

    ReactiveSeq<Integer> empty;
    ReactiveSeq<Integer> nonEmpty;

    @Before
    public void setup() {
        empty = of();
        nonEmpty = of(1);
    }

    @Test
    public void windowWhile() {
        // Splits whenever the predicate turns false: {1,2,3}, {4,5,6}.
        assertThat(of(1, 2, 3, 4, 5, 6).groupedWhile(i -> i % 3 != 0).toList().size(),
                   equalTo(2));
        assertThat(of(1, 2, 3, 4, 5, 6).groupedWhile(i -> i % 3 != 0).toList().get(0),
                   equalTo(Seq.of(1, 2, 3)));
    }

    @Test
    public void windowUntil() {
        // Closes a group when the predicate turns true: {1,2,3}, {4,5,6}.
        assertThat(of(1, 2, 3, 4, 5, 6).groupedUntil(i -> i % 3 == 0).toList().size(),
                   equalTo(2));
        assertThat(of(1, 2, 3, 4, 5, 6).groupedUntil(i -> i % 3 == 0).toList().get(0),
                   equalTo(Seq.of(1, 2, 3)));
    }

    @Test
    public void windowUntilEmpty() {
        assertThat(Spouts.<Integer>of().groupedUntil(i -> i % 3 == 0).toList().size(),
                   equalTo(0));
    }

    @Test
    public void windowStatefullyUntil() {
        // The accumulating window `s` closes once it already contains 4.
        System.out.println(of(1, 2, 3, 4, 5, 6).groupedUntil((s, i) -> s.containsValue(4))
                               .toList());
        assertThat(of(1, 2, 3, 4, 5, 6).groupedUntil((s, i) -> s.containsValue(4))
                       .toList()
                       .size(),
                   equalTo(2));
    }

    @Test
    public void windowStatefullyUntilEmpty() {
        assertThat(of().groupedUntil((s, i) -> s.contains(4)).toList().size(),
                   equalTo(0));
    }

    @Test
    public void windowStatefullyWhile() {
        System.out.println(of(1, 2, 3, 4, 5, 6).groupedWhile((s, i) -> s.containsValue(4))
                               .toList());
        assertThat(of(1, 2, 3, 4, 5, 6).groupedWhile((s, i) -> s.containsValue(4))
                       .toList()
                       .size(),
                   equalTo(4));
    }

    @Test
    public void windowStatefullyWhileEmpty() {
        assertThat(of().groupedWhile((s, i) -> s.contains(4)).toList().size(),
                   equalTo(0));
    }

    @Test
    public void sliding() {
        List<Seq<Integer>> list = of(1, 2, 3, 4, 5, 6).sliding(2)
                                                      .collect(Collectors.toList());
        assertThat(list.get(0), hasItems(1, 2));
        assertThat(list.get(1), hasItems(2, 3));
    }

    @Test
    public void slidingIncrement() {
        // Window of 3 advancing by 2: {1,2,3}, {3,4,5}, ...
        List<Seq<Integer>> list = of(1, 2, 3, 4, 5, 6).sliding(3, 2)
                                                      .collect(Collectors.toList());
        System.out.println(list);
        assertThat(list.get(0), hasItems(1, 2, 3));
        assertThat(list.get(1), hasItems(3, 4, 5));
    }

    @Test
    public void grouped() {
        List<Vector<Integer>> list = of(1, 2, 3, 4, 5, 6).grouped(3)
                                                         .collect(Collectors.toList());
        System.out.println(list);
        assertThat(list.get(0), hasItems(1, 2, 3));
        assertThat(list.get(1), hasItems(4, 5, 6));
    }

    @Test
    public void sliding2() {
        List<Seq<Integer>> sliding = of(1, 2, 3, 4, 5).sliding(2).toList();
        assertThat(sliding,
                   contains(Seq.of(1, 2), Seq.of(2, 3), Seq.of(3, 4), Seq.of(4, 5)));
    }

    @Test
    public void slidingOverlap() {
        List<Seq<Integer>> sliding = of(1, 2, 3, 4, 5).sliding(3, 2).toList();
        assertThat(sliding, contains(Seq.of(1, 2, 3), Seq.of(3, 4, 5)));
    }

    @Test
    public void slidingEmpty() {
        System.out.println("List " + of().sliding(1).toList());
        assertThat(of().sliding(1).toList().size(), equalTo(0));
    }

    @Test
    public void slidingWithSmallWindowAtEnd() {
        // The final window may be shorter than the requested size.
        List<Seq<Integer>> sliding = of(1, 2, 3, 4, 5).sliding(2, 2).toList();
        assertThat(sliding, contains(Seq.of(1, 2), Seq.of(3, 4), Seq.of(5)));
    }

    @Test
    public void slidingWithSmallWindowAtEndIterative() {
        // Same as above but drained through the pull-based Iterator path.
        Iterator<Seq<Integer>> it = of(1, 2, 3, 4, 5).sliding(2, 2).iterator();
        List<Seq<Integer>> sliding = ReactiveSeq.fromIterator(it).toList();
        assertThat(sliding, contains(Seq.of(1, 2), Seq.of(3, 4), Seq.of(5)));
    }

    @Test
    public void groupedOnEmpty() throws Exception {
        assertThat(empty.grouped(10).count(), equalTo(0L));
    }

    @Test
    public void groupedEmpty0() throws Exception {
        // grouped(0) must not throw; on non-empty input it yields one batch.
        empty.grouped(0).toList();
        assertThat(of(1, 2, 3).grouped(0).toList(),
                   equalTo(Arrays.asList(Vector.of(1, 2, 3))));
    }

    @Test
    public void grouped0() throws Exception {
        // No assertion: verifies grouped(0) terminates without throwing.
        nonEmpty.grouped(0).toList();
    }

    @Test
    public void groupedEmpty() throws Exception {
        assertThat(empty.grouped(1).count(), equalTo(0L));
    }

    @Test
    public void groupedInfinite() {
        // grouped must stay lazy on an infinite stream.
        ReactiveSeq<Integer> infinite = Spouts.iterate(1, i -> i + 1);
        final ReactiveSeq<Vector<Integer>> grouped = infinite.grouped(3);
        assertThat(grouped.elementAt(0).toOptional().get(), equalTo(Vector.of(1, 2, 3)));
    }
}
|
// Import requirements
const express = require('express');
const path = require('path');
const bodyParser = require('body-parser');
const jsonParser = bodyParser.json();
const cors = require('cors');
const logger = require('morgan');

// Import routes
const userRoutes = require('./routes/users.router');

// Instantiate app
const app = express();

// Apply middlewares: CORS first, then JSON body parsing, then request logging.
app.use(cors());
app.use(jsonParser);
app.use(logger('dev'));

// View engine and static assets.
// path.join builds an OS-correct path; the previous `__dirname + '/assets'`
// string concatenation is not portable. (The stray debug console.log of
// __dirname was removed.)
app.set('view engine', 'ejs');
app.use(express.static(path.join(__dirname, 'assets')));
app.set('views', path.resolve(__dirname, 'views'));

// Forward requests to specified routes
app.use('/users', userRoutes);

// Export app
module.exports = app;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.