text stringlengths 1 1.05M |
|---|
<filename>app/src/main/java/com/navjacinthmathew/mvpdemo/adapter/EmployeeAdapter.java
package com.navjacinthmathew.mvpdemo.adapter;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.navjacinthmathew.mvpdemo.R;
import com.navjacinthmathew.mvpdemo.model.EmployeeModel;
import java.util.ArrayList;
/**
* Created by <NAME> on 4/19/2017.
*/
/**
 * RecyclerView adapter that renders a flat list of {@link EmployeeModel} rows
 * (name + email) using the {@code row_employee} layout.
 */
public class EmployeeAdapter extends RecyclerView.Adapter<EmployeeAdapter.EmployeeViewHolder> {

    /**
     * Backing data. The adapter keeps a reference (no defensive copy), so callers
     * that mutate the list must follow up with {@code notifyDataSetChanged()}.
     */
    private final ArrayList<EmployeeModel> dataList;

    /**
     * @param dataList employees to display; must not be {@code null}
     */
    public EmployeeAdapter(ArrayList<EmployeeModel> dataList) {
        this.dataList = dataList;
    }

    @Override
    public EmployeeViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        LayoutInflater layoutInflater = LayoutInflater.from(parent.getContext());
        View view = layoutInflater.inflate(R.layout.row_employee, parent, false);
        return new EmployeeViewHolder(view);
    }

    @Override
    public void onBindViewHolder(EmployeeViewHolder holder, int position) {
        // Fetch the model once instead of performing two list lookups per bind.
        EmployeeModel employee = dataList.get(position);
        holder.txtEmpName.setText(employee.getName());
        holder.txtEmpEmail.setText(employee.getEmailId());
    }

    @Override
    public int getItemCount() {
        return dataList.size();
    }

    /**
     * Caches the row's TextViews so findViewById runs only once per inflated row.
     * Declared static so each holder does not retain a hidden reference to the
     * enclosing adapter instance.
     */
    static class EmployeeViewHolder extends RecyclerView.ViewHolder {
        TextView txtEmpName, txtEmpEmail;

        EmployeeViewHolder(View itemView) {
            super(itemView);
            txtEmpEmail = (TextView) itemView.findViewById(R.id.txt_name_email);
            txtEmpName = (TextView) itemView.findViewById(R.id.txt_emp_name);
        }
    }
}
|
import tensorflow as tf
from tensorflow.keras import layers

# Build a convolutional autoencoder for 28x28x1 grayscale images (e.g. MNIST).
input_layer = layers.Input(shape=(28, 28, 1))

# Encoder: four stride-2 convolutions shrink the spatial size 28 -> 14 -> 7 -> 4 -> 2.
encoder_layer_1 = layers.Conv2D(64, (3, 3), activation='relu', padding='SAME', strides=2)(input_layer)
encoder_layer_2 = layers.Conv2D(32, (3, 3), activation='relu', padding='SAME', strides=2)(encoder_layer_1)
encoder_layer_3 = layers.Conv2D(16, (3, 3), activation='relu', padding='SAME', strides=2)(encoder_layer_2)
encoded = layers.Conv2D(3, (3, 3), activation='relu', padding='SAME', strides=2)(encoder_layer_3)

# Decoder: four stride-2 transposed convolutions grow the size 2 -> 4 -> 8 -> 16 -> 32.
decoder_layer_1 = layers.Conv2DTranspose(3, (3, 3), activation='relu', padding='SAME', strides=2)(encoded)
decoder_layer_2 = layers.Conv2DTranspose(16, (3, 3), activation='relu', padding='SAME', strides=2)(decoder_layer_1)
decoder_layer_3 = layers.Conv2DTranspose(32, (3, 3), activation='relu', padding='SAME', strides=2)(decoder_layer_2)
decoder_layer_4 = layers.Conv2DTranspose(64, (3, 3), activation='relu', padding='SAME', strides=2)(decoder_layer_3)

# BUG FIX: 28 is not divisible by 16, so the decoder reconstructs 32x32 while the
# fit target is 28x28 and training fails with a shape mismatch. Crop 2 pixels from
# each border (32 - 2*2 = 28) to restore the original resolution.
decoder_cropped = layers.Cropping2D(cropping=2)(decoder_layer_4)
outputs = layers.Conv2DTranspose(1, (3, 3), activation='sigmoid', padding='SAME', strides=1)(decoder_cropped)

# Creating the model
model = tf.keras.Model(input_layer, outputs)

# Compile & train. binary_crossentropy pairs with the sigmoid output; inputs are
# used as their own targets (autoencoder).
model.compile(optimizer='adam', loss='binary_crossentropy')
# NOTE(review): x_train is not defined in this snippet — it must be loaded and
# scaled to [0, 1] with shape (n, 28, 28, 1) before this call; confirm upstream.
model.fit(x_train, x_train, epochs=10)
#!/bin/bash
#
# Copyright (c) 2017, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
#
# All rights reserved.
#
# The Astrobee platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# short help
# BUG FIX: $scriptname was referenced but never assigned; derive it from $0.
scriptname=$(basename "$0")
usage_string="$scriptname [-h] [-p <install path>]"
#[-t make_target]

# Print usage text that matches the options this script actually accepts
# (the previous message was copied from an unrelated "sysinfo_page" script).
usage()
{
    echo "usage: $usage_string"
}
# Parse command-line options: -p/--path selects the install prefix, -h/--help
# prints usage; anything else is an error.
while [ "$1" != "" ]; do
    case $1 in
        -p | --path )   shift
                        # BUG FIX: -p given with no value used to silently set an
                        # empty install_path; fail fast instead.
                        if [ -z "$1" ]; then
                            echo "error: $1 requires an argument" >&2
                            usage
                            exit 1
                        fi
                        install_path=$1
                        ;;
        -h | --help )   usage
                        exit
                        ;;
        * )             usage
                        exit 1
                        ;;
    esac
    shift
done
echo "Installing in: ${install_path:-/usr/local}"

echo "Install the required dependencies"
sudo apt install -y build-essential cmake git pkg-config libgtk-3-dev \
    libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \
    libxvidcore-dev libx264-dev libjpeg-dev libpng-dev libtiff-dev \
    gfortran openexr libatlas-base-dev python3-dev python3-numpy \
    libtbb2 libtbb-dev libdc1394-22-dev

echo "Downloading OpenCV repo & switching to 3.3.1 branch"
# mkdir -p so a re-run does not abort on an existing directory; every cd is
# guarded so later commands never execute in the wrong directory.
mkdir -p ~/opencv_build && cd ~/opencv_build || exit 1
git clone https://github.com/opencv/opencv.git
cd opencv && git checkout 3.3.1 && cd .. || exit 1
git clone https://github.com/opencv/opencv_contrib.git
cd opencv_contrib && git checkout 3.3.1 && cd .. || exit 1

echo "Building OpenCV"
cd ~/opencv_build/opencv || exit 1
mkdir -p build && cd build || exit 1
# BUG FIX: the option is OPENCV_ENABLE_NONFREE; the previous spelling
# OPENCV_ENABLED_NONFREE is an unknown CMake variable and was silently ignored.
cmake -D CMAKE_BUILD_TYPE=RELEASE \
    -D CMAKE_INSTALL_PREFIX="${install_path:-/usr/local}" \
    -D INSTALL_C_EXAMPLES=ON \
    -D INSTALL_PYTHON_EXAMPLES=ON \
    -D OPENCV_GENERATE_PKGCONFIG=ON \
    -D OPENCV_EXTRA_MODULES_PATH=~/opencv_build/opencv_contrib/modules \
    -D BUILD_EXAMPLES=ON \
    -D OPENCV_ENABLE_NONFREE=ON \
    -D ENABLE_PRECOMPILED_HEADERS=OFF ..
# Use the machine's core count instead of a hard-coded -j8.
make -j"$(nproc)"

echo "Installing OpenCV"
sudo make install

echo "Version installed:"
pkg-config --modversion opencv
|
<filename>pkg/controller/mizar/builtins.pb.go
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: builtins.proto
package mizar
import (
context "context"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
type CodeType int32
const (
CodeType_OK CodeType = 0
CodeType_TEMP_ERROR CodeType = 1
CodeType_PERM_ERROR CodeType = 2
)
var CodeType_name = map[int32]string{
0: "OK",
1: "TEMP_ERROR",
2: "PERM_ERROR",
}
var CodeType_value = map[string]int32{
"OK": 0,
"TEMP_ERROR": 1,
"PERM_ERROR": 2,
}
func (x CodeType) String() string {
return proto.EnumName(CodeType_name, int32(x))
}
func (CodeType) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{0}
}
type BuiltinsNodeMessage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Ip string `protobuf:"bytes,2,opt,name=ip,proto3" json:"ip,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BuiltinsNodeMessage) Reset() { *m = BuiltinsNodeMessage{} }
func (m *BuiltinsNodeMessage) String() string { return proto.CompactTextString(m) }
func (*BuiltinsNodeMessage) ProtoMessage() {}
func (*BuiltinsNodeMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{0}
}
func (m *BuiltinsNodeMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BuiltinsNodeMessage.Unmarshal(m, b)
}
func (m *BuiltinsNodeMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BuiltinsNodeMessage.Marshal(b, m, deterministic)
}
func (m *BuiltinsNodeMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_BuiltinsNodeMessage.Merge(m, src)
}
func (m *BuiltinsNodeMessage) XXX_Size() int {
return xxx_messageInfo_BuiltinsNodeMessage.Size(m)
}
func (m *BuiltinsNodeMessage) XXX_DiscardUnknown() {
xxx_messageInfo_BuiltinsNodeMessage.DiscardUnknown(m)
}
var xxx_messageInfo_BuiltinsNodeMessage proto.InternalMessageInfo
func (m *BuiltinsNodeMessage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BuiltinsNodeMessage) GetIp() string {
if m != nil {
return m.Ip
}
return ""
}
type BuiltinsPodMessage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
HostIp string `protobuf:"bytes,2,opt,name=host_ip,json=hostIp,proto3" json:"host_ip,omitempty"`
Namespace string `protobuf:"bytes,3,opt,name=namespace,proto3" json:"namespace,omitempty"`
Tenant string `protobuf:"bytes,4,opt,name=tenant,proto3" json:"tenant,omitempty"`
Labels string `protobuf:"bytes,5,opt,name=labels,proto3" json:"labels,omitempty"`
ArktosNetwork string `protobuf:"bytes,6,opt,name=arktos_network,json=arktosNetwork,proto3" json:"arktos_network,omitempty"`
Phase string `protobuf:"bytes,7,opt,name=phase,proto3" json:"phase,omitempty"`
Interfaces []*InterfacesMessage `protobuf:"bytes,8,rep,name=interfaces,proto3" json:"interfaces,omitempty"`
Vpc string `protobuf:"bytes,9,opt,name=vpc,proto3" json:"vpc,omitempty"`
Subnet string `protobuf:"bytes,10,opt,name=subnet,proto3" json:"subnet,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BuiltinsPodMessage) Reset() { *m = BuiltinsPodMessage{} }
func (m *BuiltinsPodMessage) String() string { return proto.CompactTextString(m) }
func (*BuiltinsPodMessage) ProtoMessage() {}
func (*BuiltinsPodMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{1}
}
func (m *BuiltinsPodMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BuiltinsPodMessage.Unmarshal(m, b)
}
func (m *BuiltinsPodMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BuiltinsPodMessage.Marshal(b, m, deterministic)
}
func (m *BuiltinsPodMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_BuiltinsPodMessage.Merge(m, src)
}
func (m *BuiltinsPodMessage) XXX_Size() int {
return xxx_messageInfo_BuiltinsPodMessage.Size(m)
}
func (m *BuiltinsPodMessage) XXX_DiscardUnknown() {
xxx_messageInfo_BuiltinsPodMessage.DiscardUnknown(m)
}
var xxx_messageInfo_BuiltinsPodMessage proto.InternalMessageInfo
func (m *BuiltinsPodMessage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BuiltinsPodMessage) GetHostIp() string {
if m != nil {
return m.HostIp
}
return ""
}
func (m *BuiltinsPodMessage) GetNamespace() string {
if m != nil {
return m.Namespace
}
return ""
}
func (m *BuiltinsPodMessage) GetTenant() string {
if m != nil {
return m.Tenant
}
return ""
}
func (m *BuiltinsPodMessage) GetLabels() string {
if m != nil {
return m.Labels
}
return ""
}
func (m *BuiltinsPodMessage) GetArktosNetwork() string {
if m != nil {
return m.ArktosNetwork
}
return ""
}
func (m *BuiltinsPodMessage) GetPhase() string {
if m != nil {
return m.Phase
}
return ""
}
func (m *BuiltinsPodMessage) GetInterfaces() []*InterfacesMessage {
if m != nil {
return m.Interfaces
}
return nil
}
func (m *BuiltinsPodMessage) GetVpc() string {
if m != nil {
return m.Vpc
}
return ""
}
func (m *BuiltinsPodMessage) GetSubnet() string {
if m != nil {
return m.Subnet
}
return ""
}
type BuiltinsServiceMessage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
ArktosNetwork string `protobuf:"bytes,2,opt,name=arktos_network,json=arktosNetwork,proto3" json:"arktos_network,omitempty"`
Namespace string `protobuf:"bytes,3,opt,name=namespace,proto3" json:"namespace,omitempty"`
Tenant string `protobuf:"bytes,4,opt,name=tenant,proto3" json:"tenant,omitempty"`
Ip string `protobuf:"bytes,5,opt,name=ip,proto3" json:"ip,omitempty"`
Vpc string `protobuf:"bytes,6,opt,name=vpc,proto3" json:"vpc,omitempty"`
Subnet string `protobuf:"bytes,7,opt,name=subnet,proto3" json:"subnet,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BuiltinsServiceMessage) Reset() { *m = BuiltinsServiceMessage{} }
func (m *BuiltinsServiceMessage) String() string { return proto.CompactTextString(m) }
func (*BuiltinsServiceMessage) ProtoMessage() {}
func (*BuiltinsServiceMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{2}
}
func (m *BuiltinsServiceMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BuiltinsServiceMessage.Unmarshal(m, b)
}
func (m *BuiltinsServiceMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BuiltinsServiceMessage.Marshal(b, m, deterministic)
}
func (m *BuiltinsServiceMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_BuiltinsServiceMessage.Merge(m, src)
}
func (m *BuiltinsServiceMessage) XXX_Size() int {
return xxx_messageInfo_BuiltinsServiceMessage.Size(m)
}
func (m *BuiltinsServiceMessage) XXX_DiscardUnknown() {
xxx_messageInfo_BuiltinsServiceMessage.DiscardUnknown(m)
}
var xxx_messageInfo_BuiltinsServiceMessage proto.InternalMessageInfo
func (m *BuiltinsServiceMessage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BuiltinsServiceMessage) GetArktosNetwork() string {
if m != nil {
return m.ArktosNetwork
}
return ""
}
func (m *BuiltinsServiceMessage) GetNamespace() string {
if m != nil {
return m.Namespace
}
return ""
}
func (m *BuiltinsServiceMessage) GetTenant() string {
if m != nil {
return m.Tenant
}
return ""
}
func (m *BuiltinsServiceMessage) GetIp() string {
if m != nil {
return m.Ip
}
return ""
}
func (m *BuiltinsServiceMessage) GetVpc() string {
if m != nil {
return m.Vpc
}
return ""
}
func (m *BuiltinsServiceMessage) GetSubnet() string {
if m != nil {
return m.Subnet
}
return ""
}
type BuiltinsServiceEndpointMessage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"`
Tenant string `protobuf:"bytes,3,opt,name=tenant,proto3" json:"tenant,omitempty"`
BackendIps []string `protobuf:"bytes,4,rep,name=backend_ips,json=backendIps,proto3" json:"backend_ips,omitempty"`
Ports []*PortsMessage `protobuf:"bytes,5,rep,name=ports,proto3" json:"ports,omitempty"`
BackendIpsJson string `protobuf:"bytes,6,opt,name=backend_ips_json,json=backendIpsJson,proto3" json:"backend_ips_json,omitempty"`
PortsJson string `protobuf:"bytes,7,opt,name=ports_json,json=portsJson,proto3" json:"ports_json,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BuiltinsServiceEndpointMessage) Reset() { *m = BuiltinsServiceEndpointMessage{} }
func (m *BuiltinsServiceEndpointMessage) String() string { return proto.CompactTextString(m) }
func (*BuiltinsServiceEndpointMessage) ProtoMessage() {}
func (*BuiltinsServiceEndpointMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{3}
}
func (m *BuiltinsServiceEndpointMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BuiltinsServiceEndpointMessage.Unmarshal(m, b)
}
func (m *BuiltinsServiceEndpointMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BuiltinsServiceEndpointMessage.Marshal(b, m, deterministic)
}
func (m *BuiltinsServiceEndpointMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_BuiltinsServiceEndpointMessage.Merge(m, src)
}
func (m *BuiltinsServiceEndpointMessage) XXX_Size() int {
return xxx_messageInfo_BuiltinsServiceEndpointMessage.Size(m)
}
func (m *BuiltinsServiceEndpointMessage) XXX_DiscardUnknown() {
xxx_messageInfo_BuiltinsServiceEndpointMessage.DiscardUnknown(m)
}
var xxx_messageInfo_BuiltinsServiceEndpointMessage proto.InternalMessageInfo
func (m *BuiltinsServiceEndpointMessage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BuiltinsServiceEndpointMessage) GetNamespace() string {
if m != nil {
return m.Namespace
}
return ""
}
func (m *BuiltinsServiceEndpointMessage) GetTenant() string {
if m != nil {
return m.Tenant
}
return ""
}
func (m *BuiltinsServiceEndpointMessage) GetBackendIps() []string {
if m != nil {
return m.BackendIps
}
return nil
}
func (m *BuiltinsServiceEndpointMessage) GetPorts() []*PortsMessage {
if m != nil {
return m.Ports
}
return nil
}
func (m *BuiltinsServiceEndpointMessage) GetBackendIpsJson() string {
if m != nil {
return m.BackendIpsJson
}
return ""
}
func (m *BuiltinsServiceEndpointMessage) GetPortsJson() string {
if m != nil {
return m.PortsJson
}
return ""
}
type BuiltinsArktosMessage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Vpc string `protobuf:"bytes,2,opt,name=vpc,proto3" json:"vpc,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BuiltinsArktosMessage) Reset() { *m = BuiltinsArktosMessage{} }
func (m *BuiltinsArktosMessage) String() string { return proto.CompactTextString(m) }
func (*BuiltinsArktosMessage) ProtoMessage() {}
func (*BuiltinsArktosMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{4}
}
func (m *BuiltinsArktosMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BuiltinsArktosMessage.Unmarshal(m, b)
}
func (m *BuiltinsArktosMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BuiltinsArktosMessage.Marshal(b, m, deterministic)
}
func (m *BuiltinsArktosMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_BuiltinsArktosMessage.Merge(m, src)
}
func (m *BuiltinsArktosMessage) XXX_Size() int {
return xxx_messageInfo_BuiltinsArktosMessage.Size(m)
}
func (m *BuiltinsArktosMessage) XXX_DiscardUnknown() {
xxx_messageInfo_BuiltinsArktosMessage.DiscardUnknown(m)
}
var xxx_messageInfo_BuiltinsArktosMessage proto.InternalMessageInfo
func (m *BuiltinsArktosMessage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BuiltinsArktosMessage) GetVpc() string {
if m != nil {
return m.Vpc
}
return ""
}
type BuiltinsNetworkPolicyMessage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"`
Tenant string `protobuf:"bytes,3,opt,name=tenant,proto3" json:"tenant,omitempty"`
Policy string `protobuf:"bytes,4,opt,name=policy,proto3" json:"policy,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BuiltinsNetworkPolicyMessage) Reset() { *m = BuiltinsNetworkPolicyMessage{} }
func (m *BuiltinsNetworkPolicyMessage) String() string { return proto.CompactTextString(m) }
func (*BuiltinsNetworkPolicyMessage) ProtoMessage() {}
func (*BuiltinsNetworkPolicyMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{5}
}
func (m *BuiltinsNetworkPolicyMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BuiltinsNetworkPolicyMessage.Unmarshal(m, b)
}
func (m *BuiltinsNetworkPolicyMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BuiltinsNetworkPolicyMessage.Marshal(b, m, deterministic)
}
func (m *BuiltinsNetworkPolicyMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_BuiltinsNetworkPolicyMessage.Merge(m, src)
}
func (m *BuiltinsNetworkPolicyMessage) XXX_Size() int {
return xxx_messageInfo_BuiltinsNetworkPolicyMessage.Size(m)
}
func (m *BuiltinsNetworkPolicyMessage) XXX_DiscardUnknown() {
xxx_messageInfo_BuiltinsNetworkPolicyMessage.DiscardUnknown(m)
}
var xxx_messageInfo_BuiltinsNetworkPolicyMessage proto.InternalMessageInfo
func (m *BuiltinsNetworkPolicyMessage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BuiltinsNetworkPolicyMessage) GetNamespace() string {
if m != nil {
return m.Namespace
}
return ""
}
func (m *BuiltinsNetworkPolicyMessage) GetTenant() string {
if m != nil {
return m.Tenant
}
return ""
}
func (m *BuiltinsNetworkPolicyMessage) GetPolicy() string {
if m != nil {
return m.Policy
}
return ""
}
type BuiltinsNamespaceMessage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Tenant string `protobuf:"bytes,2,opt,name=tenant,proto3" json:"tenant,omitempty"`
Labels string `protobuf:"bytes,3,opt,name=labels,proto3" json:"labels,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *BuiltinsNamespaceMessage) Reset() { *m = BuiltinsNamespaceMessage{} }
func (m *BuiltinsNamespaceMessage) String() string { return proto.CompactTextString(m) }
func (*BuiltinsNamespaceMessage) ProtoMessage() {}
func (*BuiltinsNamespaceMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{6}
}
func (m *BuiltinsNamespaceMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_BuiltinsNamespaceMessage.Unmarshal(m, b)
}
func (m *BuiltinsNamespaceMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_BuiltinsNamespaceMessage.Marshal(b, m, deterministic)
}
func (m *BuiltinsNamespaceMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_BuiltinsNamespaceMessage.Merge(m, src)
}
func (m *BuiltinsNamespaceMessage) XXX_Size() int {
return xxx_messageInfo_BuiltinsNamespaceMessage.Size(m)
}
func (m *BuiltinsNamespaceMessage) XXX_DiscardUnknown() {
xxx_messageInfo_BuiltinsNamespaceMessage.DiscardUnknown(m)
}
var xxx_messageInfo_BuiltinsNamespaceMessage proto.InternalMessageInfo
func (m *BuiltinsNamespaceMessage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *BuiltinsNamespaceMessage) GetTenant() string {
if m != nil {
return m.Tenant
}
return ""
}
func (m *BuiltinsNamespaceMessage) GetLabels() string {
if m != nil {
return m.Labels
}
return ""
}
type PortsMessage struct {
FrontendPort string `protobuf:"bytes,1,opt,name=frontend_port,json=frontendPort,proto3" json:"frontend_port,omitempty"`
BackendPort string `protobuf:"bytes,2,opt,name=backend_port,json=backendPort,proto3" json:"backend_port,omitempty"`
Protocol string `protobuf:"bytes,3,opt,name=protocol,proto3" json:"protocol,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *PortsMessage) Reset() { *m = PortsMessage{} }
func (m *PortsMessage) String() string { return proto.CompactTextString(m) }
func (*PortsMessage) ProtoMessage() {}
func (*PortsMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{7}
}
func (m *PortsMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_PortsMessage.Unmarshal(m, b)
}
func (m *PortsMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_PortsMessage.Marshal(b, m, deterministic)
}
func (m *PortsMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_PortsMessage.Merge(m, src)
}
func (m *PortsMessage) XXX_Size() int {
return xxx_messageInfo_PortsMessage.Size(m)
}
func (m *PortsMessage) XXX_DiscardUnknown() {
xxx_messageInfo_PortsMessage.DiscardUnknown(m)
}
var xxx_messageInfo_PortsMessage proto.InternalMessageInfo
func (m *PortsMessage) GetFrontendPort() string {
if m != nil {
return m.FrontendPort
}
return ""
}
func (m *PortsMessage) GetBackendPort() string {
if m != nil {
return m.BackendPort
}
return ""
}
func (m *PortsMessage) GetProtocol() string {
if m != nil {
return m.Protocol
}
return ""
}
type InterfacesMessage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Ip string `protobuf:"bytes,2,opt,name=ip,proto3" json:"ip,omitempty"`
Subnet string `protobuf:"bytes,3,opt,name=subnet,proto3" json:"subnet,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *InterfacesMessage) Reset() { *m = InterfacesMessage{} }
func (m *InterfacesMessage) String() string { return proto.CompactTextString(m) }
func (*InterfacesMessage) ProtoMessage() {}
func (*InterfacesMessage) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{8}
}
func (m *InterfacesMessage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_InterfacesMessage.Unmarshal(m, b)
}
func (m *InterfacesMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_InterfacesMessage.Marshal(b, m, deterministic)
}
func (m *InterfacesMessage) XXX_Merge(src proto.Message) {
xxx_messageInfo_InterfacesMessage.Merge(m, src)
}
func (m *InterfacesMessage) XXX_Size() int {
return xxx_messageInfo_InterfacesMessage.Size(m)
}
func (m *InterfacesMessage) XXX_DiscardUnknown() {
xxx_messageInfo_InterfacesMessage.DiscardUnknown(m)
}
var xxx_messageInfo_InterfacesMessage proto.InternalMessageInfo
func (m *InterfacesMessage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *InterfacesMessage) GetIp() string {
if m != nil {
return m.Ip
}
return ""
}
func (m *InterfacesMessage) GetSubnet() string {
if m != nil {
return m.Subnet
}
return ""
}
type ReturnCode struct {
Code CodeType `protobuf:"varint,1,opt,name=code,proto3,enum=mizar.CodeType" json:"code,omitempty"`
Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ReturnCode) Reset() { *m = ReturnCode{} }
func (m *ReturnCode) String() string { return proto.CompactTextString(m) }
func (*ReturnCode) ProtoMessage() {}
func (*ReturnCode) Descriptor() ([]byte, []int) {
return fileDescriptor_54ab063ef2b175be, []int{9}
}
func (m *ReturnCode) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ReturnCode.Unmarshal(m, b)
}
func (m *ReturnCode) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ReturnCode.Marshal(b, m, deterministic)
}
func (m *ReturnCode) XXX_Merge(src proto.Message) {
xxx_messageInfo_ReturnCode.Merge(m, src)
}
func (m *ReturnCode) XXX_Size() int {
return xxx_messageInfo_ReturnCode.Size(m)
}
func (m *ReturnCode) XXX_DiscardUnknown() {
xxx_messageInfo_ReturnCode.DiscardUnknown(m)
}
var xxx_messageInfo_ReturnCode proto.InternalMessageInfo
func (m *ReturnCode) GetCode() CodeType {
if m != nil {
return m.Code
}
return CodeType_OK
}
func (m *ReturnCode) GetMessage() string {
if m != nil {
return m.Message
}
return ""
}
func init() {
proto.RegisterEnum("mizar.CodeType", CodeType_name, CodeType_value)
proto.RegisterType((*BuiltinsNodeMessage)(nil), "mizar.BuiltinsNodeMessage")
proto.RegisterType((*BuiltinsPodMessage)(nil), "mizar.BuiltinsPodMessage")
proto.RegisterType((*BuiltinsServiceMessage)(nil), "mizar.BuiltinsServiceMessage")
proto.RegisterType((*BuiltinsServiceEndpointMessage)(nil), "mizar.BuiltinsServiceEndpointMessage")
proto.RegisterType((*BuiltinsArktosMessage)(nil), "mizar.BuiltinsArktosMessage")
proto.RegisterType((*BuiltinsNetworkPolicyMessage)(nil), "mizar.BuiltinsNetworkPolicyMessage")
proto.RegisterType((*BuiltinsNamespaceMessage)(nil), "mizar.BuiltinsNamespaceMessage")
proto.RegisterType((*PortsMessage)(nil), "mizar.PortsMessage")
proto.RegisterType((*InterfacesMessage)(nil), "mizar.InterfacesMessage")
proto.RegisterType((*ReturnCode)(nil), "mizar.ReturnCode")
}
func init() { proto.RegisterFile("builtins.proto", fileDescriptor_54ab063ef2b175be) }
var fileDescriptor_54ab063ef2b175be = []byte{
// 800 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xdd, 0x4e, 0xdb, 0x48,
0x14, 0x26, 0xce, 0x1f, 0x39, 0x90, 0x10, 0x86, 0x85, 0xf5, 0x22, 0x58, 0x58, 0x23, 0xa4, 0xec,
0x5e, 0x70, 0x91, 0xbd, 0xd9, 0x55, 0xc5, 0x05, 0xa5, 0x51, 0x4b, 0x11, 0x24, 0x32, 0xf4, 0xb6,
0x91, 0x63, 0x0f, 0xc5, 0x25, 0xf1, 0x58, 0x1e, 0x87, 0x8a, 0x5e, 0xf5, 0x91, 0xfa, 0x08, 0x95,
0xfa, 0x60, 0xad, 0x66, 0xce, 0x4c, 0x12, 0x9b, 0x38, 0xaa, 0x92, 0xdc, 0xf9, 0xfc, 0x7d, 0x73,
0xbe, 0xef, 0xcc, 0x8f, 0xa1, 0xd6, 0x1b, 0xfa, 0xfd, 0xd8, 0x0f, 0xf8, 0x49, 0x18, 0xb1, 0x98,
0x91, 0xe2, 0xc0, 0xff, 0xec, 0x44, 0xd6, 0xff, 0xb0, 0xf5, 0x52, 0x05, 0xae, 0x99, 0x47, 0xaf,
0x28, 0xe7, 0xce, 0x07, 0x4a, 0x08, 0x14, 0x02, 0x67, 0x40, 0xcd, 0xdc, 0x61, 0xae, 0x51, 0xb1,
0xe5, 0x37, 0xa9, 0x81, 0xe1, 0x87, 0xa6, 0x21, 0x3d, 0x86, 0x1f, 0x5a, 0x5f, 0x0d, 0x20, 0xba,
0xb6, 0xc3, 0xbc, 0x59, 0xa5, 0xbf, 0x43, 0xf9, 0x9e, 0xf1, 0xb8, 0x3b, 0xaa, 0x2f, 0x09, 0xf3,
0x22, 0x24, 0x7b, 0x50, 0x11, 0x09, 0x3c, 0x74, 0x5c, 0x6a, 0xe6, 0x65, 0x68, 0xec, 0x20, 0x3b,
0x50, 0x8a, 0x69, 0xe0, 0x04, 0xb1, 0x59, 0xc0, 0x2a, 0xb4, 0x84, 0xbf, 0xef, 0xf4, 0x68, 0x9f,
0x9b, 0x45, 0xf4, 0xa3, 0x45, 0x8e, 0xa1, 0xe6, 0x44, 0x0f, 0x31, 0xe3, 0xdd, 0x80, 0xc6, 0x9f,
0x58, 0xf4, 0x60, 0x96, 0x64, 0xbc, 0x8a, 0xde, 0x6b, 0x74, 0x92, 0xdf, 0xa0, 0x18, 0xde, 0x3b,
0x9c, 0x9a, 0x65, 0x19, 0x45, 0x83, 0xfc, 0x07, 0xe0, 0x07, 0x31, 0x8d, 0xee, 0x1c, 0x97, 0x72,
0x73, 0xf5, 0x30, 0xdf, 0x58, 0x6b, 0x9a, 0x27, 0x52, 0xa5, 0x93, 0x8b, 0x51, 0x40, 0xb1, 0xb4,
0x27, 0x72, 0x49, 0x1d, 0xf2, 0x8f, 0xa1, 0x6b, 0x56, 0x24, 0x9a, 0xf8, 0x14, 0x0d, 0xf2, 0x61,
0x2f, 0xa0, 0xb1, 0x09, 0xd8, 0x20, 0x5a, 0xd6, 0xf7, 0x1c, 0xec, 0x68, 0xc9, 0x6e, 0x68, 0xf4,
0xe8, 0xbb, 0x33, 0x15, 0x7f, 0xce, 0xc7, 0x98, 0xc6, 0x67, 0x3e, 0x11, 0x71, 0x9c, 0x45, 0x3d,
0x4e, 0xcd, 0xa2, 0x34, 0x8d, 0x45, 0x39, 0xc1, 0xe2, 0x47, 0x0e, 0xfe, 0x4c, 0xb1, 0x68, 0x05,
0x5e, 0xc8, 0xfc, 0x20, 0x9e, 0xc5, 0x26, 0xd1, 0xa6, 0x91, 0xdd, 0x66, 0x3e, 0xd1, 0xe6, 0x01,
0xac, 0xf5, 0x1c, 0xf7, 0x81, 0x06, 0x5e, 0xd7, 0x0f, 0xb9, 0x59, 0x38, 0xcc, 0x37, 0x2a, 0x36,
0x28, 0xd7, 0x45, 0xc8, 0xc9, 0xdf, 0x50, 0x0c, 0x59, 0x14, 0x8b, 0xbd, 0x20, 0x46, 0xb6, 0xa5,
0x46, 0xd6, 0x11, 0x3e, 0x3d, 0x2d, 0xcc, 0x20, 0x0d, 0xa8, 0x4f, 0x60, 0x75, 0x3f, 0x72, 0x16,
0x28, 0xbe, 0xb5, 0x31, 0xe0, 0x5b, 0xce, 0x02, 0xb2, 0x0f, 0x20, 0x4b, 0x30, 0x07, 0xe9, 0x57,
0xa4, 0x47, 0x84, 0xad, 0x53, 0xd8, 0xd6, 0x02, 0x9c, 0xc9, 0x51, 0xcc, 0xe2, 0xad, 0x84, 0x35,
0x46, 0xc2, 0x5a, 0x5f, 0x72, 0xb0, 0x37, 0x3a, 0x75, 0x38, 0xc4, 0x0e, 0xeb, 0xfb, 0xee, 0xd3,
0xf2, 0xe5, 0xdb, 0x81, 0x52, 0x28, 0xa1, 0xf5, 0xf4, 0xd1, 0xb2, 0xde, 0x83, 0x39, 0xea, 0x40,
0x83, 0xcc, 0x5a, 0x7d, 0x8c, 0x6f, 0x64, 0x1c, 0xc5, 0xfc, 0xe4, 0x51, 0xb4, 0x22, 0x58, 0x9f,
0x9c, 0x00, 0x39, 0x82, 0xea, 0x5d, 0xc4, 0x82, 0x58, 0x68, 0x2f, 0x74, 0x54, 0xe0, 0xeb, 0xda,
0x29, 0x92, 0xc9, 0x5f, 0xb0, 0xae, 0xe7, 0x23, 0x73, 0x70, 0x29, 0x3d, 0x7f, 0x99, 0xb2, 0x0b,
0xab, 0xf2, 0xfe, 0x72, 0x59, 0x5f, 0xad, 0x38, 0xb2, 0xad, 0x36, 0x6c, 0x3e, 0x3b, 0xa8, 0xbf,
0x72, 0x93, 0x4d, 0x6c, 0xf4, 0x7c, 0x62, 0xa3, 0x5f, 0x02, 0xd8, 0x34, 0x1e, 0x46, 0xc1, 0x39,
0xf3, 0x04, 0x85, 0x82, 0xcb, 0x3c, 0x44, 0xaa, 0x35, 0x37, 0xd4, 0x3e, 0x13, 0xa1, 0xdb, 0xa7,
0x90, 0xda, 0x32, 0x48, 0x4c, 0x28, 0x0f, 0x70, 0x65, 0x85, 0xaf, 0xcd, 0x7f, 0x9a, 0xb0, 0xaa,
0x73, 0x49, 0x09, 0x8c, 0xf6, 0x65, 0x7d, 0x85, 0xd4, 0x00, 0x6e, 0x5b, 0x57, 0x9d, 0x6e, 0xcb,
0xb6, 0xdb, 0x76, 0x3d, 0x27, 0xec, 0x4e, 0xcb, 0xbe, 0x52, 0xb6, 0xd1, 0xfc, 0x56, 0x85, 0x8d,
0xd4, 0x49, 0x23, 0xe7, 0x50, 0x3d, 0x8f, 0xa8, 0x13, 0x53, 0xed, 0xd8, 0x57, 0x9d, 0x4c, 0xbf,
0x58, 0x76, 0x37, 0x55, 0x78, 0xcc, 0xc4, 0x5a, 0x11, 0x20, 0xef, 0x42, 0x6f, 0x71, 0x10, 0x9b,
0xf2, 0xe1, 0x60, 0x51, 0x90, 0x57, 0xb4, 0x4f, 0x17, 0xeb, 0xe4, 0x06, 0xb6, 0x13, 0x74, 0xf4,
0x75, 0x44, 0x8e, 0xa7, 0x83, 0xa5, 0xae, 0xab, 0x4c, 0xd0, 0x04, 0xbd, 0x65, 0x81, 0x26, 0xa6,
0xb7, 0x14, 0xd0, 0x17, 0x50, 0xc1, 0x4e, 0x3b, 0xcc, 0x23, 0x7f, 0xa4, 0x80, 0xc6, 0x4f, 0x73,
0x66, 0x31, 0x6a, 0x37, 0x67, 0x31, 0xd2, 0x99, 0xb3, 0x18, 0x47, 0x3f, 0x4f, 0xf1, 0x29, 0x00,
0xae, 0x2c, 0x7e, 0x5b, 0xc8, 0x6e, 0xaa, 0x7a, 0xe2, 0x5f, 0x26, 0xb3, 0x1c, 0x25, 0x9b, 0xbb,
0x1c, 0x45, 0x9b, 0xbb, 0x1c, 0x99, 0xcf, 0x57, 0xfe, 0x06, 0xb6, 0x90, 0xfb, 0x59, 0xf2, 0x3f,
0x20, 0x85, 0x93, 0x78, 0x9a, 0x32, 0x91, 0x50, 0x86, 0x65, 0x20, 0xa1, 0x22, 0x0b, 0x23, 0xb5,
0x35, 0x52, 0xe2, 0x69, 0x24, 0x47, 0x69, 0x95, 0xa6, 0x3c, 0x9c, 0x99, 0x80, 0x6a, 0xab, 0x2c,
0x0f, 0x50, 0x8d, 0x6f, 0x49, 0x80, 0xaf, 0x61, 0x43, 0x51, 0x1e, 0x3d, 0xe8, 0x07, 0x69, 0xb0,
0xd4, 0x2b, 0x9d, 0x09, 0xa4, 0xa8, 0x2e, 0x0e, 0xa4, 0x28, 0x2e, 0x06, 0xd4, 0x2b, 0xc9, 0xe7,
0xf9, 0xdf, 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xdf, 0x33, 0x75, 0x97, 0x80, 0x0c, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var (
	_ context.Context
	_ grpc.ClientConn
)

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// BuiltinsServiceClient is the client API for BuiltinsService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
//
// Every RPC takes one builtin-resource message and returns a *ReturnCode
// describing how the network controller handled the request.
type BuiltinsServiceClient interface {
	// Service lifecycle.
	// For Services/Service Endpoints, network controller may want to annotate the Endpoints.
	// If Endpoints are not annotated, there will be many updates from unwanted endpoints.
	CreateService(ctx context.Context, in *BuiltinsServiceMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	UpdateService(ctx context.Context, in *BuiltinsServiceMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	ResumeService(ctx context.Context, in *BuiltinsServiceMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	DeleteService(ctx context.Context, in *BuiltinsServiceMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	// Service-endpoint lifecycle (note: no Delete variant is defined in the proto).
	UpdateServiceEndpoint(ctx context.Context, in *BuiltinsServiceEndpointMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	ResumeServiceEndpoint(ctx context.Context, in *BuiltinsServiceEndpointMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	CreateServiceEndpoint(ctx context.Context, in *BuiltinsServiceEndpointMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	// Pod lifecycle.
	ResumePod(ctx context.Context, in *BuiltinsPodMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	UpdatePod(ctx context.Context, in *BuiltinsPodMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	CreatePod(ctx context.Context, in *BuiltinsPodMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	DeletePod(ctx context.Context, in *BuiltinsPodMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	// Node lifecycle.
	CreateNode(ctx context.Context, in *BuiltinsNodeMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	ResumeNode(ctx context.Context, in *BuiltinsNodeMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	UpdateNode(ctx context.Context, in *BuiltinsNodeMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	DeleteNode(ctx context.Context, in *BuiltinsNodeMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	// Arktos network lifecycle (note: no Delete variant is defined in the proto).
	CreateArktosNetwork(ctx context.Context, in *BuiltinsArktosMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	ResumeArktosNetwork(ctx context.Context, in *BuiltinsArktosMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	UpdateArktosNetwork(ctx context.Context, in *BuiltinsArktosMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	// Network-policy lifecycle (note: no Resume variant is defined in the proto).
	UpdateNetworkPolicy(ctx context.Context, in *BuiltinsNetworkPolicyMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	CreateNetworkPolicy(ctx context.Context, in *BuiltinsNetworkPolicyMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	DeleteNetworkPolicy(ctx context.Context, in *BuiltinsNetworkPolicyMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	// Namespace lifecycle (note: no Resume variant is defined in the proto).
	UpdateNamespace(ctx context.Context, in *BuiltinsNamespaceMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	CreateNamespace(ctx context.Context, in *BuiltinsNamespaceMessage, opts ...grpc.CallOption) (*ReturnCode, error)
	DeleteNamespace(ctx context.Context, in *BuiltinsNamespaceMessage, opts ...grpc.CallOption) (*ReturnCode, error)
}
// builtinsServiceClient is the concrete BuiltinsServiceClient implementation;
// every RPC is issued over the wrapped gRPC client connection.
type builtinsServiceClient struct {
	cc *grpc.ClientConn
}
// NewBuiltinsServiceClient wraps an established gRPC connection in a
// BuiltinsService client stub. The caller retains ownership of cc.
func NewBuiltinsServiceClient(cc *grpc.ClientConn) BuiltinsServiceClient {
	return &builtinsServiceClient{cc: cc}
}
// Client-side stubs: each method allocates a ReturnCode, performs the unary
// invoke against the corresponding /mizar.BuiltinsService method, and returns
// either the decoded response or the transport/remote error unchanged.

func (c *builtinsServiceClient) CreateService(ctx context.Context, in *BuiltinsServiceMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/CreateService", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) UpdateService(ctx context.Context, in *BuiltinsServiceMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/UpdateService", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) ResumeService(ctx context.Context, in *BuiltinsServiceMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/ResumeService", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) DeleteService(ctx context.Context, in *BuiltinsServiceMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/DeleteService", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) UpdateServiceEndpoint(ctx context.Context, in *BuiltinsServiceEndpointMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/UpdateServiceEndpoint", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) ResumeServiceEndpoint(ctx context.Context, in *BuiltinsServiceEndpointMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/ResumeServiceEndpoint", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) CreateServiceEndpoint(ctx context.Context, in *BuiltinsServiceEndpointMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/CreateServiceEndpoint", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) ResumePod(ctx context.Context, in *BuiltinsPodMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/ResumePod", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) UpdatePod(ctx context.Context, in *BuiltinsPodMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/UpdatePod", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) CreatePod(ctx context.Context, in *BuiltinsPodMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/CreatePod", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) DeletePod(ctx context.Context, in *BuiltinsPodMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/DeletePod", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) CreateNode(ctx context.Context, in *BuiltinsNodeMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/CreateNode", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) ResumeNode(ctx context.Context, in *BuiltinsNodeMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/ResumeNode", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) UpdateNode(ctx context.Context, in *BuiltinsNodeMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/UpdateNode", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) DeleteNode(ctx context.Context, in *BuiltinsNodeMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/DeleteNode", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) CreateArktosNetwork(ctx context.Context, in *BuiltinsArktosMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/CreateArktosNetwork", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) ResumeArktosNetwork(ctx context.Context, in *BuiltinsArktosMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/ResumeArktosNetwork", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) UpdateArktosNetwork(ctx context.Context, in *BuiltinsArktosMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/UpdateArktosNetwork", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) UpdateNetworkPolicy(ctx context.Context, in *BuiltinsNetworkPolicyMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/UpdateNetworkPolicy", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) CreateNetworkPolicy(ctx context.Context, in *BuiltinsNetworkPolicyMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/CreateNetworkPolicy", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) DeleteNetworkPolicy(ctx context.Context, in *BuiltinsNetworkPolicyMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/DeleteNetworkPolicy", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) UpdateNamespace(ctx context.Context, in *BuiltinsNamespaceMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/UpdateNamespace", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) CreateNamespace(ctx context.Context, in *BuiltinsNamespaceMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/CreateNamespace", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}

func (c *builtinsServiceClient) DeleteNamespace(ctx context.Context, in *BuiltinsNamespaceMessage, opts ...grpc.CallOption) (*ReturnCode, error) {
	resp := new(ReturnCode)
	if err := c.cc.Invoke(ctx, "/mizar.BuiltinsService/DeleteNamespace", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// BuiltinsServiceServer is the server API for BuiltinsService service.
//
// Implementations mirror BuiltinsServiceClient one-to-one: each method receives
// one builtin-resource message and answers with a *ReturnCode.
type BuiltinsServiceServer interface {
	// Service lifecycle.
	// For Services/Service Endpoints, network controller may want to annotate the Endpoints.
	// If Endpoints are not annotated, there will be many updates from unwanted endpoints.
	CreateService(context.Context, *BuiltinsServiceMessage) (*ReturnCode, error)
	UpdateService(context.Context, *BuiltinsServiceMessage) (*ReturnCode, error)
	ResumeService(context.Context, *BuiltinsServiceMessage) (*ReturnCode, error)
	DeleteService(context.Context, *BuiltinsServiceMessage) (*ReturnCode, error)
	// Service-endpoint lifecycle.
	UpdateServiceEndpoint(context.Context, *BuiltinsServiceEndpointMessage) (*ReturnCode, error)
	ResumeServiceEndpoint(context.Context, *BuiltinsServiceEndpointMessage) (*ReturnCode, error)
	CreateServiceEndpoint(context.Context, *BuiltinsServiceEndpointMessage) (*ReturnCode, error)
	// Pod lifecycle.
	ResumePod(context.Context, *BuiltinsPodMessage) (*ReturnCode, error)
	UpdatePod(context.Context, *BuiltinsPodMessage) (*ReturnCode, error)
	CreatePod(context.Context, *BuiltinsPodMessage) (*ReturnCode, error)
	DeletePod(context.Context, *BuiltinsPodMessage) (*ReturnCode, error)
	// Node lifecycle.
	CreateNode(context.Context, *BuiltinsNodeMessage) (*ReturnCode, error)
	ResumeNode(context.Context, *BuiltinsNodeMessage) (*ReturnCode, error)
	UpdateNode(context.Context, *BuiltinsNodeMessage) (*ReturnCode, error)
	DeleteNode(context.Context, *BuiltinsNodeMessage) (*ReturnCode, error)
	// Arktos network lifecycle.
	CreateArktosNetwork(context.Context, *BuiltinsArktosMessage) (*ReturnCode, error)
	ResumeArktosNetwork(context.Context, *BuiltinsArktosMessage) (*ReturnCode, error)
	UpdateArktosNetwork(context.Context, *BuiltinsArktosMessage) (*ReturnCode, error)
	// Network-policy lifecycle.
	UpdateNetworkPolicy(context.Context, *BuiltinsNetworkPolicyMessage) (*ReturnCode, error)
	CreateNetworkPolicy(context.Context, *BuiltinsNetworkPolicyMessage) (*ReturnCode, error)
	DeleteNetworkPolicy(context.Context, *BuiltinsNetworkPolicyMessage) (*ReturnCode, error)
	// Namespace lifecycle.
	UpdateNamespace(context.Context, *BuiltinsNamespaceMessage) (*ReturnCode, error)
	CreateNamespace(context.Context, *BuiltinsNamespaceMessage) (*ReturnCode, error)
	DeleteNamespace(context.Context, *BuiltinsNamespaceMessage) (*ReturnCode, error)
}
// UnimplementedBuiltinsServiceServer can be embedded to have forward compatible implementations.
//
// Every stub below rejects the call with codes.Unimplemented, so an embedding
// server only needs to override the RPCs it actually supports.
type UnimplementedBuiltinsServiceServer struct {
}

func (*UnimplementedBuiltinsServiceServer) CreateService(ctx context.Context, req *BuiltinsServiceMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreateService not implemented")
}
func (*UnimplementedBuiltinsServiceServer) UpdateService(ctx context.Context, req *BuiltinsServiceMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method UpdateService not implemented")
}
func (*UnimplementedBuiltinsServiceServer) ResumeService(ctx context.Context, req *BuiltinsServiceMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ResumeService not implemented")
}
func (*UnimplementedBuiltinsServiceServer) DeleteService(ctx context.Context, req *BuiltinsServiceMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeleteService not implemented")
}
func (*UnimplementedBuiltinsServiceServer) UpdateServiceEndpoint(ctx context.Context, req *BuiltinsServiceEndpointMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method UpdateServiceEndpoint not implemented")
}
func (*UnimplementedBuiltinsServiceServer) ResumeServiceEndpoint(ctx context.Context, req *BuiltinsServiceEndpointMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ResumeServiceEndpoint not implemented")
}
func (*UnimplementedBuiltinsServiceServer) CreateServiceEndpoint(ctx context.Context, req *BuiltinsServiceEndpointMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreateServiceEndpoint not implemented")
}
func (*UnimplementedBuiltinsServiceServer) ResumePod(ctx context.Context, req *BuiltinsPodMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ResumePod not implemented")
}
func (*UnimplementedBuiltinsServiceServer) UpdatePod(ctx context.Context, req *BuiltinsPodMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method UpdatePod not implemented")
}
func (*UnimplementedBuiltinsServiceServer) CreatePod(ctx context.Context, req *BuiltinsPodMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreatePod not implemented")
}
func (*UnimplementedBuiltinsServiceServer) DeletePod(ctx context.Context, req *BuiltinsPodMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeletePod not implemented")
}
func (*UnimplementedBuiltinsServiceServer) CreateNode(ctx context.Context, req *BuiltinsNodeMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreateNode not implemented")
}
func (*UnimplementedBuiltinsServiceServer) ResumeNode(ctx context.Context, req *BuiltinsNodeMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ResumeNode not implemented")
}
func (*UnimplementedBuiltinsServiceServer) UpdateNode(ctx context.Context, req *BuiltinsNodeMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method UpdateNode not implemented")
}
func (*UnimplementedBuiltinsServiceServer) DeleteNode(ctx context.Context, req *BuiltinsNodeMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeleteNode not implemented")
}
func (*UnimplementedBuiltinsServiceServer) CreateArktosNetwork(ctx context.Context, req *BuiltinsArktosMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreateArktosNetwork not implemented")
}
func (*UnimplementedBuiltinsServiceServer) ResumeArktosNetwork(ctx context.Context, req *BuiltinsArktosMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ResumeArktosNetwork not implemented")
}
func (*UnimplementedBuiltinsServiceServer) UpdateArktosNetwork(ctx context.Context, req *BuiltinsArktosMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method UpdateArktosNetwork not implemented")
}
func (*UnimplementedBuiltinsServiceServer) UpdateNetworkPolicy(ctx context.Context, req *BuiltinsNetworkPolicyMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method UpdateNetworkPolicy not implemented")
}
func (*UnimplementedBuiltinsServiceServer) CreateNetworkPolicy(ctx context.Context, req *BuiltinsNetworkPolicyMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreateNetworkPolicy not implemented")
}
func (*UnimplementedBuiltinsServiceServer) DeleteNetworkPolicy(ctx context.Context, req *BuiltinsNetworkPolicyMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeleteNetworkPolicy not implemented")
}
func (*UnimplementedBuiltinsServiceServer) UpdateNamespace(ctx context.Context, req *BuiltinsNamespaceMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method UpdateNamespace not implemented")
}
func (*UnimplementedBuiltinsServiceServer) CreateNamespace(ctx context.Context, req *BuiltinsNamespaceMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreateNamespace not implemented")
}
func (*UnimplementedBuiltinsServiceServer) DeleteNamespace(ctx context.Context, req *BuiltinsNamespaceMessage) (*ReturnCode, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeleteNamespace not implemented")
}
// RegisterBuiltinsServiceServer registers srv's method handlers with the gRPC
// server s via the generated service descriptor. Must be called before s starts
// serving.
func RegisterBuiltinsServiceServer(s *grpc.Server, srv BuiltinsServiceServer) {
	s.RegisterService(&_BuiltinsService_serviceDesc, srv)
}
func _BuiltinsService_CreateService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsServiceMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).CreateService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/CreateService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).CreateService(ctx, req.(*BuiltinsServiceMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_UpdateService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsServiceMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).UpdateService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/UpdateService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).UpdateService(ctx, req.(*BuiltinsServiceMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_ResumeService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsServiceMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).ResumeService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/ResumeService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).ResumeService(ctx, req.(*BuiltinsServiceMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_DeleteService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsServiceMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).DeleteService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/DeleteService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).DeleteService(ctx, req.(*BuiltinsServiceMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_UpdateServiceEndpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsServiceEndpointMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).UpdateServiceEndpoint(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/UpdateServiceEndpoint",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).UpdateServiceEndpoint(ctx, req.(*BuiltinsServiceEndpointMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_ResumeServiceEndpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsServiceEndpointMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).ResumeServiceEndpoint(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/ResumeServiceEndpoint",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).ResumeServiceEndpoint(ctx, req.(*BuiltinsServiceEndpointMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_CreateServiceEndpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsServiceEndpointMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).CreateServiceEndpoint(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/CreateServiceEndpoint",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).CreateServiceEndpoint(ctx, req.(*BuiltinsServiceEndpointMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_ResumePod_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsPodMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).ResumePod(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/ResumePod",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).ResumePod(ctx, req.(*BuiltinsPodMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_UpdatePod_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsPodMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).UpdatePod(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/UpdatePod",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).UpdatePod(ctx, req.(*BuiltinsPodMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_CreatePod_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsPodMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).CreatePod(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/CreatePod",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).CreatePod(ctx, req.(*BuiltinsPodMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_DeletePod_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsPodMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).DeletePod(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/DeletePod",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).DeletePod(ctx, req.(*BuiltinsPodMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_CreateNode_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsNodeMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).CreateNode(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/CreateNode",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).CreateNode(ctx, req.(*BuiltinsNodeMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_ResumeNode_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsNodeMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).ResumeNode(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/ResumeNode",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).ResumeNode(ctx, req.(*BuiltinsNodeMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_UpdateNode_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsNodeMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).UpdateNode(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/UpdateNode",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).UpdateNode(ctx, req.(*BuiltinsNodeMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_DeleteNode_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsNodeMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).DeleteNode(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/DeleteNode",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).DeleteNode(ctx, req.(*BuiltinsNodeMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_CreateArktosNetwork_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsArktosMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).CreateArktosNetwork(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/CreateArktosNetwork",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).CreateArktosNetwork(ctx, req.(*BuiltinsArktosMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_ResumeArktosNetwork_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsArktosMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).ResumeArktosNetwork(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/ResumeArktosNetwork",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).ResumeArktosNetwork(ctx, req.(*BuiltinsArktosMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_UpdateArktosNetwork_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsArktosMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).UpdateArktosNetwork(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/UpdateArktosNetwork",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).UpdateArktosNetwork(ctx, req.(*BuiltinsArktosMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_UpdateNetworkPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsNetworkPolicyMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).UpdateNetworkPolicy(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/UpdateNetworkPolicy",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).UpdateNetworkPolicy(ctx, req.(*BuiltinsNetworkPolicyMessage))
}
return interceptor(ctx, in, info, handler)
}
func _BuiltinsService_CreateNetworkPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BuiltinsNetworkPolicyMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BuiltinsServiceServer).CreateNetworkPolicy(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/mizar.BuiltinsService/CreateNetworkPolicy",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BuiltinsServiceServer).CreateNetworkPolicy(ctx, req.(*BuiltinsNetworkPolicyMessage))
}
return interceptor(ctx, in, info, handler)
}
// _BuiltinsService_DeleteNetworkPolicy_Handler is the server dispatch shim for the
// BuiltinsService.DeleteNetworkPolicy RPC (protoc-generated code; do not edit by hand).
func _BuiltinsService_DeleteNetworkPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(BuiltinsNetworkPolicyMessage)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(BuiltinsServiceServer).DeleteNetworkPolicy(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/mizar.BuiltinsService/DeleteNetworkPolicy",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(BuiltinsServiceServer).DeleteNetworkPolicy(ctx, req.(*BuiltinsNetworkPolicyMessage))
	}
	return interceptor(ctx, in, info, handler)
}
// _BuiltinsService_UpdateNamespace_Handler is the server dispatch shim for the
// BuiltinsService.UpdateNamespace RPC (protoc-generated code; do not edit by hand).
func _BuiltinsService_UpdateNamespace_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(BuiltinsNamespaceMessage)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(BuiltinsServiceServer).UpdateNamespace(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/mizar.BuiltinsService/UpdateNamespace",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(BuiltinsServiceServer).UpdateNamespace(ctx, req.(*BuiltinsNamespaceMessage))
	}
	return interceptor(ctx, in, info, handler)
}
// _BuiltinsService_CreateNamespace_Handler is the server dispatch shim for the
// BuiltinsService.CreateNamespace RPC (protoc-generated code; do not edit by hand).
func _BuiltinsService_CreateNamespace_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(BuiltinsNamespaceMessage)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(BuiltinsServiceServer).CreateNamespace(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/mizar.BuiltinsService/CreateNamespace",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(BuiltinsServiceServer).CreateNamespace(ctx, req.(*BuiltinsNamespaceMessage))
	}
	return interceptor(ctx, in, info, handler)
}
// _BuiltinsService_DeleteNamespace_Handler is the server dispatch shim for the
// BuiltinsService.DeleteNamespace RPC (protoc-generated code; do not edit by hand).
func _BuiltinsService_DeleteNamespace_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(BuiltinsNamespaceMessage)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(BuiltinsServiceServer).DeleteNamespace(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/mizar.BuiltinsService/DeleteNamespace",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(BuiltinsServiceServer).DeleteNamespace(ctx, req.(*BuiltinsNamespaceMessage))
	}
	return interceptor(ctx, in, info, handler)
}
// _BuiltinsService_serviceDesc maps every mizar.BuiltinsService RPC name to its
// generated handler shim above; it is passed to grpc.Server.RegisterService.
// Protoc-generated code — regenerate from builtins.proto instead of editing.
var _BuiltinsService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "mizar.BuiltinsService",
	HandlerType: (*BuiltinsServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "CreateService",
			Handler: _BuiltinsService_CreateService_Handler,
		},
		{
			MethodName: "UpdateService",
			Handler: _BuiltinsService_UpdateService_Handler,
		},
		{
			MethodName: "ResumeService",
			Handler: _BuiltinsService_ResumeService_Handler,
		},
		{
			MethodName: "DeleteService",
			Handler: _BuiltinsService_DeleteService_Handler,
		},
		{
			MethodName: "UpdateServiceEndpoint",
			Handler: _BuiltinsService_UpdateServiceEndpoint_Handler,
		},
		{
			MethodName: "ResumeServiceEndpoint",
			Handler: _BuiltinsService_ResumeServiceEndpoint_Handler,
		},
		{
			MethodName: "CreateServiceEndpoint",
			Handler: _BuiltinsService_CreateServiceEndpoint_Handler,
		},
		{
			MethodName: "ResumePod",
			Handler: _BuiltinsService_ResumePod_Handler,
		},
		{
			MethodName: "UpdatePod",
			Handler: _BuiltinsService_UpdatePod_Handler,
		},
		{
			MethodName: "CreatePod",
			Handler: _BuiltinsService_CreatePod_Handler,
		},
		{
			MethodName: "DeletePod",
			Handler: _BuiltinsService_DeletePod_Handler,
		},
		{
			MethodName: "CreateNode",
			Handler: _BuiltinsService_CreateNode_Handler,
		},
		{
			MethodName: "ResumeNode",
			Handler: _BuiltinsService_ResumeNode_Handler,
		},
		{
			MethodName: "UpdateNode",
			Handler: _BuiltinsService_UpdateNode_Handler,
		},
		{
			MethodName: "DeleteNode",
			Handler: _BuiltinsService_DeleteNode_Handler,
		},
		{
			MethodName: "CreateArktosNetwork",
			Handler: _BuiltinsService_CreateArktosNetwork_Handler,
		},
		{
			MethodName: "ResumeArktosNetwork",
			Handler: _BuiltinsService_ResumeArktosNetwork_Handler,
		},
		{
			MethodName: "UpdateArktosNetwork",
			Handler: _BuiltinsService_UpdateArktosNetwork_Handler,
		},
		{
			MethodName: "UpdateNetworkPolicy",
			Handler: _BuiltinsService_UpdateNetworkPolicy_Handler,
		},
		{
			MethodName: "CreateNetworkPolicy",
			Handler: _BuiltinsService_CreateNetworkPolicy_Handler,
		},
		{
			MethodName: "DeleteNetworkPolicy",
			Handler: _BuiltinsService_DeleteNetworkPolicy_Handler,
		},
		{
			MethodName: "UpdateNamespace",
			Handler: _BuiltinsService_UpdateNamespace_Handler,
		},
		{
			MethodName: "CreateNamespace",
			Handler: _BuiltinsService_CreateNamespace_Handler,
		},
		{
			MethodName: "DeleteNamespace",
			Handler: _BuiltinsService_DeleteNamespace_Handler,
		},
	},
	Streams: []grpc.StreamDesc{},
	Metadata: "builtins.proto",
}
|
<filename>meetings/views.py
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.utils import timezone
from django.http import JsonResponse
from django.conf import settings
from .models import Attendee, Meeting
@login_required
def home(request):
    """Render the dashboard with the user's upcoming meeting attendances,
    ordered soonest-first."""
    now = timezone.now()
    upcoming = (
        Attendee.objects
        .filter(user=request.user, meeting__start_time__gt=now)
        .order_by('meeting__start_time')
    )
    return render(request, 'meetings/dashboard.html', {"upcoming": upcoming})
@login_required
def meeting_data(request, room):
    """Return the Jitsi join parameters (JWT, room, domain, subject) for the
    current user's attendance record as JSON.

    Uses ``get_object_or_404`` so a user who is not an attendee of ``room``
    gets a 404 instead of an unhandled ``Attendee.DoesNotExist`` (HTTP 500).
    """
    attendee = get_object_or_404(Attendee, meeting__room_id=room, user=request.user)
    return JsonResponse({
        "jwt": attendee.jwt(),
        "roomName": attendee.meeting.room_id,
        "domain": settings.JITSI_DOMAIN,
        "subject": attendee.meeting.subject
    })
@login_required
def meeting(request, room):
    """Render the meeting detail page; only attendees of the room may view it."""
    is_attendee = Attendee.objects.filter(
        user=request.user, meeting__room_id=room
    ).exists()
    if not is_attendee:
        raise PermissionDenied
    meet = get_object_or_404(Meeting, room_id=room)
    # Anonymous-join meetings expose a shareable guest URL carrying the room JWT.
    guest_link = None
    if meet.is_anonymous:
        guest_link = "https://{}/{}?jwt={}".format(
            settings.JITSI_DOMAIN, meet.room_id, meet.jwt()
        )
    context = {
        "meeting": meet,
        "attendees": meet.attendees.all(),
        "guest_link": guest_link,
    }
    return render(request, 'meetings/meeting.html', context)
|
<filename>gulpfile.js
/*global -$ */
'use strict';
// generated on 2016-01-04 using generator-modern-frontend 0.2.8
var fs = require('fs');
var path = require('path');
var gulp = require('gulp');
var $ = require('gulp-load-plugins')();
var browserSync = require('browser-sync');
var reload = browserSync.reload;
var through2 = require('through2');
var browserify = require('browserify');
var isDevelopment = (process.env.ENVIRONMENT !== "production");
// Compile app/css/main.styl to CSS with autoprefixing; inline sourcemaps are
// written only in development. Stylus errors are logged without killing watch.
gulp.task('stylesheet', function () {
  return gulp.src('app/css/main.styl')
    .pipe($.if(isDevelopment, $.sourcemaps.init()))
    .pipe($.stylus({
      errors: true
    }))
    .on('error', function (error) {
      // Log and end the stream so `gulp watch` keeps running after a bad compile.
      console.log(error.stack);
      this.emit('end');
    })
    .pipe($.postcss([
      require('autoprefixer-core')({browsers: ['last 1 version']})
    ]))
    .pipe($.if(isDevelopment, $.sourcemaps.write()))
    .pipe(gulp.dest('.tmp/css'))
    .pipe(reload({stream: true}));
});
// Bundle app/js/main.js with browserify and write to both dist/js and .tmp/js.
gulp.task('javascript', function () {
  return gulp.src('app/js/main.js')
    .pipe(through2.obj(function (file, enc, next){ // workaround for https://github.com/babel/babelify/issues/46
      browserify({
        entries: file.path,
        debug: isDevelopment
      }).bundle(function(err, res){
        if (err) { return next(err); }
        // Replace the vinyl file's contents with the bundled output.
        file.contents = res;
        next(null, file);
      });
    }))
    .on('error', function (error) {
      console.log(error.stack);
      this.emit('end');
    })
    .pipe(gulp.dest('dist/js'))
    // NOTE(review): sourcemaps.init after gulp.dest means dist/js gets the
    // bundle without maps while .tmp/js gets maps — presumably intentional; confirm.
    .pipe($.if(isDevelopment, $.sourcemaps.init({loadMaps: true})))
    .pipe($.if(isDevelopment, $.sourcemaps.write('.')))
    .pipe(gulp.dest('.tmp/js'));
});
// Lint all app JS with JSHint; fail the build only when browserSync is not
// active (i.e. during CI/build, not during interactive serve).
gulp.task('jshint', function () {
  return gulp.src('app/js/**/*.js')
    .pipe(reload({stream: true, once: true}))
    .pipe($.jshint())
    .pipe($.jshint.reporter('jshint-stylish'))
    .pipe($.if(!browserSync.active, $.jshint.reporter('fail')));
});
// Build production HTML: concatenate useref-marked assets, uglify JS,
// minify CSS and HTML, and emit everything into dist/.
gulp.task('html', ['javascript', 'stylesheet'], function () {
  var assets = $.useref.assets({searchPath: ['.tmp', 'app/*.html', '.']});
  return gulp.src('app/*.html')
    .pipe(assets)
    .pipe($.if('*.js', $.uglify()))
    .pipe($.if('*.css', $.csso()))
    .pipe(assets.restore())
    .pipe($.useref())
    .pipe($.if('*.html', $.minifyHtml({conditionals: true, loose: true})))
    .pipe(gulp.dest('dist'));
});
// Copy images verbatim into the production build.
gulp.task('images', function () {
  return gulp.src('app/images/**/*')
    .pipe(gulp.dest('dist/images'));
});
// Collect font files from Bower packages plus the project's own app/fonts
// tree, and copy them into both the dev (.tmp) and production (dist) trees.
gulp.task('fonts', function () {
  var pattern = 'app/fonts/**/*'; // fixed: statement previously relied on ASI (missing semicolon)
  return gulp.src(require('main-bower-files')({
    filter: '**/*.{eot,svg,ttf,woff,woff2}'
  }).concat(pattern))
    .pipe(gulp.dest('.tmp/fonts'))
    .pipe(gulp.dest('dist/fonts'));
});
// Copy top-level extras (favicon, robots.txt, dotfiles, …) into dist,
// excluding HTML which is handled by the 'html' task.
gulp.task('extras', function () {
  return gulp.src([
    'app/*.*',
    '!app/*.html'
  ], {
    dot: true
  }).pipe(gulp.dest('dist'));
});
// Delete build artifacts (.tmp and dist) before a fresh build.
gulp.task('clean', require('del').bind(null, ['.tmp', 'dist']));
// Development server: compile assets, serve .tmp + app via browserSync on
// port 9000, and reload/rebuild on source changes.
gulp.task('serve', ['stylesheet', 'javascript', 'fonts'], function () {
  browserSync({
    notify: false,
    port: 9000,
    server: {
      baseDir: ['.tmp', 'app'],
      routes: {
        '/bower_components': 'bower_components'
      }
    }
  });
  // watch for changes
  gulp.watch([
    'app/*.html',
    '.tmp/js/*.{js,jsx}',
    'app/images/**/*',
    '.tmp/fonts/**/*'
  ]).on('change', reload);
  gulp.watch(['app/css/**/*.styl'], ['stylesheet']);
  gulp.watch('app/js/**/*.{js,jsx}', ['javascript']);
  gulp.watch('app/fonts/**/*', ['fonts']);
});
// Serve the production build (dist/) for local smoke-testing.
gulp.task('serve:dist', function () {
  browserSync({
    notify: false,
    port: 9000,
    server: {
      baseDir: ['dist']
    }
  });
});
// inject bower components
// Wire Bower dependencies into Stylus sources and HTML pages.
// NOTE(review): neither stream is returned, so gulp cannot know when this
// task finishes — confirm whether completion tracking matters to callers.
gulp.task('wiredep', function () {
  var wiredep = require('wiredep').stream;
  gulp.src('app/css/*.styl')
    .pipe(wiredep({
      ignorePath: /^(\.\.\/)+/
    }))
    .pipe(gulp.dest('app/css'));
  gulp.src('app/*.html')
    .pipe(wiredep({
      // exclude: ['bootstrap-sass-official'],
      ignorePath: /^(\.\.\/)*\.\./
    }))
    .pipe(gulp.dest('app'));
});
// Full production build; prints the gzipped size of the dist output.
gulp.task('build', ['html', 'images', 'fonts', 'extras'], function () {
  return gulp.src('dist/**/*').pipe($.size({title: 'build', gzip: true}));
});
// Default task: clean, then build from scratch.
gulp.task('default', ['clean'], function () {
  gulp.start('build');
});
|
# Download SplClassLoader.php into vendor/.
# mkdir -p creates the directory only if missing (replaces the manual [ -d ] test).
mkdir -p vendor
wget https://raw.github.com/gist/221634/2bc31f04b0ed0ef70daab68516c8d17ba0753f5e/SplClassLoader.php -O vendor/SplClassLoader.php
|
#!/bin/bash
# Merge tabular files: write the header (line 1) of the first non-empty input
# to $output, then append the data rows (lines 2+) of every input.
# Exits with status 5 when all inputs are empty.
output=$1
shift
i=1
for var in "$@"
do
	# -s: file exists and is non-empty
	if [[ -s "$var" ]] ; then
		( head -q -n 1 "$var" ) > "$output"
		break
	fi
	i=$((i + 1))   # POSIX arithmetic; the old $[i+1] form is deprecated
done
if [ "$i" -le "$#" ]
then
	# Quote "$@" so filenames containing spaces survive word splitting.
	( tail -q -n +2 "$@" )>> "$output"
else
	exit 5
fi
|
package java_8_in_action.stream_collector;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static java.util.stream.Collectors.toList;
import java_8_in_action.stream_collector.Dish.Type;
/**
 * Demonstrates intermediate stream operations (filter, map, skip) on a
 * fixed sample menu: prints the names of high-calorie meat dishes,
 * skipping the first match.
 */
public class MiddleOperatorExmaple {

    public static void main(String[] args) {
        List<Dish> menu = sampleMenu();

        // Meat dishes over 400 calories, first match skipped.
        List<String> streamExample = menu.stream()
                .filter(dish -> dish.getType() == Type.MEAT)
                .filter(dish -> dish.getCalories() > 400)
                .map(Dish::getName)
                .skip(1)
                .collect(toList());

        System.out.println(streamExample);
    }

    /** Builds the fixed demo menu. */
    private static List<Dish> sampleMenu() {
        return Arrays.asList(
                new Dish("pork", false, 800, Dish.Type.MEAT),
                new Dish("beef", false, 700, Dish.Type.MEAT),
                new Dish("chicken", false, 400, Dish.Type.MEAT),
                new Dish("french", false, 530, Dish.Type.OTHER),
                new Dish("rice", true, 350, Dish.Type.OTHER),
                new Dish("season fruit", true, 120, Dish.Type.OTHER),
                new Dish("pizza", true, 550, Dish.Type.OTHER),
                new Dish("prawns", false, 300, Dish.Type.FISH),
                new Dish("salmon", false, 450, Dish.Type.FISH));
    }
}
|
#!/bin/bash
# SGE batch script: run the chiralDFT MPI target on 16 slots.
#$ -S /bin/bash
#$ -N chiralDFT
#$ -l h_rt=168:00:00
#$ -q CLG6242deb384C,CLG6226Rdeb192D,CLG5218deb192D,CLG5218deb192Th,SLG6142deb384C
#$ -pe mpi8_debian 16
#$ -cwd
#$ -V
#$ -m be
module load GCC/7.2.0/OpenMPI/3.0.0
# given by SGE
HOSTFILE="${TMPDIR}/machines"
cd "${SGE_O_WORKDIR}" || { echo "cannot cd to ${SGE_O_WORKDIR}"; exit 1; }
PREFIX="/applis/PSMN/debian9/software/Compiler/GCC/7.2.0/OpenMPI/3.0.0"
MPIRUN="${PREFIX}/bin/mpirun"
# TARGETPATH/TARGET and DATPATH are placeholders substituted before submission.
"${MPIRUN}" -prefix "${PREFIX}" -hostfile "${HOSTFILE}" -np "${NSLOTS}" TARGETPATH/TARGET > DATPATH/log.out
|
# Abort on unset variables and on any command failure.
set -u
set -e
# Train, then translate with the produced word vectors and vocabulary.
./train.sh
./translate.sh word-vec vocab result
|
# Function to calculate the sum of the natural numbers
def sumOfNaturals(n):
    """Return 1 + 2 + ... + n via Gauss's closed-form formula.

    Args:
        n: Non-negative integer upper bound.

    Returns:
        The triangular number n * (n + 1) // 2 (0 for n == 0).
    """
    # Both the even and odd branches of the original reduced to the same
    # closed form; this also avoids shadowing the built-in `sum`.
    return n * (n + 1) // 2
package com.github.robindevilliers.welcometohell.wizard;
import com.github.robindevilliers.welcometohell.wizard.domain.RouteMappings;
import com.github.robindevilliers.welcometohell.wizard.domain.Rule;
import com.github.robindevilliers.welcometohell.wizard.domain.View;
import com.github.robindevilliers.welcometohell.wizard.domain.Wizard;
import com.github.robindevilliers.welcometohell.wizard.domain.types.DataElement;
import com.github.robindevilliers.welcometohell.wizard.exception.SessionExpiredException;
import com.github.robindevilliers.welcometohell.wizard.type.Serialization;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import com.github.robindevilliers.welcometohell.wizard.expression.Expression;
import com.github.robindevilliers.welcometohell.wizard.expression.ExpressionParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.StringWriter;
import java.util.AbstractMap;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Drives wizard flows: loads wizard definitions from XML, tracks per-session
 * state in memory, validates/stores page data, and routes between views
 * based on condition rules.
 */
@Component
public class WizardEngine {
    // Parsed wizard definitions keyed by wizard id (the XML resource name).
    private Map<String, Wizard> cache = Collections.synchronizedMap(new HashMap<>());
    // Live session state keyed by wizard session id.
    private Map<String, WizardState> database = Collections.synchronizedMap(new HashMap<>());
    @Autowired
    private WizardBuilder wizardBuilder;
    @Autowired
    private ExpressionParser expressionParser;
    @Autowired
    @Qualifier("widgetEngine")
    private VelocityEngine velocityEngine;
    /** Starts a new session for the named wizard and returns its session id. */
    public String start(String wizardId) {
        Wizard wizard = getWizard(wizardId);
        WizardState wizardState = new WizardState(wizard, wizard.getStartView());
        database.put(wizardState.getWizardSessionId(), wizardState);
        return wizardState.getWizardSessionId();
    }
    /** Returns the wizard for a session, or throws if the session expired. */
    public Wizard getCurrentWizard(String wizardSessionId) {
        WizardState wizardState = database.get(wizardSessionId);
        if (wizardState == null) {
            throw new SessionExpiredException();
        }
        return wizardState.getWizard();
    }
    // NOTE(review): unlike getCurrentWizard, the accessors below do not guard
    // against an expired session (database.get returning null) — confirm callers
    // always check session validity first.
    public View getView(String wizardSessionId, String pageId) {
        return database.get(wizardSessionId).getViewForPageId(pageId);
    }
    public String getLastPageId(String wizardSessionId) {
        return database.get(wizardSessionId).getLastPageId();
    }
    /** Returns the external name of the session's most recent view. */
    public String getLastViewExternalName(String wizardSessionId) {
        String pageId = database.get(wizardSessionId).getLastPageId();
        View view = database.get(wizardSessionId).getViewForPageId(pageId);
        return view.getExternalName();
    }
    /**
     * Deserializes raw form input against the view's declared data elements.
     * Returns a map of field name to typed value.
     */
    public Map<String, Object> validateData(String wizardSessionId, String pageId, Map<String, String> input) {
        View view = this.getView(wizardSessionId, pageId);
        return input.entrySet()
                .stream()
                .map(e -> {
                    DataElement dataElement = view.findDataElement(e.getKey());
                    Object value = Serialization.deserialize(dataElement.getType(), e.getValue());
                    //TODO - validation should happen here.
                    return new AbstractMap.SimpleEntry<>(e.getKey(), value);
                })
                .collect(Collectors.toMap(Map.Entry::getKey, AbstractMap.SimpleEntry::getValue));
    }
    /** Stores typed page data onto the session's view. */
    public void submitData(String wizardSessionId, String pageId, Map<String, Object> input) {
        database.get(wizardSessionId).setDataOnView(pageId, input);
    }
    /** Returns a merged snapshot of all data collected so far in the session. */
    public Map<String, Object> getData(String wizardSessionId) {
        return database.get(wizardSessionId).buildDataImage();
    }
    /**
     * Evaluates the page's routing rules against the collected data, resolves
     * any rule variables through Velocity, and advances the session to the
     * matched view (or the default route when no rule matches).
     */
    public void route(String wizardSessionId, String pageId) {
        WizardState wizardState = database.get(wizardSessionId);
        Map<String, Object> data = wizardState.buildDataImage();
        View nextView = null;
        RouteMappings routeMappings = wizardState.getViewForPageId(pageId).getRouteMappings();
        for (Rule rule : routeMappings.getRules()) {
            Expression expression = expressionParser.generate(rule.getCondition());
            if (expression.matches(data)) {
                // Render each rule variable template against the session data.
                Map<String, Object> dataOnView = wizardState.getDataOnView(pageId);
                rule.getVariables().forEach(v -> {
                    StringWriter content = new StringWriter();
                    velocityEngine.evaluate(new VelocityContext(data), content, "variable-resolution", v.text);
                    dataOnView.put(v.name, content.toString());
                });
                // NOTE(review): no break here — a later matching rule overrides
                // an earlier one; confirm last-match-wins is intended.
                nextView = wizardState.getWizard().findView(rule.getViewId());
            }
        }
        if (nextView == null) {
            if (routeMappings.getDefault() == null) {
                throw new RuntimeException("No routes matched and no default supplied");
            }
            Map<String, Object> dataOnView = wizardState.getDataOnView(pageId);
            routeMappings.getDefault().getVariables().forEach(v -> {
                StringWriter content = new StringWriter();
                velocityEngine.evaluate(new VelocityContext(data), content, "variable-resolution", v.text);
                dataOnView.put(v.name, content.toString());
            });
            nextView = wizardState.getWizard().findView(routeMappings.getDefault().getViewId());
        }
        wizardState.setNextView(pageId, nextView);
    }
    public void setNextView(String wizardSessionId, String pageId, String viewId) {
        //this method lets the browser set arbitrary following views from any page id.
        //This will remove all history after that page id.
        //TODO - consider how this logic is impacted by change sets.
        //TODO - should a link reset all history of state or should it add to the top?
        WizardState wizardState = database.get(wizardSessionId);
        View nextView = wizardState.getWizard().findView(viewId);
        wizardState.setNextView(pageId, nextView);
    }
    /**
     * Loads (and caches) a wizard definition by parsing /{name}.xml from the
     * classpath with the SAX-based WizardBuilder.
     */
    private Wizard getWizard(String name) {
        synchronized (cache) {
            if (cache.containsKey(name)) {
                return cache.get(name);
            }
            try {
                SAXParserFactory.newInstance().newSAXParser().parse(
                        WizardEngine.class.getResourceAsStream("/" + name + ".xml"),
                        wizardBuilder
                );
                cache.put(name, wizardBuilder.getWizard());
                return wizardBuilder.getWizard();
            } catch (Exception e) {
                if (e instanceof RuntimeException) {
                    throw (RuntimeException) e;
                } else {
                    throw new RuntimeException(e);
                }
            }
        }
    }
}
|
#!/bin/bash
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Run from the script's own directory; the end-to-end-test suite has no
# refresh work to perform, so this is intentionally a no-op.
pushd $(dirname "$0") > /dev/null
echo "no job in the refresh script of end-to-end-test"
popd > /dev/null
def longest_common_substring(s1, s2):
    """
    Find the longest common substring between two strings.

    Uses dynamic programming: table[i][j] holds the length of the common
    suffix of s1[:i] and s2[:j].

    Args:
        s1 (str): The first string.
        s2 (str): The second string.

    Returns:
        str: The longest common substring between s1 and s2 (the empty
        string when they share no characters).
    """
    rows, cols = len(s1), len(s2)
    table = [[0] * (cols + 1) for _ in range(rows + 1)]
    best_len = 0
    best_end = 0  # index into s1 just past the end of the best match
    for i in range(1, rows + 1):
        for j in range(1, cols + 1):
            if s1[i - 1] != s2[j - 1]:
                continue  # cell stays 0, matching the original's else branch
            table[i][j] = table[i - 1][j - 1] + 1
            if table[i][j] > best_len:
                best_len = table[i][j]
                best_end = i
    return s1[best_end - best_len: best_end]
/*global define*/
define(function(require) {

  var templateNode = require('tmpl!./settings_main.html'),
      tngAccountItemNode = require('tmpl!./tng/account_item.html'),
      common = require('mail_common'),
      MailAPI = require('api'),
      Cards = common.Cards;

  /**
   * Global settings, list of accounts.
   * Card showing top-level settings: the account list, an "add account"
   * action, and a hidden debug entry behind five rapid taps on the
   * library-version element.
   */
  function SettingsMainCard(domNode, mode, args) {
    this.domNode = domNode;

    // Live slice of accounts; splice notifications keep the DOM in sync.
    this.acctsSlice = MailAPI.viewAccounts(false);
    this.acctsSlice.onsplice = this.onAccountsSplice.bind(this);

    domNode.getElementsByClassName('tng-close-btn')[0]
      .addEventListener('click', this.onClose.bind(this), false);

    this.accountsContainer =
      domNode.getElementsByClassName('tng-accounts-container')[0];

    domNode.getElementsByClassName('tng-account-add')[0]
      .addEventListener('click', this.onClickAddAccount.bind(this), false);

    // Secret-debug gesture state: click count and its reset timer.
    this._secretButtonClickCount = 0;
    this._secretButtonTimer = null;
    // TODO: Need to remove the secret debug entry before shipping.
    domNode.getElementsByClassName('tng-email-lib-version')[0]
      .addEventListener('click', this.onClickSecretButton.bind(this), false);
  }
  SettingsMainCard.prototype = {
    nextCards: ['setup_account_info', 'settings_account'],

    onClose: function() {
      Cards.removeCardAndSuccessors(this.domNode, 'animate', 1, 1);
    },

    // Mirror account-slice mutations into the accounts container DOM:
    // remove `howMany` nodes starting at `index`, then insert the added items.
    onAccountsSplice: function(index, howMany, addedItems,
                               requested, moreExpected) {
      var accountsContainer = this.accountsContainer;

      var account;
      if (howMany) {
        for (var i = index + howMany - 1; i >= index; i--) {
          account = this.acctsSlice.items[i];
          accountsContainer.removeChild(account.element);
        }
      }

      var insertBuddy = (index >= accountsContainer.childElementCount) ?
                          null : accountsContainer.children[index],
          self = this;
      addedItems.forEach(function(account) {
        var accountNode = account.element =
          tngAccountItemNode.cloneNode(true);
        accountNode.account = account;
        self.updateAccountDom(account, true);
        accountsContainer.insertBefore(accountNode, insertBuddy);
      });
    },

    updateAccountDom: function(account, firstTime) {
      var accountNode = account.element;

      if (firstTime) {
        var accountLabel =
          accountNode.getElementsByClassName('tng-account-item-label')[0];

        accountLabel.textContent = account.name;
        accountLabel.addEventListener('click',
          this.onClickEnterAccount.bind(this, account), false);
      }
    },

    onClickAddAccount: function() {
      Cards.pushCard(
        'setup_account_info', 'default', 'animate',
        {
          allowBack: true
        },
        'right');
    },

    onClickEnterAccount: function(account) {
      Cards.pushCard(
        'settings_account', 'default', 'animate',
        {
          account: account
        },
        'right');
    },

    // Five clicks within 2 seconds open the hidden debug card.
    onClickSecretButton: function() {
      if (this._secretButtonTimer === null) {
        this._secretButtonTimer = window.setTimeout(
          function() {
            this._secretButtonTimer = null;
            this._secretButtonClickCount = 0;
          }.bind(this), 2000);
      }

      if (++this._secretButtonClickCount >= 5) {
        window.clearTimeout(this._secretButtonTimer);
        this._secretButtonTimer = null;
        this._secretButtonClickCount = 0;

        Cards.pushCard('settings_debug', 'default', 'animate', {}, 'right');
      }
    },

    die: function() {
      this.acctsSlice.die();
    }
  };
  Cards.defineCardWithDefaultMode(
    'settings_main',
    { tray: false },
    SettingsMainCard,
    templateNode
  );

  return SettingsMainCard;
});
|
package dao;
/**
 * Data-access contract for Client entities.
 * NOTE(review): currently a marker interface — persistence methods are
 * presumably to be added; confirm it is not dead code.
 */
public interface ClientDao {
}
|
// sbt build definition for the msgpack raw-parser library (Scala 2.10).
name := "msgpack-rawparser"
version := "0.2.2"
scalaVersion := "2.10.6"
|
from django.urls import path
from django.conf.urls import url,include
from . import views
urlpatterns = [
    path('', views.rankingView, name='ranking'),
    # Bug fix: as_view() must be *called* to produce a view callable;
    # passing the unbound method breaks class-based view dispatch at request time.
    path('.list', views.RankingList.as_view(), name='rankingList')
]
<reponame>StanislavMishin/external-courses
const number = 0;

/**
 * Classifies num: rejects values above 1000, treats 0 and 1 as neither
 * prime nor composite, and otherwise reports prime/composite via trial
 * division. Checking divisors only up to sqrt(num) is sufficient: any
 * composite has a factor no larger than its square root.
 */
function justNumber(num) {
  if (num > 1000) {
    console.log('Данные неверны');
    return 'Данные неверны';
  }
  if (num === 1 || num === 0) {
    return 'Не причисляется ни к простым, ни к составным числам';
  }
  let checkbox = true;
  for (let i = 2; i * i <= num; i += 1) {
    if (num % i === 0) {
      checkbox = false;
      break;
    }
  }
  const result = (checkbox) ? `Число ${num} - простое число` : `Число ${num} - составное число`;
  return result;
}

justNumber(number);

module.exports = justNumber;
|
/*
This file is part of the JitCat library.
Copyright (C) <NAME> 2019
Distributed under the MIT License (license terms are at http://opensource.org/licenses/MIT).
*/
#include "jitcat/CatFunctionOrConstructor.h"
#include "jitcat/CatArgumentList.h"
#include "jitcat/CatBuiltInFunctionCall.h"
#include "jitcat/CatOperatorNew.h"
#include "jitcat/CatMemberFunctionCall.h"
#include "jitcat/CatStaticFunctionCall.h"
#include "jitcat/CatTypeOrIdentifier.h"
using namespace jitcat;
using namespace jitcat::AST;
// AST node for a parsed call that is not yet known to be a function call or a
// constructor call; toFunctionCall/toConstructorCall disambiguate it later.
CatFunctionOrConstructor::CatFunctionOrConstructor(CatTypeOrIdentifier* typeOrIdentifier, CatArgumentList* argumentList, const Tokenizer::Lexeme& lexeme):
	CatASTNode(lexeme),
	typeOrIdentifier(typeOrIdentifier),
	argumentList(argumentList)
{
}


// Deep-copy constructor: clones both owned children when present.
CatFunctionOrConstructor::CatFunctionOrConstructor(const CatFunctionOrConstructor& other):
	CatASTNode(other),
	typeOrIdentifier(other.typeOrIdentifier == nullptr ? nullptr : new CatTypeOrIdentifier(*other.typeOrIdentifier.get())),
	argumentList(other.argumentList == nullptr ? nullptr : new CatArgumentList(*other.argumentList.get()))
{
}


CatFunctionOrConstructor::~CatFunctionOrConstructor()
{
}


// Polymorphic clone used by the AST copying machinery.
CatASTNode* CatFunctionOrConstructor::copy() const
{
	return new CatFunctionOrConstructor(*this);
}


void CatFunctionOrConstructor::print() const
{
	typeOrIdentifier->print();
	argumentList->print();
}


CatASTNodeType CatFunctionOrConstructor::getNodeType() const
{
	return CatASTNodeType::FunctionOrConstructorCall;
}


// Converts this ambiguous node into a constructor call, releasing ownership
// of the argument list to the new node.
CatASTNode* CatFunctionOrConstructor::toConstructorCall()
{
	return typeOrIdentifier->toConstructorCall(argumentList.release());
}


// Converts this ambiguous node into a function call, releasing ownership
// of the argument list to the new node.
CatASTNode* CatFunctionOrConstructor::toFunctionCall()
{
	return typeOrIdentifier->toFunctionCall(argumentList.release());
}
|
<reponame>orhoj/concordium-desktop-wallet
import React from 'react';
import { AddAnonymityRevoker } from '~/utils/types';
import { fieldDisplays } from './CreateAddAnonymityRevoker';
import PublicKeyDetails from '~/components/ledger/PublicKeyDetails';
interface Props {
    // The addAnonymityRevoker transaction payload to display.
    addAnonymityRevoker: AddAnonymityRevoker;
}

/**
 * Displays an overview of an addAnonymityRevoker transaction payload.
 */
export default function AddAnonymityRevokerView({
    addAnonymityRevoker,
}: Props) {
    return (
        <>
            {/* Descriptive fields of the anonymity revoker */}
            <div className="body1">
                <h5 className="mB0">{fieldDisplays.name}</h5>
                {addAnonymityRevoker.arDescription.name}
            </div>
            <div className="body1">
                <h5 className="mB0">{fieldDisplays.url}</h5>
                {addAnonymityRevoker.arDescription.url}
            </div>
            <div className="body1">
                <h5 className="mB0">{fieldDisplays.description}</h5>
                {addAnonymityRevoker.arDescription.description}
            </div>
            {/* Identity and key material */}
            <div className="body1">
                <h5 className="mB0">{fieldDisplays.arIdentity}</h5>
                {addAnonymityRevoker.arIdentity}
            </div>
            <div className="body1">
                <h5 className="mB0">{fieldDisplays.arPublicKey}</h5>
                <PublicKeyDetails publicKey={addAnonymityRevoker.arPublicKey} />
            </div>
        </>
    );
}
|
<filename>user/forms/__init__.py
from .auth_forms import *
from .profile_form import *
|
#include <algorithm>
#include <iostream>
// Reads two integers from stdin and prints the larger one.
int main(){
    int num1,num2;
    std::cin >> num1 >> num2;
    // std::max expresses the intent directly instead of a manual branch.
    std::cout << std::max(num1, num2) << std::endl;
    return 0;
}
import React from 'react';
import { Progress } from 'algae-ui';
// Demo: renders Progress bars in various states (default, fail, success,
// and with the percentage label hidden).
export default () => {
  return (
    <div className="progress-list">
      <Progress percent={30} />
      <Progress percent={50} />
      <Progress percent={20} status="fail" />
      <Progress percent={70} status="success" />
      <Progress percent={50} showInfo={false} />
    </div>
  );
};
|
#!/usr/bin/env bash
# Copyright 2020 Jian Wu
# License: Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
set -eu

# data
train_data=/home/jwu/doc/data/aishell_v2/AISHELL-2
valid_data=/home/jwu/doc/data/aishell_v2/AISHELL-2-Eval-Test
dataset="aishell_v2"
nj=4
# stage accepts a single number or a "beg-end" range, e.g. "1-3"
stage=1
am_exp=1a
gpu="0,1,2,3"
seed=777
tensorboard=false
prog_interval=100
eval_interval=4000
# for am
am_epochs=100
am_batch_size=256
am_num_workers=32
# decoding
ngram=5
beam_size=16
len_norm=false
nbest=$beam_size
lm_weight=0
ctc_weight=0
. ./utils/parse_options.sh || exit 1
data_dir=data/$dataset
exp_dir=exp/$dataset/$am_exp
# Split "beg-end" into the stage range; a bare number runs just that stage.
beg=$(echo $stage | awk -F '-' '{print $1}')
end=$(echo $stage | awk -F '-' '{print $2}')
[ -z $end ] && end=$beg
# Stage 1: prepare train/dev/test data lists, durations and the word dictionary.
if [ $end -ge 1 ] && [ $beg -le 1 ]; then
  echo "Stage 1: preparing data ..."
  local/prepare_data.sh $train_data/iOS/data $data_dir/local/train $data_dir/train
  utils/wav_duration.py --output "time" --num-jobs $nj $data_dir/train/wav.scp $data_dir/train/utt2dur
  for subset in DEV TEST; do
    for subtype in ANDROID IOS MIC; do
      name=$(echo ${subset}_${subtype} | tr '[:upper:]' '[:lower:]')
      local/prepare_data.sh $valid_data/$subset/$subtype $data_dir/local/$name $data_dir/$name
      utils/wav_duration.py --output "time" --num-jobs $nj $data_dir/$name/wav.scp $data_dir/$name/utt2dur
    done
  done
  # Merge all dev_* subsets into a single sorted dev set.
  mkdir -p $data_dir/dev
  for x in wav.scp utt2dur text; do cat $data_dir/dev_*/$x | sort -k1 > $data_dir/dev/$x; done
  ./utils/tokenizer.py $data_dir/train/text /dev/null \
    --unit word --add-units "<sos>,<eos>,<unk>" --dump-vocab $data_dir/dict
fi
# Stage 2: distributed acoustic-model training on the configured GPUs.
if [ $end -ge 2 ] && [ $beg -le 2 ]; then
  echo "Stage 2: training AM ..."
  ./scripts/distributed_train.sh \
    --gpu $gpu \
    --seed $seed \
    --epochs $am_epochs \
    --batch-size $am_batch_size \
    --num-workers $am_num_workers \
    --tensorboard $tensorboard \
    --prog-interval $prog_interval \
    --eval-interval $eval_interval \
    --dev-batch-factor 4 \
    am $dataset $am_exp
fi
# Stage 3: beam-search decoding of every dev/test subset (runs in parallel).
if [ $end -ge 3 ] && [ $beg -le 3 ]; then
  echo "Stage 3: decoding ..."
  # decoding
  for name in {dev,test}_{android,ios,mic}; do
    ./scripts/decode.sh \
      --gpu 0 \
      --text $data_dir/$name/text \
      --score true \
      --beam-size $beam_size \
      --nbest $nbest \
      --max-len 50 \
      --ctc-weight $ctc_weight \
      --len-norm $len_norm \
      --dict $exp_dir/dict \
      $exp_dir $data_dir/$name/wav.scp \
      $exp_dir/$name &
  done
  wait
fi
# Stage 4: train an n-gram LM with KenLM (lmplz/build_binary) on the
# transcripts. Note: this reassigns exp_dir to the ngram directory.
if [ $end -ge 4 ] && [ $beg -le 4 ]; then
  echo "Stage 4: training ngram LM ..."
  exp_dir=exp/$dataset/ngram && mkdir -p $exp_dir
  cat $data_dir/train/text | awk '{$1=""; print}' > $exp_dir/train.text
  lmplz -o $ngram --text $exp_dir/train.text --arpa $exp_dir/${ngram}gram.arpa
  build_binary $exp_dir/${ngram}gram.arpa $exp_dir/${ngram}gram.arpa.bin
fi
# Stage 5: decoding with n-gram LM shallow fusion.
# Fixed: --lm-weight was passed twice to decode.sh; the duplicate is removed.
# NOTE(review): $exp_dir here is the ngram dir only if stage 4 ran in the same
# invocation; when running stage 5 alone it is the AM experiment dir — confirm
# which directory --dict and the positional args should reference.
if [ $end -ge 5 ] && [ $beg -le 5 ]; then
  echo "Stage 5: decoding (ngram) ..."
  for name in {dev,test}_{android,ios,mic}; do
    dec_dir=${name}_${ngram}gram_$lm_weight
    ./scripts/decode.sh \
      --score true \
      --text data/$dataset/$name/text \
      --gpu 0 \
      --dict $exp_dir/dict \
      --nbest $nbest \
      --lm exp/$dataset/ngram/${ngram}gram.arpa.bin \
      --lm-weight $lm_weight \
      --max-len 50 \
      --len-norm $len_norm \
      --beam-size $beam_size \
      --ctc-weight $ctc_weight \
      $exp_dir $data_dir/$name/wav.scp \
      $exp_dir/$dec_dir &
  done
  wait
fi
|
# import libraries
import numpy as np
from sklearn import datasets
from sklearn.metrics import accuracy_score  # fixed: was used below without being imported
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

# load the digits dataset
digits = datasets.load_digits()

# create features and target
X = digits.data
y = digits.target

# split the data into training and testing sets (fixed seed for reproducibility)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=1)

# Create KNN model
knn = KNeighborsClassifier()

# fit the model with data
knn.fit(X_train, y_train)

# predict the response of test dataset
pred = knn.predict(X_test)

# evaluate accuracy
print(accuracy_score(y_test, pred))
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.curso.lambdas.interfaces;
/**
 * Minimal functional interface mapping an Argumento to a Resultado.
 * NOTE(review): structurally equivalent to java.util.function.Function;
 * prefer the standard interface unless callers require this SAM type.
 *
 * @author Chema
 * @param <Argumento> the input type
 * @param <Resultado> the result type
 */
@FunctionalInterface
public interface Funcion<Argumento, Resultado> {

    /** Applies this function to the given argument and returns the result. */
    Resultado aplicar(Argumento a);
}
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.corelib.mixins;
import org.apache.tapestry5.Field;
import org.apache.tapestry5.MarkupWriter;
import org.apache.tapestry5.SymbolConstants;
import org.apache.tapestry5.ValidationDecorator;
import org.apache.tapestry5.annotations.Environmental;
import org.apache.tapestry5.annotations.HeartbeatDeferred;
import org.apache.tapestry5.annotations.InjectContainer;
import org.apache.tapestry5.dom.Element;
import org.apache.tapestry5.ioc.annotations.Inject;
import org.apache.tapestry5.ioc.annotations.Symbol;
/**
* Applied to a {@link org.apache.tapestry5.Field}, this provides the outer layers of markup to correctly
* render text fields, selects, and textareas using Bootstrap:
* an outer {@code <div class="field-group">} containing a {@code <label class="control-label">} and the field itself.
* Actually, the class attribute of the div is defined by the
* {@link SymbolConstants#FORM_GROUP_WRAPPER_CSS_CLASS} and
* the class attribute of label is defined by the {@link SymbolConstants#FORM_GROUP_LABEL_CSS_CLASS}.
* <code>field-group</code> and <code>control-label</code> are the default values.
* As with the {@link org.apache.tapestry5.corelib.components.Label} component, the {@code for} attribute is set (after the field itself
* renders).
*
*
* You can also use the {@link SymbolConstants#FORM_GROUP_FORM_FIELD_WRAPPER_ELEMENT_NAME} symbol
* to optionally wrap the input field in an element and {@link SymbolConstants#FORM_GROUP_FORM_FIELD_WRAPPER_ELEMENT_CSS_CLASS}
* to give it a CSS class. This is useful for Bootstrap form-horizontal forms.
* Setting {@link SymbolConstants#FORM_GROUP_FORM_FIELD_WRAPPER_ELEMENT_NAME} to <code>div</code>,
* {@link SymbolConstants#FORM_GROUP_FORM_FIELD_WRAPPER_ELEMENT_CSS_CLASS} to <code>col-sm-10</code>
* and {@link SymbolConstants#FORM_GROUP_LABEL_CSS_CLASS} to <code>col-sm-2</code>
* will generate labels 2 columns wide and form fields 10 columns wide.
*
*
* This component is not appropriate for radio buttons or checkboxes as they use a different class on the outermost element
* ("radio" or "checkbox") and next the element inside the {@code <label>}.
*
*
* @tapestrydoc
* @since 5.4
* @see SymbolConstants#FORM_GROUP_WRAPPER_CSS_CLASS
* @see SymbolConstants#FORM_GROUP_FORM_FIELD_WRAPPER_ELEMENT_NAME
* @see SymbolConstants#FORM_GROUP_FORM_FIELD_WRAPPER_ELEMENT_CSS_CLASS
* @see SymbolConstants#FORM_GROUP_LABEL_CSS_CLASS
* @see SymbolConstants#FORM_FIELD_CSS_CLASS
*/
public class FormGroup
{
    /** The field (text field, select, textarea) this mixin is attached to. */
    @InjectContainer
    private Field field;

    /** CSS class for the generated {@code <label>} (default: "control-label"). */
    @Inject
    @Symbol(SymbolConstants.FORM_GROUP_LABEL_CSS_CLASS)
    private String labelCssClass;

    /** CSS class for the outer {@code <div>} (default: "form-group"). */
    @Inject
    @Symbol(SymbolConstants.FORM_GROUP_WRAPPER_CSS_CLASS)
    private String divCssClass;

    /** Optional element name wrapped around the field itself; empty disables the wrapper. */
    @Inject
    @Symbol(SymbolConstants.FORM_GROUP_FORM_FIELD_WRAPPER_ELEMENT_NAME)
    private String fieldWrapperElementName;

    /** CSS class applied to the field wrapper element, when one is rendered. */
    @Inject
    @Symbol(SymbolConstants.FORM_GROUP_FORM_FIELD_WRAPPER_ELEMENT_CSS_CLASS)
    private String fieldWrapperElementCssClass;

    /** The rendered (initially empty) label; attributes are filled in deferred. */
    private Element label;

    /** The optional wrapper element around the field; null when disabled. */
    private Element fieldWrapper;

    @Environmental
    private ValidationDecorator decorator;

    void beginRender(MarkupWriter writer)
    {
        // Outer div always carries "form-group"; a differing configured class is appended.
        writer.element("div", "class",
                !("form-group".equals(divCssClass)) ? ("form-group" + " " + divCssClass) : divCssClass);

        decorator.beforeLabel(field);

        // Write an empty label now; its "for" attribute and text are deferred
        // until the field has rendered and its client id exists (see below).
        label = writer.element("label", "class", labelCssClass);
        writer.end();

        fillInLabelAttributes();

        decorator.afterLabel(field);

        // Optionally open a wrapper element around the field itself
        // (e.g. <div class="col-sm-10"> for Bootstrap form-horizontal layouts).
        if (fieldWrapperElementName.length() > 0) {
            fieldWrapper = writer.element(fieldWrapperElementName);
            if (fieldWrapperElementCssClass.length() > 0) {
                fieldWrapper.attribute("class", fieldWrapperElementCssClass);
            }
        }
    }

    // Deferred to the end of the heartbeat so the field has rendered and its
    // client-side id and label are known.
    @HeartbeatDeferred
    void fillInLabelAttributes()
    {
        label.attribute("for", field.getClientId());
        label.text(field.getLabel());
    }

    void afterRender(MarkupWriter writer)
    {
        if (fieldWrapper != null) {
            writer.end(); // field wrapper
        }

        writer.end(); // div.form-group
    }
}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-rare/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-rare/7-512+0+512-N-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_first_half_quarter --eval_function penultimate_quarter_eval |
//============================================================================
// Name : OneMillion.cpp
//
// Example of how floating point accuracy is related to precision
//
// ----------------------------------------------------------------------------------------------------
// ----------------------------------------------------------------------------------------------------
// Copyright (c) 2016 - RocketRedNeck
//
// RocketRedNeck and MIT License
//
// RocketRedNeck hereby grants license for others to copy and modify this source code for
// whatever purpose other's deem worthy as long as RocketRedNeck is given credit where
// where credit is due and you leave RocketRedNeck out of it for all other nefarious purposes.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// ****************************************************************************************************
#include <stdio.h>
int main()
{
    // Accumulate one million 1e-6 increments in single precision: each add
    // rounds, and the rounding error compounds, so the sum drifts from 1.0.
    const float DX = 1.0e-6f;
    float x = 0.0;
    unsigned int count = 0;
    while (count < 1000000)
    {
        x += DX;
        ++count;
    }
    printf("DX = %20.17f x = %20.17f\n",DX,x);

    // Repeat the same million additions in double precision: the extra
    // mantissa bits keep the accumulated sum much closer to 1.0.
    const double DX2 = 1.0e-6;
    double x2 = 0.0;
    count = 0;
    while (count < 1000000)
    {
        x2 += DX2;
        ++count;
    }
    printf("DX2 = %20.17f x2 = %20.17f\n",DX2,x2);

    return 0;
}
|
<gh_stars>1-10
// Code generated by entc, DO NOT EDIT.
package ent
import (
"fmt"
"strings"
"github.com/blushft/strana/modules/sink/reporter/store/ent/page"
"github.com/facebook/ent/dialect/sql"
)
// Page is the model entity for the Page schema.
type Page struct {
config `json:"-"`
// ID of the ent.
ID int `json:"id,omitempty"`
// Hostname holds the value of the "hostname" field.
Hostname string `json:"hostname,omitempty"`
// Path holds the value of the "path" field.
Path string `json:"path,omitempty"`
// Referrer holds the value of the "referrer" field.
Referrer string `json:"referrer,omitempty"`
// Search holds the value of the "search" field.
Search string `json:"search,omitempty"`
// Title holds the value of the "title" field.
Title string `json:"title,omitempty"`
// Hash holds the value of the "hash" field.
Hash string `json:"hash,omitempty"`
// Edges holds the relations/edges for other nodes in the graph.
// The values are being populated by the PageQuery when eager-loading is set.
Edges PageEdges `json:"edges"`
}
// PageEdges holds the relations/edges for other nodes in the graph.
type PageEdges struct {
// Events holds the value of the events edge.
Events []*Event
// loadedTypes holds the information for reporting if a
// type was loaded (or requested) in eager-loading or not.
loadedTypes [1]bool
}
// EventsOrErr returns the Events value or an error if the edge
// was not loaded in eager-loading.
// (Generated by entc — DO NOT EDIT; regenerate rather than hand-patch.)
func (e PageEdges) EventsOrErr() ([]*Event, error) {
	// loadedTypes[0] records whether the "events" edge was requested/loaded.
	if e.loadedTypes[0] {
		return e.Events, nil
	}
	return nil, &NotLoadedError{edge: "events"}
}
// scanValues returns the types for scanning values from sql.Rows.
func (*Page) scanValues() []interface{} {
return []interface{}{
&sql.NullInt64{}, // id
&sql.NullString{}, // hostname
&sql.NullString{}, // path
&sql.NullString{}, // referrer
&sql.NullString{}, // search
&sql.NullString{}, // title
&sql.NullString{}, // hash
}
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Page fields.
// (Generated by entc — DO NOT EDIT; regenerate rather than hand-patch.)
func (pa *Page) assignValues(values ...interface{}) error {
	// Require at least one scanned value per declared column.
	// NOTE(review): the message prints "!=" although the check is "<" —
	// a cosmetic quirk of the code generator.
	if m, n := len(values), len(page.Columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	// values[0] is always the primary key (id).
	value, ok := values[0].(*sql.NullInt64)
	if !ok {
		return fmt.Errorf("unexpected type %T for field id", value)
	}
	pa.ID = int(value.Int64)
	values = values[1:]
	// The remaining values follow the order of page.Columns; SQL NULLs
	// (Valid == false) leave the corresponding field at its zero value.
	if value, ok := values[0].(*sql.NullString); !ok {
		return fmt.Errorf("unexpected type %T for field hostname", values[0])
	} else if value.Valid {
		pa.Hostname = value.String
	}
	if value, ok := values[1].(*sql.NullString); !ok {
		return fmt.Errorf("unexpected type %T for field path", values[1])
	} else if value.Valid {
		pa.Path = value.String
	}
	if value, ok := values[2].(*sql.NullString); !ok {
		return fmt.Errorf("unexpected type %T for field referrer", values[2])
	} else if value.Valid {
		pa.Referrer = value.String
	}
	if value, ok := values[3].(*sql.NullString); !ok {
		return fmt.Errorf("unexpected type %T for field search", values[3])
	} else if value.Valid {
		pa.Search = value.String
	}
	if value, ok := values[4].(*sql.NullString); !ok {
		return fmt.Errorf("unexpected type %T for field title", values[4])
	} else if value.Valid {
		pa.Title = value.String
	}
	if value, ok := values[5].(*sql.NullString); !ok {
		return fmt.Errorf("unexpected type %T for field hash", values[5])
	} else if value.Valid {
		pa.Hash = value.String
	}
	return nil
}
// QueryEvents queries the events edge of the Page.
func (pa *Page) QueryEvents() *EventQuery {
return (&PageClient{config: pa.config}).QueryEvents(pa)
}
// Update returns a builder for updating this Page.
// Note that, you need to call Page.Unwrap() before calling this method, if this Page
// was returned from a transaction, and the transaction was committed or rolled back.
func (pa *Page) Update() *PageUpdateOne {
return (&PageClient{config: pa.config}).UpdateOne(pa)
}
// Unwrap unwraps the entity that was returned from a transaction after it was closed,
// so that all next queries will be executed through the driver which created the transaction.
func (pa *Page) Unwrap() *Page {
tx, ok := pa.config.driver.(*txDriver)
if !ok {
panic("ent: Page is not a transactional entity")
}
pa.config.driver = tx.drv
return pa
}
// String implements the fmt.Stringer.
func (pa *Page) String() string {
var builder strings.Builder
builder.WriteString("Page(")
builder.WriteString(fmt.Sprintf("id=%v", pa.ID))
builder.WriteString(", hostname=")
builder.WriteString(pa.Hostname)
builder.WriteString(", path=")
builder.WriteString(pa.Path)
builder.WriteString(", referrer=")
builder.WriteString(pa.Referrer)
builder.WriteString(", search=")
builder.WriteString(pa.Search)
builder.WriteString(", title=")
builder.WriteString(pa.Title)
builder.WriteString(", hash=")
builder.WriteString(pa.Hash)
builder.WriteByte(')')
return builder.String()
}
// Pages is a parsable slice of Page.
type Pages []*Page
func (pa Pages) config(cfg config) {
for _i := range pa {
pa[_i].config = cfg
}
}
|
<reponame>lsy26499/A_Week_Trip_server
// Express router exposing read-only "best plan" endpoints.
import express from 'express';
import bestPlanList from '../controller/bestplanController/bestPlanList';
import bestPlan from '../controller/bestplanController/bestPlan';

const bestPlanRouter = express.Router();

// GET /      -> list of best plans
bestPlanRouter.get('/', bestPlanList);
// GET /:num  -> a single best plan selected by its number
bestPlanRouter.get('/:num', bestPlan);

// CommonJS export so the app can mount this router via require().
module.exports = bestPlanRouter;
|
#!/usr/bin/env bash
set -euo pipefail

# Per-OS output directory for the compiled binary.
build_dir="$PWD/build-$GOOS"
# BUG FIX: was `mkdir -p build_dir`, which created a literal directory named
# "build_dir" instead of the intended "$build_dir" path.
mkdir -p "$build_dir"

# Version stamp: from the version input when present, else a test marker.
if [ -e "version/version" ]; then
  version=$(cat version/version)
else
  version="TESTVERSION"
fi

cd control-tower || exit 1

# Bundle the ops assets that the binary embeds at build time.
cp ../control-tower-ops/manifest.yml opsassets/assets/
cp -R ../control-tower-ops/ops opsassets/assets/
cp ../control-tower-ops/createenv-dependencies-and-cli-versions-aws.json opsassets/assets/
cp ../control-tower-ops/createenv-dependencies-and-cli-versions-gcp.json opsassets/assets/

# Build with the version stamped into both the fly package and main.
GO111MODULE=on go build -mod=vendor -ldflags "
  -X github.com/EngineerBetter/control-tower/fly.ControlTowerVersion=$version
  -X main.ControlTowerVersion=$version
" -o "$build_dir/$OUTPUT_FILE"
|
# Interactive menu for toggling BT (BitTorrent) traffic blocking.
# Reads/writes the global $ban_bt flag, snapshots the config via backup_config,
# and regenerates it via config. Depends on globals: color vars ($green, $red,
# $cyan, $magenta, $yellow, $none) and helpers error, pause, backup_config, config.
_ban_bt_main() {
    # Colorized, human-readable current state (enabled/disabled).
    if [[ $ban_bt ]]; then
        local _info="$green已开启$none"
    else
        local _info="$red已关闭$none"
    fi
    _opt=''
    # Loop until the user picks a valid option (1 = enable, 2 = disable).
    while :; do
        echo
        echo -e "$yellow 1. $none开启 BT 屏蔽"
        echo
        echo -e "$yellow 2. $none关闭 BT 屏蔽"
        echo
        echo -e "当前 BT 屏蔽状态: $_info"
        echo
        read -p "$(echo -e "请选择 [${magenta}1-2$none]:")" _opt
        if [[ -z $_opt ]]; then
            error
        else
            case $_opt in
            1)
                if [[ $ban_bt ]]; then
                    # Already enabled: nothing to do, just inform the user.
                    echo
                    echo -e " 大胸弟...难不成你没有看到 (当前 BT 屏蔽状态: $_info) 这个帅帅的提示么.....还开启个鸡鸡哦"
                    echo
                else
                    echo
                    echo
                    echo -e "$yellow BT 屏蔽 = $cyan开启$none"
                    echo "----------------------------------------------------------------"
                    echo
                    pause
                    # Snapshot config tagged "+bt", set the flag, regenerate config.
                    backup_config +bt
                    ban_bt=true
                    config
                    echo
                    echo
                    echo -e "$green BT 屏蔽已开启...如果出现异常..那就关闭它咯$none"
                    echo
                fi
                break
                ;;
            2)
                if [[ $ban_bt ]]; then
                    echo
                    echo
                    echo -e "$yellow BT 屏蔽 = $cyan关闭$none"
                    echo "----------------------------------------------------------------"
                    echo
                    pause
                    # Snapshot config tagged "-bt", clear the flag, regenerate config.
                    backup_config -bt
                    ban_bt=''
                    config
                    echo
                    echo
                    echo -e "$red BT 屏蔽已关闭...不过你也可以随时重新开启 ...只要你喜欢$none"
                    echo
                else
                    # Already disabled: nothing to do, just inform the user.
                    echo
                    echo -e " 大胸弟...难不成你没有看到 (当前 BT 屏蔽状态: $_info) 这个帅帅的提示么.....还关闭个鸡鸡哦"
                    echo
                fi
                break
                ;;
            *)
                error
                ;;
            esac
        fi
    done
}
|
<reponame>ghsecuritylab/bk7231_rtt_sdk<gh_stars>10-100
/*
* File : rt_ota_flash_sfud_port.c
* COPYRIGHT (C) 2012-2018, Shanghai Real-Thread Technology Co., Ltd
*
* Change Logs:
* Date Author Notes
* 2018-01-26 armink the first version
*/
#include <rt_ota_flash_dev.h>
#ifdef RT_OTA_FLASH_PORT_DRIVER_SFUD
#include <sfud.h>
extern sfud_flash sfud_norflash0;
/* Read `size` bytes at `offset` within the nor_flash0 partition into `buf`.
 * Returns the number of bytes read, or -1 on failure.
 * FIX: the SFUD status was previously discarded, so read failures were
 * silently reported as success — now checked, consistent with write()/erase(). */
static int read(uint32_t offset, uint8_t *buf, size_t size)
{
    if (sfud_read(&sfud_norflash0, nor_flash0.addr + offset, size, buf) != SFUD_SUCCESS)
    {
        return -1;
    }
    return size;
}
/* Write `size` bytes from `buf` at `offset` within the nor_flash0 partition.
 * Returns the number of bytes written, or -1 on SFUD failure. */
static int write(uint32_t offset, const uint8_t *buf, size_t size)
{
    if (sfud_write(&sfud_norflash0, nor_flash0.addr + offset, size, buf) != SFUD_SUCCESS)
    {
        return -1;
    }
    return size;
}
/* Erase `size` bytes starting at `offset` within the nor_flash0 partition.
 * `offset`/`size` are presumably sector-aligned as SFUD requires — confirm at call sites.
 * Returns the number of bytes erased, or -1 on SFUD failure. */
static int erase(uint32_t offset, size_t size)
{
    if (sfud_erase(&sfud_norflash0, nor_flash0.addr + offset, size) != SFUD_SUCCESS)
    {
        return -1;
    }
    return size;
}
const struct rt_ota_flash_dev nor_flash0 = { "norflash0", 0, 8*1024*1024, {read, write, erase} };
#endif /* RT_OTA_FLASH_PORT_DRIVER_SFUD */
|
<reponame>cpa-bayarea/pope-api
from . import api
from flask import jsonify
@api.route('/organizations', methods=['GET'])
def get_organizations():
    """
    Recupera informações sobre as organizações
    ---
    default: all
    responses:
      200:
        description: Recupera os dados das organizações
    """
    # NOTE(review): the docstring above is Swagger/flasgger YAML consumed at
    # runtime to render the API docs, so it is intentionally left as-is.
    # Hard-coded sample organization payload; presumably a placeholder for a
    # future database lookup — TODO confirm.
    data = {
        'name': '<NAME>',
        'tell': '(61) 3333 - 3333',
        'email': '<EMAIL>',
        'address': 'QNN 23 cj j casa 3',
        'city': 'Ceilândia',
        'description': 'Cartório e talz',
        'free': 0,
        'area': 'Jurídica',
        'id_sub_area': 'Cartórios',
        'attendance': {
            'seg': '8 as 18h',
            'ter': '8 as 18h',
            'qua': '8 as 18h',
            'qui': '8 as 18h',
            'sex': '8 as 18h',
            'sab': '8 as 18h',
            'dom': '8 as 18h',
        },
    }
    # Serialize the payload as a JSON response with a 200 status.
    return jsonify(data)
|
SET randomNumCounter = 0
SET randomNumGenerator = 0
WHILE randomNumCounter < 100
    SET randomNumGenerator TO a newly generated random number
    PRINT randomNumGenerator
    INCREMENT randomNumCounter BY 1
END WHILE |
#!/bin/bash
# Launch the Homebrew-managed Grafana server with its dedicated config and
# data path, capturing stdout and stderr in a single log file.
# FIX: abort if the cd fails — previously the server would silently start
# from whatever the current directory happened to be.
cd /usr/share/grafana.homebrew || exit 1
grafana-server -config /etc/grafana.homebrew/grafana.ini cfg:default.paths.data=/var/lib/grafana.homebrew 1>/var/log/grafana.homebrew.log 2>&1
|
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
public class SimpleContainer {
private Map<Class<?>, Object> beans = new HashMap<>();
public void register(Class<?> clazz) {
if (!beans.containsKey(clazz)) {
try {
Object instance = clazz.getDeclaredConstructor().newInstance();
beans.put(clazz, instance);
} catch (Exception e) {
e.printStackTrace();
}
}
}
public <T> T getBean(Class<T> clazz) {
if (beans.containsKey(clazz)) {
T instance = (T) beans.get(clazz);
resolveDependencies(instance);
invokePostConstruct(instance);
return instance;
}
return null;
}
public void destroyBean(Object bean) {
invokePreDestroy(bean);
beans.values().removeIf(value -> value.equals(bean));
}
private <T> void resolveDependencies(T instance) {
for (Method method : instance.getClass().getDeclaredMethods()) {
if (method.isAnnotationPresent(Autowired.class)) {
Class<?>[] parameterTypes = method.getParameterTypes();
Object[] dependencies = new Object[parameterTypes.length];
for (int i = 0; i < parameterTypes.length; i++) {
dependencies[i] = beans.get(parameterTypes[i]);
}
try {
method.setAccessible(true);
method.invoke(instance, dependencies);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
private void invokePostConstruct(Object instance) {
for (Method method : instance.getClass().getDeclaredMethods()) {
if (method.isAnnotationPresent(PostConstruct.class)) {
try {
method.setAccessible(true);
method.invoke(instance);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
private void invokePreDestroy(Object instance) {
for (Method method : instance.getClass().getDeclaredMethods()) {
if (method.isAnnotationPresent(PreDestroy.class)) {
try {
method.setAccessible(true);
method.invoke(instance);
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
} |
#!/bin/sh
# Build the image and push it to the registry.
# Required environment: DREPO (repo[:tag]), DOCKER_USER, DOCKER_PASS.
# FIX: abort on any failure — previously a failed build still attempted
# login and push. Variables are quoted to survive unusual values.
set -e
docker build --rm -t "$DREPO" .
# --password-stdin keeps the password out of argv / `ps` output.
echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin
docker push "$DREPO"
def find_closest_to_average(input_array):
    """Return the element of ``input_array`` closest to the array's mean.

    Ties are broken in favour of the element that appears first, matching the
    original strict-less-than comparison.

    Args:
        input_array: A non-empty sequence of numbers.

    Returns:
        The element with the smallest absolute distance to the mean.

    Raises:
        ValueError: If ``input_array`` is empty (previously this surfaced as a
            confusing ZeroDivisionError from the mean computation).
    """
    if not input_array:
        raise ValueError("input_array must not be empty")
    avg = sum(input_array) / len(input_array)
    # min() returns the first element among equals, preserving the original
    # tie-breaking behaviour while replacing the manual tracking loop.
    return min(input_array, key=lambda num: abs(num - avg))
#!/bin/sh
SCRIPT="$0"
echo "# START SCRIPT: $SCRIPT"

# Resolve symlinks so the real script location can be found.
while [ -h "$SCRIPT" ] ; do
  ls=`ls -ld "$SCRIPT"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=`dirname "$SCRIPT"`/"$link"
  fi
done

# Default APP_DIR to the script's parent directory when not already set to a directory.
if [ ! -d "${APP_DIR}" ]; then
  APP_DIR=`dirname "$SCRIPT"`/..
  APP_DIR=`cd "${APP_DIR}"; pwd`
fi

# Build the generator CLI jar on first use.
executable="./modules/openapi-generator-cli/target/openapi-generator-cli.jar"
if [ ! -f "$executable" ]
then
  mvn -B clean package
fi

# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -XX:MaxPermSize=256M -Xmx1024M -DloggerPath=conf/log4j.properties"
# Generate the scala-play-framework Petstore sample server; extra CLI args pass through via $@.
ags="generate -t modules/openapi-generator/src/main/resources/scala-play-framework -i modules/openapi-generator/src/test/resources/2_0/petstore.yaml -g scala-play-framework -o samples/server/petstore/scala-play-framework $@"
java $JAVA_OPTS -jar $executable $ags
import bs4
from bs4 import BeautifulSoup

# Sample HTML table: one header row (th) followed by data rows (td).
html = '''<table>
 <tr>
   <th>Name</th>
   <th>Age</th>
   <th>Location</th>
 </tr>
 <tr>
   <td>Jane Doe</td>
   <td>22</td>
   <td>Paris</td>
 </tr>
 <tr>
   <td>John Smith</td>
   <td>25</td>
   <td>London</td>
 </tr>
</table>'''

# Parse with Python's built-in HTML parser (no external parser dependency).
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('table')

# Column names come from the <th> cells of the header row.
th = table.find_all('th')
headers = [i.text for i in th]

# Convert each data row into a dict keyed by the headers. The header row
# contains no <td> cells, so its empty `row` list is skipped by the check.
data = []
for tr in table.find_all('tr'):
    tds = tr.find_all('td')
    row = [i.text for i in tds]
    if row:
        data.append(dict(zip(headers, row)))

print(data)
#!/bin/bash
# Copyright 2015 Midokura SARL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script is meant to be sourced from devstack. It is a wrapper of
# devmido scripts that allows proper exporting of environment variables.
# Install the tap-as-a-service plugin into the devstack Python environment
# (setup_develop performs an editable pip install).
function install_taas {
    setup_develop $TAAS_PLUGIN_PATH
}
# Wire the taas plugin into Neutron: install its config file, register it with
# the neutron server, enable the service plugin, and deploy rootwrap filters.
function configure_taas_plugin {
    cp $TAAS_PLUGIN_PATH/etc/taas_plugin.ini $TAAS_PLUGIN_CONF_FILE
    neutron_server_config_add $TAAS_PLUGIN_CONF_FILE
    neutron_service_plugin_class_add taas
    neutron_deploy_rootwrap_filters $TAAS_PLUGIN_PATH
}
if is_service_enabled taas; then
if [[ "$1" == "stack" ]]; then
if [[ "$2" == "pre-install" ]]; then
:
elif [[ "$2" == "install" ]]; then
install_taas
elif [[ "$2" == "post-config" ]]; then
configure_taas_plugin
if is_service_enabled q-svc neutron-api; then
neutron-db-manage --subproject tap-as-a-service upgrade head
fi
echo "Configuring taas"
if [ "$TAAS_SERVICE_DRIVER" ]; then
inicomment $TAAS_PLUGIN_CONF_FILE service_providers service_provider
iniadd $TAAS_PLUGIN_CONF_FILE service_providers service_provider $TAAS_SERVICE_DRIVER
fi
elif [[ "$2" == "extra" ]]; then
:
fi
elif [[ "$1" == "unstack" ]]; then
:
fi
fi
if is_service_enabled q-agt neutron-agent; then
if [[ "$1" == "stack" ]]; then
if [[ "$2" == "pre-install" ]]; then
:
elif [[ "$2" == "install" ]]; then
install_taas
elif [[ "$2" == "post-config" ]]; then
if is_service_enabled q-agt neutron-agent; then
source $NEUTRON_DIR/devstack/lib/l2_agent
plugin_agent_add_l2_agent_extension taas
configure_l2_agent
fi
elif [[ "$2" == "extra" ]]; then
:
fi
elif [[ "$1" == "unstack" ]]; then
:
fi
fi
|
<reponame>togiter/RRWallet
package com.renrenbit.rrwallet.service.push;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.Utils;
import com.renrenbit.rrwallet.BuildConfig;
import com.renrenbit.rrwallet.service.statistics.UmengAnalyticsAgent;
import com.renrenbit.rrwallet.utils.DeviceUuidFactory;
import com.facebook.react.bridge.Callback;
import com.umeng.commonsdk.UMConfigure;
import com.umeng.message.IUmengRegisterCallback;
import com.umeng.message.PushAgent;
import com.umeng.message.UHandler;
import com.umeng.message.UTrack;
import com.umeng.message.entity.UMessage;
import org.android.agoo.huawei.HuaWeiRegister;
import org.android.agoo.xiaomi.MiPushRegistar;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Vector;
/**
* Created by jackQ on 2018/6/18.
*/
public class PushManager {
    // FIX: must be volatile for the double-checked locking in inst() to be
    // safe under the Java Memory Model; without it another thread can observe
    // a partially-constructed instance.
    private static volatile PushManager sInstance;
    // Push events that arrived before any listener was registered; drained in
    // registerPushEventListener().
    private final Vector<PushEvent> mMessageCacheVector = new Vector<>();
    // Single listener despite the plural name.
    public PushEventListener mListeners = null;
    ActivityHookService mHook = new ActivityHookService();
    private PushAgent mPushAgent;
    private PushRegisterCallback mRegisterCallback;
    private boolean mRegisterFinished;
    private boolean mRegisterSuccess;
    // Umeng registration failure code/detail, replayed to late-set callbacks.
    private String s;
    private String s1;
    // NOTE(review): never assigned or read within this class — possibly dead;
    // confirm before removing.
    private Callback mMessageHandler;

    /** Private: use {@link #inst()}. Initialises Umeng common, analytics and push. */
    private PushManager() {
        initUmengCommon();
        registerAnalytics();
        registerPush();
    }

    private void registerAnalytics() {
        UmengAnalyticsAgent.init(Utils.getApp());
    }

    /** Lazily creates and returns the process-wide singleton (thread-safe DCL). */
    public static PushManager inst() {
        if (sInstance == null) {
            synchronized (PushManager.class) {
                if (sInstance == null) {
                    sInstance = new PushManager();
                }
            }
        }
        return sInstance;
    }

    /** Builds a launcher-equivalent intent for the given package, or null if it has none. */
    private static Intent getLaunchIntentForPackage(Context context, String packageName) {
        Intent intent = context.getPackageManager().getLaunchIntentForPackage(packageName);
        if (intent == null)
            return null;
        // MIUI 2.3 did not set CATEGORY_LAUNCHER
        if (!intent.hasCategory(Intent.CATEGORY_LAUNCHER)) {
            intent.addCategory(Intent.CATEGORY_LAUNCHER);
        }
        // set package to null and add flags so this intent has same
        // behavior with app launcher
        intent.setPackage(null);
        intent.addFlags(Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        return intent;
    }

    private void initUmengCommon() {
        UMConfigure.setLogEnabled(BuildConfig.DEBUG);
        UMConfigure.setEncryptEnabled(true);
    }

    /** Registers Huawei + Umeng push, installs lifecycle hooks and the click handler. */
    private void registerPush() {
        HuaWeiRegister.register(Utils.getApp());
        mPushAgent = PushAgent.getInstance(Utils.getApp());
        mPushAgent.setPushCheck(BuildConfig.DEBUG);
        mPushAgent.setMessageHandler(new MessageHandler());
        mPushAgent.addAlias(DeviceUuidFactory.inst().getDeviceUuid(), "device_id", new UTrack.ICallBack() {
            @Override
            public void onMessage(boolean isSuccess, String message) {
                LogUtils.d("onMessage : isSuccess" + isSuccess + " , message : " + message);
            }
        });
        // Register the push service; this callback fires on every register() call.
        mPushAgent.register(new IUmengRegisterCallback() {
            @Override
            public void onSuccess(final String deviceToken) {
                LogUtils.dTag(PushManager.class.getSimpleName(), "register push success,deviceToken :" + deviceToken);
                mRegisterFinished = true;
                mRegisterSuccess = true;
                if (mRegisterCallback != null) {
                    mRegisterCallback.onSuccess(deviceToken);
                }
            }

            @Override
            public void onFailure(final String s, final String s1) {
                LogUtils.dTag(PushManager.class.getSimpleName(), "register push failed,deviceToken : s:" + s + ", s1 : " + s1);
                mRegisterFinished = true;
                mRegisterSuccess = false;
                // Keep the failure details so a callback set later can be notified.
                PushManager.this.s = s;
                PushManager.this.s1 = s1;
                if (mRegisterCallback != null) {
                    mRegisterCallback.onFailure(s, s1);
                }
            }
        });
        // Forward every activity lifecycle event to the hook service.
        Utils.getApp().registerActivityLifecycleCallbacks(new Application.ActivityLifecycleCallbacks() {
            @Override
            public void onActivityCreated(Activity activity, Bundle savedInstanceState) {
                mHook.onActivityCreated(activity, savedInstanceState);
            }

            @Override
            public void onActivityStarted(Activity activity) {
                mHook.onActivityStarted(activity);
            }

            @Override
            public void onActivityResumed(Activity activity) {
                mHook.onActivityResumed(activity);
            }

            @Override
            public void onActivityPaused(Activity activity) {
                mHook.onActivityPaused(activity);
            }

            @Override
            public void onActivityStopped(Activity activity) {
                mHook.onActivityStopped(activity);
            }

            @Override
            public void onActivitySaveInstanceState(Activity activity, Bundle outState) {
                mHook.onActivitySaveInstanceState(activity, outState);
            }

            @Override
            public void onActivityDestroyed(Activity activity) {
                mHook.onActivityDestroyed(activity);
            }
        });
        // Notification taps are converted into PushEvents and dispatched.
        mPushAgent.setNotificationClickHandler(new UHandler() {
            @Override
            public void handleMessage(Context context, UMessage uMessage) {
                Map<String, String> extra = uMessage.extra;
                String custom = uMessage.custom;
                String title = uMessage.title;
                String text = uMessage.text;
                PushEvent event = new PushEvent(extra, custom, title, text);
                PushManager.this.handleMessage(context, event);
            }
        });
    }

    /**
     * Sets the registration-result callback. If registration already finished,
     * the callback is invoked immediately with the stored outcome; otherwise it
     * is stored and invoked when registration completes. Passing null clears it.
     */
    public void setRegisterCallback(PushRegisterCallback callback) {
        if (callback == null) {
            mRegisterCallback = null;
            return;
        }
        if (mRegisterFinished) {
            if (mRegisterSuccess) {
                callback.onSuccess(mPushAgent.getRegistrationId());
            } else {
                callback.onFailure(s, s1);
            }
        } else {
            mRegisterCallback = callback;
        }
    }

    /**
     * Dispatches a push event: brings the RN activity to the foreground, then
     * delivers to the listener, or caches the event when no listener is set.
     */
    public void handleMessage(Context context, PushEvent pushEvent) {
        LogUtils.dTag("PushManager", "handleMessage : " + pushEvent);
        openRNActivityIfNeeded(Utils.getApp());
        if (mListeners != null) {
            mListeners.onEvent(pushEvent);
        } else {
            synchronized (mMessageCacheVector) {
                mMessageCacheVector.add(pushEvent);
            }
        }
    }

    /** Forwards a received (not clicked) push event to the listener, if any. */
    public void onReceiveMessage(Context context, PushEvent pushEvent) {
        if (mListeners != null) {
            mListeners.onReceive(pushEvent);
        }
    }

    // NOTE(review): the `context` parameter is ignored; Utils.getApp() is used
    // instead — confirm whether that is intentional.
    private void openRNActivityIfNeeded(Context context) {
        Intent intent = getLaunchIntentForPackage(Utils.getApp(), Utils.getApp().getPackageName());
        if (intent != null) {
            Utils.getApp().startActivity(intent);
        }
    }

    /**
     * Installs the (single) listener and replays any events that were cached
     * while no listener was registered.
     */
    public void registerPushEventListener(PushEventListener listener) {
        mListeners = listener;
        List<PushEvent> messages = new ArrayList<>();
        synchronized (mMessageCacheVector) {
            if (!mMessageCacheVector.isEmpty()) {
                messages.addAll(mMessageCacheVector);
                mMessageCacheVector.clear();
            }
        }
        for (PushEvent msg : messages) {
            mListeners.onEvent(msg);
        }
    }

    // NOTE(review): clears the listener unconditionally, ignoring the argument
    // — any caller removes whichever listener is installed.
    public void removePushEventListener(PushEventListener listener) {
        mListeners = null;
    }

    /** Outcome callback for Umeng push registration. */
    public interface PushRegisterCallback {
        void onSuccess(String deviceToken);

        void onFailure(String s, String s1);
    }

    /** Receiver for dispatched (clicked) and passively received push events. */
    public interface PushEventListener {
        void onEvent(PushEvent event);

        void onReceive(PushEvent event);
    }
}
|
<filename>packages/bench/src/suites/fetch-global-data.ts<gh_stars>1-10
const query = `query globalData {
feature {
questionTranslation
subscription
signUp
discuss
mockInterview
contest
store
book
chinaProblemDiscuss
socialProviders
studentFooter
enableChannels
dangerZone
cnJobs
cnAddons
__typename
}
userStatus {
isSignedIn
isAdmin
isStaff
isSuperuser
isTranslator
isPremium
isVerified
checkedInToday
username
realName
userSlug
avatar
optedIn
requestRegion
region
activeSessionId
permissions
notificationStatus {
lastModified
numUnread
__typename
}
completedFeatureGuides
__typename
}
}
`
// POSTs the prebuilt `globalData` GraphQL query to the /graphql endpoint and
// returns the raw fetch Promise (the response is not parsed or status-checked here).
export const fetchGlobalData = () =>
  fetch('/graphql', {
    body: JSON.stringify({
      query,
      variables: null,
    }),
    headers: {
      'Content-Type': 'application/json',
    },
    method: 'POST',
  })
|
package com.bjpowernode.crm.settings.dao;
import com.bjpowernode.crm.settings.domain.DictionaryValue;
import java.util.List;
/** Data-access interface for {@link DictionaryValue} records. */
public interface DictionaryValueDao {

    /** Returns the number of dictionary values whose type code equals {@code code}. */
    int findCountByTypeCode(String code);

    /** Returns every dictionary value. */
    List<DictionaryValue> findAll();
}
|
import java.util.ArrayList;
/**
 * A process-wide, in-memory catalogue of {@link Book}s with simple
 * add / search / check-out / return operations. All state is static.
 */
public class BookLibrary {

    /** Backing store for every book known to the library. */
    private static ArrayList<Book> shelf = new ArrayList<>();

    /** Adds {@code book} to the catalogue. */
    public static void addBook(Book book) {
        shelf.add(book);
    }

    /**
     * Returns the first book whose title matches {@code title} ignoring case,
     * or {@code null} when no book matches.
     */
    public static Book searchBook(String title) {
        for (int i = 0; i < shelf.size(); i++) {
            Book candidate = shelf.get(i);
            if (candidate.getTitle().equalsIgnoreCase(title)) {
                return candidate;
            }
        }
        return null;
    }

    /** Marks {@code book} as checked out. */
    public static void checkOut(Book book) {
        book.setCheckedOut(true);
    }

    /** Marks {@code book} as returned (no longer checked out). */
    public static void returnBook(Book book) {
        book.setCheckedOut(false);
    }
}
/** A library book: title, author, and whether it is currently checked out. */
class Book {
    private String title;
    private String author;
    // New books start as not checked out (see constructor).
    private boolean checkedOut;

    /** Creates a book that is initially available (not checked out). */
    public Book(String title, String author) {
        this.title = title;
        this.author = author;
        this.checkedOut = false;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getAuthor() {
        return author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public boolean isCheckedOut() {
        return checkedOut;
    }

    public void setCheckedOut(boolean checkedOut) {
        this.checkedOut = checkedOut;
    }

    @Override
    public String toString() {
        return "Book{" +
                "title='" + title + '\'' +
                ", author='" + author + '\'' +
                ", checkedOut=" + checkedOut +
                '}';
    }
}
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
import os
from braket.jobs import save_job_result
from braket.jobs.metrics import log_metric
import json
import networkx as nx
# Load community detection specific library
from src.qbsolv_community import QbsolvCommunity
def main():
    """Braket Hybrid Job entry point: runs QBSolv-based community detection on
    an input graph, logs the modularity metric, and saves the job result."""
    # Print statements can be viewed in cloudwatch
    print(os.environ)

    # Standard Braket job environment variables (injected by the service).
    input_dir = os.environ["AMZN_BRAKET_INPUT_DIR"]
    hp_file = os.environ["AMZN_BRAKET_HP_FILE"]
    job_name = os.environ["AMZN_BRAKET_JOB_NAME"]
    s3_bucket = os.environ["AMZN_BRAKET_OUT_S3_BUCKET"]
    device_arn = os.environ["AMZN_BRAKET_DEVICE_ARN"]

    # Read the hyperparameters
    with open(hp_file, "r") as f:
        hyperparams = json.load(f)
    print(hyperparams)

    # Graph related parameters (string values arrive JSON-quoted, hence strip('"')).
    input_graph_file = str(hyperparams["input_graph_file"]).strip('"')
    k = int(hyperparams["num_community"])

    # QBSolv related parameters
    solver_mode = str(hyperparams["solver_mode"]).strip('"')
    solver_limit = int(hyperparams["solver_limit"])
    num_repeats = int(hyperparams["num_repeats"])
    num_reads = int(hyperparams["num_reads"])
    seed = int(hyperparams["seed"])
    alpha = int(hyperparams["alpha"])

    print(f"Load graph file from {input_dir}/input-graph/{input_graph_file}")
    nx_G = nx.read_weighted_edgelist(
        f"{input_dir}/input-graph/{input_graph_file}",
        delimiter=None,  # check the input graph file and update the delimiter here
        create_using=nx.Graph(),
        nodetype=int)
    print(f"Input graph information: {nx.info(nx_G)}")

    # Initialize QbsolvCommunity class
    qbsolv_comm = QbsolvCommunity(nx_G, solver_limit, num_repeats, num_reads, seed, alpha)

    # Dispatch on solver mode: "classical" runs locally, "hybrid" submits
    # Braket tasks (whose outputs land under the job's S3 task prefix).
    if solver_mode == "classical":
        print("Executing QBSolv Classical solver for community detection")
        comm_results, qbsolv_output = qbsolv_comm.solve_classical(k)
    elif solver_mode == "hybrid":
        # QBSolv Hybrid solver specific input
        s3_task_prefix = f"jobs/{job_name}/tasks"  # the folder name in the S3 braket bucket to save QBSolv task output
        s3_folder = (s3_bucket, s3_task_prefix)
        print("Executing QBSolv Hybrid solver for community detection")
        comm_results, qbsolv_output = qbsolv_comm.solve_hybrid(k, s3_folder, device_arn, ack_QPUcost=True)
    else:
        raise ValueError(f"Invalid qbsolv solver mode {solver_mode}. Solver mode has to be in ['classical', 'hybrid']!")

    # Emit modularity so it appears in the job's metric stream.
    log_metric(
        metric_name="Modularity",
        value=comm_results["modularity"],
    )

    # We're done with the job, so save the result.
    # This will be returned in job.result()
    print('Save results')
    save_job_result({"community_results": str(comm_results), "hyperparams": str(hyperparams), "qbsolv_output": str(qbsolv_output)})


if __name__ == "__main__":
    main()
<reponame>smagill/opensphere-desktop<gh_stars>10-100
/**
*
*/
package io.opensphere.core.common.shapefile;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.nio.channels.NonWritableChannelException;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import javax.activation.UnsupportedDataTypeException;
import io.opensphere.core.common.shapefile.ESRIShapefile.Mode;
import io.opensphere.core.common.shapefile.shapes.MeasureMinMax;
import io.opensphere.core.common.shapefile.shapes.ShapeRecord;
import io.opensphere.core.common.shapefile.shapes.ShapeRecord.ShapeType;
import io.opensphere.core.common.shapefile.shapes.ZMinMax;
import io.opensphere.core.common.shapefile.utils.ShapeHeader;
/**
* This class represents the "main" (shp) portion of an ESRI Shapefile.
*
* @deprecated Deprecated to com.bitsys.common.shapefile.v2.MainPortion
* @see io.opensphere.core.common.shapefile.v2.main.MainPortion
*/
@Deprecated
class MainPortion implements Collection<ShapeRecord>
{
// Contents
// Contents
/**
 * One record of the .shp file: an 8-byte big-endian record header (record
 * number + content length) followed by the shape payload.
 */
public class MainPortionRecord
{
    /* Byte Position Field Value Type Order Byte 0 Record Number Record
     * Number Integer Big Byte 4 Content Length Content Length Integer Big
     *
     * Content Length is # of 16bit words. */

    // 1-based record number as stored in the file.
    private int recordNumber;

    // Payload length in 16-bit words (excludes this 8-byte record header).
    private int contentLength;

    // Decoded shape payload; null until parseRecord()/setRecord() runs.
    private ShapeRecord record;

    /**
     * Constructor. Intentionally does nothing. Use parse() functions to
     * force the file to be read.
     */
    public MainPortionRecord()
    {
    }

    /**
     * Constructor. Setting record
     **/
    public MainPortionRecord(ShapeRecord rec)
    {
        record = rec;
    }

    /**
     * Returns the record number.
     *
     * @return the recordNumber.
     */
    public int getRecordNumber()
    {
        return recordNumber;
    }

    /**
     * Returns the content length.
     *
     * @return the contentLength.
     */
    public int getContentLength()
    {
        return contentLength;
    }

    /**
     * Returns the <code>ShapeRecord</code> instance.
     *
     * @return the shape record.
     */
    public ShapeRecord getRecord()
    {
        return record;
    }

    public void setRecord(ShapeRecord rec)
    {
        record = rec;
    }

    /**
     * Forces the parsing of the file.
     *
     * @return Returns false if parsing fails in a way that doesn't throw an
     *         exception.
     * @throws InstantiationException
     * @throws IllegalAccessException
     * @throws SecurityException
     * @throws NoSuchMethodException
     * @throws InvocationTargetException
     * @throws IllegalArgumentException
     */
    public boolean parseRecord(ByteBuffer buffer) throws InstantiationException, IllegalAccessException,
        IllegalArgumentException, InvocationTargetException, NoSuchMethodException, SecurityException
    {
        boolean returnValue = true;

        // Record headers are always big-endian per the ESRI shapefile spec.
        buffer.order(ByteOrder.BIG_ENDIAN);
        recordNumber = buffer.getInt();
        contentLength = buffer.getInt();

        // The shape type comes from the enclosing file header, not the record.
        ShapeType type = ShapeType.getInstance(header.shapeType);
        if (type != null)
        {
            record = type.getShapeRecordInstance();
            returnValue = record.parseRecord(buffer);
        }
        else
        {
            // Unknown shape type in the header; signal failure to the caller.
            returnValue = false;
        }
        return returnValue;
    }

    /**
     * Writes this record (header + payload) at the channel's current
     * position.
     *
     * @param recNumber the 1-based record number to stamp into the header
     * @param channel destination channel
     * @return always true; I/O failures throw
     * @throws IOException on write failure
     */
    public boolean writeRecord(int recNumber, FileChannel channel) throws IOException
    {
        boolean returnValue = true;
        // System.out.println("Starting Record " + recNumber + " At Pos: " + channel.position() );
        channel.write(getAsByteBuffer(recNumber));
        return returnValue;
    }

    /**
     * Serializes this record into a flipped, ready-to-write buffer.
     *
     * @param recNumber the 1-based record number for the header
     * @return buffer containing the 8-byte header followed by the payload
     */
    public ByteBuffer getAsByteBuffer(int recNumber)
    {
        // +8 bytes for the record header (record number + content length).
        ByteBuffer buff = ByteBuffer.allocate(record.getLengthInBytes() + 8);
        buff.order(ByteOrder.BIG_ENDIAN);
        buff.putInt(recNumber);
        buff.putInt(record.getContentLengthInWords());
        record.writeRecord(buff);
        buff.flip();
        return buff;
    }
}
/**
 * Read-only iterator over the shape records of the .shp file, driven by the
 * per-record offsets stored in the associated index (.shx) portion.
 */
public class MainPortionIterator implements Iterator<ShapeRecord>
{
    MainPortion parent = null;

    FileChannel thisFile = null;

    // Index (into index.records) of the next record to return.
    int nextRecord = 0;

    public MainPortionIterator(MainPortion mp)
    {
        try
        {
            parent = mp;
            thisFile = mp.inputStream.getChannel();
            // reset position
            if (!index.records.isEmpty())
            {
                // NOTE(review): index offsets are in 16-bit words and next()
                // multiplies by 2, but this seek does not. Harmless since
                // next() always repositions before reading — confirm intent.
                thisFile.position(index.records.get(0).offset);
            }
        }
        catch (IOException e)
        {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    @Override
    public boolean hasNext()
    {
        return nextRecord < parent.size();
    }

    /**
     * Reads and parses the next record from the file.
     * <p>
     * NOTE(review): I/O and reflection failures are only printed; in that
     * case the returned payload may be null — callers should check.
     */
    @Override
    public ShapeRecord next()
    {
        MainPortionRecord record = new MainPortionRecord();
        try
        {
            // Index offsets are 16-bit words; the channel position is bytes.
            thisFile.position(2 * parent.index.records.get(nextRecord).offset);
            int bytesToAllocate = 2 * (index.records.get(nextRecord).contentLength + 4);
            // Offset units is # of 16bit words
            // Extra 4 words is for the mainportionrecord headers around the
            // shaperecord
            ByteBuffer buffer = ByteBuffer.allocate(bytesToAllocate);
            thisFile.read(buffer);
            buffer.flip();
            record.parseRecord(buffer);
            nextRecord++;
        }
        catch (IOException e)
        {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        catch (ReflectiveOperationException e)
        {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return record.getRecord();
    }

    /** Removal from a file-backed iterator is not supported. */
    @Override
    public void remove()
    {
        throw new UnsupportedOperationException();
    }
}
// READ or WRITE; fixed for the lifetime of this object.
private final ESRIShapefile.Mode mode;

private File file = null;

// Non-null only in READ mode.
private FileInputStream inputStream = null;

// Non-null only in WRITE mode.
private FileOutputStream outputStream = null;

private ShapeHeader header = null;

// Companion .shx portion; attached later via addIndex().
private IndexPortion index = null;

/**
 * Opens the .shp file at the given path for reading or writing.
 *
 * @param mode whether the file is being read or written
 * @param filePath path to the .shp file
 * @throws FileNotFoundException if the file cannot be opened
 */
public MainPortion(ESRIShapefile.Mode mode, String filePath) throws FileNotFoundException
{
    this.mode = mode;
    file = new File(filePath);
    if (mode == ESRIShapefile.Mode.READ)
    {
        inputStream = new FileInputStream(file);
        header = new ShapeHeader();
    }
    else // if (mode == ESRIShapefile.Mode.WRITE)
    {
        outputStream = new FileOutputStream(file);
        header = new ShapeHeader();
    }
}
/**
 * Returns the shapefile header for this main (.shp) portion.
 *
 * @return the header instance (parsed in READ mode, built up in WRITE mode)
 */
public ShapeHeader getHeader()
{
    return this.header;
}
/**
* Creates the main portion for this ShapeFile, an IndexPortion and Header
* is auto-generated to match the MainPortionRecord[] and Header.
*
* @param header
* @param records
* @throws UnsupportedDataTypeException if the record types in the array are
* not yet supported
* @throws IllegalArgumentException if records does not contain at least one
* record or if the list of records does not contain all of the
* same type of shape.
* @throws NullPointerException if records is null or any value in records
* is null
*/
/* public MainPortion( List<ShapeRecord> records ) { setShapes(records);
* } */
/**
 * Associates the index (.shx) portion with this main portion; record
 * lookups and size queries are delegated to it.
 *
 * @param indexPortion the index portion to attach
 */
public void addIndex(IndexPortion indexPortion)
{
    this.index = indexPortion;
}
/* /** Writes the main portion to the two associated files .shp and .shx
* written as filePath/fileNamePrefix.shp and filePath/fileNamePrefix.shx
*
* @param filePath - path for the file
*
* @param fileNamePrefix - the file name prefix
*
* @return true if successful
*
* @throws IOException
*
* @throws FileNotFoundException *//* public boolean writeToFiles( String
* filePath, String fileNamePrefix )
* throws IOException,
* FileNotFoundException { boolean
* returnValue = true;
*
* if ( this.index == null && records !=
* null ) setShapes(records);
*
* File shpFile = new File( filePath +
* File.separator + fileNamePrefix +
* ".shp"); File shxFile = new File(
* filePath + File.separator +
* fileNamePrefix + ".shx");
*
* // TODO: Write the main file.
* FileOutputStream shpFOS = new
* FileOutputStream(shpFile); FileChannel
* shpChannel = shpFOS.getChannel(); try
* { // System.out.
* println("Writing Main File: ");
* this.header.writeHeader(shpChannel);
* // TODO clean up inefficiencies for (
* int i = 0; i < records.size(); i++ )
* records.get(i).writeRecord(i+1,
* shpChannel); } finally {
* shpChannel.close(); }
*
* // Write out the index file
* FileOutputStream shxFOS = new
* FileOutputStream(shxFile);
* this.index.writeFile(shxFOS);
*
* return returnValue; }
*
* /** Gets an array of byte buffers.
*
* @return ByteBuffer[] always length of
* 2 Index 0: shp ByteBuffer Index 1: shx
* ByteBuffer *//* public ByteBuffer[]
* getAsByteBuffer() {
* ByteBuffer[] buffArray
* = new ByteBuffer[2];
* int allocateSize =
* ShapeHeader.
* HEADER_SIZE;
* List<ByteBuffer>
* recordBuffs = new
* ArrayList<ByteBuffer>(
* ); //Capture all of
* the record byte
* buffers so that we can
* //calculate an
* allocation size, and
* add to the overall
* //shpBuffer
*
* // TODO Do we really
* need all this copying?
* for ( int i = 0; i <
* records.size(); i++ )
* { ByteBuffer
* recordBuff =
* records.get(i).
* getAsByteBuffer(i+1);
* recordBuffs.add(
* recordBuff);
* allocateSize +=
* recordBuff.limit(); }
* ByteBuffer shpBuffer =
* ByteBuffer.allocate(
* allocateSize);
* shpBuffer.put(this.
* header.getAsByteBuffer
* ()); for (ByteBuffer
* recordBuff:
* recordBuffs) {
* shpBuffer.put(
* recordBuff); }
* shpBuffer.flip();
* buffArray[0] =
* shpBuffer;
*
* //Add the shx byte
* buffer buffArray[1] =
* this.index.
* getAsByteBuffer();
* return buffArray; } */
/**
 * Prepares the portion for use. In READ mode this reads and parses the
 * fixed-size shapefile header; in WRITE mode it is currently a no-op.
 *
 * @return false if the header failed to parse
 * @throws IOException on read failure
 */
boolean doPrep() throws IOException
{
    boolean returnValue = true;
    if (mode == ESRIShapefile.Mode.READ)
    {
        ByteBuffer buffer = ByteBuffer.allocate(ShapeHeader.HEADER_SIZE);
        inputStream.getChannel().read(buffer);
        buffer.flip();
        returnValue = header.parseHeader(buffer);
    }
    else // if mode == WRITE
    {
        // stuff
    }
    // System.out.println("Header File Length: " + header.fileLength);
    return returnValue;
}
/**
 * Rewinds the underlying channel and exposes the raw .shp stream.
 *
 * @return the input stream positioned at byte 0, or null when not in READ
 *         mode
 * @throws IOException if the channel cannot be repositioned
 */
InputStream getAsInputStream() throws IOException
{
    if (mode == Mode.READ)
    {
        inputStream.getChannel().position(0);
        return inputStream;
    }
    return null;
}

/**
 * Exposes the raw .shx stream of the attached index portion.
 *
 * @return the index stream, or null when not in READ mode or when no index
 *         has been attached
 * @throws IOException if the index stream cannot be repositioned
 */
InputStream getIndexAsInputStream() throws IOException
{
    if (mode == Mode.READ && index != null)
    {
        return index.getAsInputStream();
    }
    return null;
}
/**
* Deletes the file managed by this class.
*
* @throws IOException if an error occurs while closing the input/output
* streams.
*/
/**
 * Deletes the file managed by this class.
 *
 * @throws IOException if an error occurs while closing the input/output
 *             streams.
 */
public void delete() throws IOException
{
    if (file != null && file.exists())
    {
        // Close the file descriptors first.
        // Nested finally blocks guarantee the delete is attempted even if
        // close() or index.delete() throws; the first exception propagates.
        try
        {
            close();
        }
        finally
        {
            try
            {
                index.delete();
            }
            finally
            {
                // If unable to delete the file, delete it on exit.
                if (!file.delete())
                {
                    file.deleteOnExit();
                }
            }
        }
    }
}

/**
 * Closes the index portion (first) and whichever stream is open for the
 * current mode. Safe to call regardless of mode.
 *
 * @throws IOException if any close fails
 */
public void close() throws IOException
{
    if (index != null)
    {
        index.close();
    }
    if (inputStream != null)
    {
        inputStream.close();
    }
    if (outputStream != null)
    {
        outputStream.close();
    }
}
/**
 * Serializes the current header and writes it at byte 0 of the .shp file.
 * A no-op (returning true) when not in WRITE mode.
 *
 * @return false if header serialization reported failure
 * @throws IOException on write failure
 */
public boolean writeHeader() throws IOException
{
    boolean returnValue = true;
    if (mode == ESRIShapefile.Mode.WRITE)
    {
        FileChannel channel = outputStream.getChannel();
        // The shapefile header always lives at the start of the file.
        channel.position(0);
        ByteBuffer buffer = ByteBuffer.allocate(ShapeHeader.HEADER_SIZE);
        returnValue = header.writeHeader(buffer);
        buffer.flip();
        channel.write(buffer);
    }
    return returnValue;
}

/**
 * Writes the index (.shx) records, using this portion's header for the
 * index file header.
 *
 * @return the result of {@code IndexPortion.writeRecords}
 * @throws IOException on write failure
 */
public boolean writeIndex() throws IOException
{
    return index.writeRecords(header);
}
/* public List<ShapeRecord> parseRecords() throws IOException,
* InstantiationException, IllegalAccessException { List<ShapeRecord>
* returnList = new LinkedList<ShapeRecord>();
* parseRecords(Integer.MAX_VALUE, returnList ); return returnList; }
*
* public void parseRecords(int numRecords, List<ShapeRecord> returnList )
* throws IOException, InstantiationException, IllegalAccessException {
* FileChannel channel = inputStream.getChannel();
*
* if (index != null && index.records != null ) { // I have an index, read
* optimally. // This is only going to be important for HUGE shapefiles. //
* For most shapefiles, might be able to get away with loading the whole
* file. int recordsLeft = index.records.length - readPointer;
*
* System.out.println("records: " + (index.records.length - readPointer));
* while ( recordsLeft > 0 && returnList.size() < numRecords ) { int
* recordsThisRead = Math.min(Math.min(numRecords, recordsLeft),
* arbitraryReadSize);
*
* System.out.println("recordsThisRead:" + recordsThisRead); int
* bytesToAllocate = 2*(
* (index.records[(readPointer+recordsThisRead)-1].offset -
* index.records[readPointer].offset) +
* index.records[(readPointer+recordsThisRead)-1].contentLength+4); //Offset
* units is # of 16bit words
*
* ByteBuffer buffer = ByteBuffer.allocate(bytesToAllocate);
* channel.read(buffer); buffer.flip(); for (int i = 0; i < recordsThisRead;
* i++) { MainPortionRecord record = new MainPortionRecord(); if (
* record.parseRecord(buffer) ) returnList.add(record.getRecord()); }
* recordsLeft = recordsLeft - recordsThisRead; readPointer = readPointer +
* recordsThisRead; System.out.println("recordsLeft: " + recordsLeft +
* " readPointer: " + readPointer); } }
*
*
* // System.out.println("Records: " + records.length ); } */
// Convenience function for any case where we have the shapes, and not the
// Full shape record. Shape record has the extra 8 bytes for record number
// and
// content length
/* public void setShapes( List<ShapeRecord> shapeList ) { if ( shapeList ==
* null ) throw new NullPointerException();
*
* if ( shapeList.size() <= 0 ) throw new IllegalArgumentException();
*
* // Explode out shapes into shape records LinkedList<MainPortionRecord>
* shapeRecords = new LinkedList<MainPortionRecord>(); for ( int i = 0; i <
* shapeList.size(); i++ ) shapeRecords.add( new
* MainPortionRecord(shapeList.get(i)) );
*
* setShapes(shapeRecords); } */
/**
 * Validates that the given record's shape type is consistent with the file
 * header, lazily creating the header and adopting the record's type when
 * this is the first record seen (shape type 0 means "unset").
 *
 * @param record the record about to be added
 * @throws IllegalArgumentException if the record's shape type differs from
 *             the type already established for this file (a shapefile may
 *             contain only one shape type)
 */
public void checkHeader(ShapeRecord record)
{
    if (header == null)
    {
        header = new ShapeHeader();
    }

    // First record establishes the file's shape type.
    if (header.shapeType == 0)
    {
        header.shapeType = record.getShapeType();
    }

    // Must be same type throughout file. If it doesn't match,
    // throw something compatible with Collection.add().
    // Fix: include a diagnostic message instead of a bare exception.
    if (header.shapeType != record.getShapeType())
    {
        throw new IllegalArgumentException("Mixed shape types are not allowed: file is type "
                + header.shapeType + " but record is type " + record.getShapeType());
    }
}
/**
* Update the header based upon this record
*
* @param record The record to update from
*/
/**
 * Update the header based upon this record
 *
 * @param record The record to update from
 */
public void updateHeader(ShapeRecord record)
{
    // Shape header is functioning in bytes
    // +8 accounts for the record header (record number + content length).
    header.fileLength += 8 + record.getLengthInBytes();

    // Adjust bbox
    // bbox slots: [0]=Xmin [1]=Ymin [2]=Xmax [3]=Ymax [4]=Zmin [5]=Zmax
    // [6]=Mmin [7]=Mmax.
    double[] shapeBBox = record.getBox();

    // Modify summary bounding boxes if necessary
    if (shapeBBox != null)
    {
        // Adjust bounding region if necessary
        if (shapeBBox[0] < header.bbox[0])
        {
            header.bbox[0] = shapeBBox[0];
        }
        if (shapeBBox[2] > header.bbox[2])
        {
            header.bbox[2] = shapeBBox[2];
        }
        if (shapeBBox[1] < header.bbox[1])
        {
            header.bbox[1] = shapeBBox[1];
        }
        if (shapeBBox[3] > header.bbox[3])
        {
            header.bbox[3] = shapeBBox[3];
        }
    }

    // Modify Z bounds if necessary
    if (record instanceof ZMinMax)
    {
        ZMinMax recZ = (ZMinMax)record;
        if (recZ.getZMin() < header.bbox[4])
        {
            header.bbox[4] = recZ.getZMin();
        }
        if (recZ.getZMax() > header.bbox[5])
        {
            header.bbox[5] = recZ.getZMax();
        }
    }
    else
    {
        // Non-Z shapes force the Z range back to zero.
        if (header.bbox[4] != 0)
        {
            header.bbox[4] = 0;
        }
        if (header.bbox[5] != 0)
        {
            header.bbox[5] = 0;
        }
    }

    // Modify measurment bounds if necessary
    if (record instanceof MeasureMinMax)
    {
        MeasureMinMax recM = (MeasureMinMax)record;
        if (recM.getMeasurementMin() < header.bbox[6])
        {
            header.bbox[6] = recM.getMeasurementMin();
        }
        if (recM.getMeasurementMax() > header.bbox[7])
        {
            header.bbox[7] = recM.getMeasurementMax();
        }
    }
    else
    {
        // Non-measured shapes force the M range back to zero.
        if (header.bbox[6] != 0)
        {
            header.bbox[6] = 0;
        }
        if (header.bbox[7] != 0)
        {
            header.bbox[7] = 0;
        }
    }
}
/* *//**
* Sets the main portion for this ShapeFile, an IndexPortion and
* Header are auto-generated to match the MainPortionRecord list and
* Header.
*
* @param header
* @param records
* @throws UnsupportedDataTypeException if the record types in the
* array are not yet supported
* @throws IllegalArgumentException if records does not contain at
* least one record or if the list of records does not
* contain all of the same type of shape.
* @throws NullPointerException if records is null or any value in
* records is null
*//* public void setShapes( LinkedList<MainPortionRecord> shapes )
* { if ( shapes == null ) throw new NullPointerException();
*
* if ( shapes.size() <= 0 ) throw new
* IllegalArgumentException();
*
* records = (LinkedList<MainPortionRecord>) shapes.clone();
*
* int shapeType =
* records.peekFirst().getRecord().getShapeType();
*
* // Assume all shapeType are the same since that is the
* standard. // To store the bounding box for the header double[]
* bbox = new double[8];
*
* // Prep the bounding box values // Xmin bbox[0] =
* Double.MAX_VALUE; // Ymin bbox[1] = Double.MAX_VALUE; // Xmax
* bbox[2] = Double.MIN_VALUE; // Ymax bbox[3] =
* Double.MIN_VALUE; bbox[4] = 0.0; bbox[5] = 0.0; bbox[6] = 0.0;
* bbox[7] = 0.0;
*
* // Prep Z Min and Max only if we're using a Z type shape. if (
* shapeType == 11 || shapeType == 13 || shapeType == 15 ||
* shapeType == 18) { // Zmin bbox[4] = Double.MAX_VALUE; // Zmax
* bbox[5] = Double.MIN_VALUE; }
*
* // Prep Do M Min and Max only if we're using a M type shape.
* if ( shapeType == 23 || shapeType == 21 || shapeType == 25 ||
* shapeType == 28) { // Mmin bbox[6] = Double.MAX_VALUE; // Mmax
* bbox[7] = Double.MIN_VALUE; }
*
* // Inital 50 for header. int fileLength = 50;
*
* //for ( int i = 0; i < this.records.length; i++ ) // Last
* chance before writing, make sure all the right components are
* // correctly populated. This method is intentionally
* inefficient int numRecords = 0; for ( MainPortionRecord record
* : records ) { if ( record.getRecord().getShapeType() !=
* shapeType ) throw new
* IllegalArgumentException("Mixed Shape Types Detected, This is not allowed"
* );
*
* ShapeRecord rec = record.getRecord();
*
* //Ensure mainportion record has the size component
* record.contentLength = rec.getContentLengthInWords();
*
* // Make sure record numbers and content length are set
* properly. record.recordNumber = ++numRecords;
* record.contentLength =
* record.getRecord().getContentLengthInWords(); fileLength += 4
* + record.contentLength;
*
* double[] shapeBBox = rec.getBox();
*
* // Modify summary bounding boxes if necessary if ( shapeBBox
* != null ) { // Adjust bounding region if necessary if (
* shapeBBox[0] < bbox[0] ) // Xmin bbox[0] = shapeBBox[0];
*
* if ( shapeBBox[2] > bbox[2] ) // Xmax bbox[2] = shapeBBox[2];
*
* if ( shapeBBox[1] < bbox[1] ) // Ymin bbox[1] = shapeBBox[1];
*
* if ( shapeBBox[3] > bbox[3] ) // Ymax bbox[3] = shapeBBox[3];
* }
*
* // Modify measurment bounds if necessary if ( rec instanceof
* MeasureMinMax ) { MeasureMinMax recM = (MeasureMinMax)rec;
*
* if ( recM.getMeasurementMin() < bbox[4] ) bbox[4] =
* recM.getMeasurementMin();
*
* if ( recM.getMeasurementMax() > bbox[5] ) bbox[5] =
* recM.getMeasurementMax(); }
*
* // Modify Z bounds if necessary if ( rec instanceof ZMinMax )
* { ZMinMax recM = (ZMinMax)rec;
*
* if ( recM.getZMin() < bbox[4] ) bbox[4] = recM.getZMin();
*
* if ( recM.getZMax() > bbox[5] ) bbox[5] = recM.getZMax(); } }
*
* this.header = new ShapeHeader(fileLength,shapeType, bbox);
* //this.index = new IndexPortion(this.header,this.records);
* } */
/**
* Retrieves all of the shapes in the main portion
*
* @return Array of shape records
*
* @throws IOException
* @throws IllegalAccessException
* @throws InstantiationException
*/
/* public MainPortionRecord[] getShapes() throws IOException,
* InstantiationException, IllegalAccessException { boolean progress = true;
* MainPortionRecord[] returnArray = null;
*
* if ( records == null ) { if ( header == null ) { progress =
* parseHeader(); } if ( progress ) { progress = parseRecords(); } }
*
* // just to be sure returnArray = (MainPortionRecord[]) (( progress ) ?
* records.toArray(new MainPortionRecord[0]) : null);
*
* return returnArray; } */
/**
 * Appends a shape record to the file: records its offset/length in the
 * index, writes the 8-byte record header plus payload at the current end
 * of file, then updates the running file header (length and bounds).
 *
 * @param e the record to append
 * @return always true; failures surface as exceptions
 * @throws NonWritableChannelException if this portion was opened READ-only
 * @throws IllegalArgumentException if the record's shape type differs from
 *             the file's established type (via checkHeader)
 * @throws IllegalStateException wrapping any IOException from the write
 */
@Override
public boolean add(ShapeRecord e)
{
    if (mode == ESRIShapefile.Mode.WRITE)
    {
        checkHeader(e);
        // header.fileLength is in bytes; index offsets are 16-bit words.
        int offset = header.fileLength / 2;
        int contentLength = e.getContentLengthInWords();
        index.records.add(index.new IndexPortionRecord(offset, contentLength));
        int bytesToAllocate = contentLength * 2 + 8;
        // MainPortionRecord has 8 extra bytes
        ByteBuffer buffer = ByteBuffer.allocate(bytesToAllocate);
        // Record header (number + length) is big-endian...
        buffer.order(ByteOrder.BIG_ENDIAN);
        buffer.putInt(index.records.size());
        buffer.putInt(contentLength);
        // ...while the shape payload itself is little-endian.
        buffer.order(ByteOrder.LITTLE_ENDIAN);
        e.writeRecord(buffer);
        FileChannel thisFile = outputStream.getChannel();
        buffer.flip();
        try
        {
            // Convert the word offset back to a byte position for the channel.
            thisFile.position(offset * 2);
            thisFile.write(buffer);
        }
        catch (IOException e1)
        {
            // TODO Auto-generated catch block
            e1.printStackTrace();
            // RETHROW in something that matches the interface
            throw new IllegalStateException(e1);
        }
        updateHeader(e);
    }
    else
    {
        // Not setup for writing
        throw new NonWritableChannelException();
    }
    // Per spec, in this case we throw exceptions on fail, not return false
    // Returning false is reserved for only the 'set' case where duplicates
    // are
    // not allowed to be added.
    return true;
}
/**
 * Appends every record in the given collection, in iteration order.
 *
 * @param c the records to append
 * @return always true; failures surface as exceptions
 * @throws NonWritableChannelException if this portion was opened READ-only
 */
@Override
public boolean addAll(Collection<? extends ShapeRecord> c)
{
    if (mode != ESRIShapefile.Mode.WRITE)
    {
        // Not setup for writing
        throw new NonWritableChannelException();
    }

    for (ShapeRecord shape : c)
    {
        add(shape);
    }

    // Per spec, in this case we throw exceptions on fail, not return false
    return true;
}
/**
 * Rewinds the read cursor to the first record. NOTE: despite the
 * {@code Collection.clear()} contract, this does NOT remove any records —
 * it only resets the underlying channel position for re-iteration.
 */
@Override
public void clear()
{
    try
    {
        // Fix: guard against an empty index — get(0) previously threw
        // IndexOutOfBoundsException on an empty shapefile.
        if (index != null && !index.records.isEmpty())
        {
            inputStream.getChannel().position(index.records.get(0).offset);
        }
    }
    catch (IOException e)
    {
        // Best-effort reset; keep the original log-and-continue behavior.
        e.printStackTrace();
    }
}
/**
 * Linear scan over every record in the file.
 *
 * @param o the object to look for
 * @return true if any record equals {@code o}
 */
@Override
public boolean contains(Object o)
{
    // Will be exceptionally slow on large files
    for (ShapeRecord shape : this)
    {
        if (shape.equals(o))
        {
            return true;
        }
    }
    return false;
}

/**
 * Repeated linear scans, one per element of {@code c}.
 *
 * @param c the candidate objects
 * @return true only if every element is present
 */
@Override
public boolean containsAll(Collection<?> c)
{
    // Will be ridiculously slow on large files
    for (Object o : c)
    {
        if (!contains(o))
        {
            return false;
        }
    }
    return true;
}
/**
 * @return true when the index reports no records
 */
@Override
public boolean isEmpty()
{
    return index.records.isEmpty();
}

/**
 * @return a fresh read-only iterator over the records in file order
 */
@Override
public Iterator<ShapeRecord> iterator()
{
    return new MainPortionIterator(this);
}
/** Removal is not supported on this file-backed collection. */
@Override
public boolean remove(Object o)
{
    throw new UnsupportedOperationException();
}

/** Removal is not supported on this file-backed collection. */
@Override
public boolean removeAll(Collection<?> c)
{
    throw new UnsupportedOperationException();
}

/** Removal is not supported on this file-backed collection. */
@Override
public boolean retainAll(Collection<?> c)
{
    throw new UnsupportedOperationException();
}

/**
 * @return the record count, as reported by the index portion
 */
@Override
public int size()
{
    return index.records.size();
}
/**
 * Snapshots every record into a new array by iterating the whole file.
 *
 * @return an array containing all shape records in file order
 */
@Override
public Object[] toArray()
{
    List<ShapeRecord> snapshot = new LinkedList<>();
    for (ShapeRecord shape : this)
    {
        snapshot.add(shape);
    }
    return snapshot.toArray();
}
/**
 * Copies every record into the supplied array, allocating a larger one when
 * needed. Fix: follows the {@code Collection.toArray(T[])} contract — the
 * replacement array preserves the caller's runtime component type (the old
 * code always allocated {@code ShapeRecord[]}, causing ArrayStoreException
 * risks for subtype arrays), and when the array has room to spare the slot
 * after the last record is set to null.
 *
 * @param a the destination array; reused when large enough
 * @return the populated array
 */
@Override
@SuppressWarnings("unchecked")
public <T> T[] toArray(T[] a)
{
    int size = size();
    if (a.length < size)
    {
        // Allocate with the caller's component type, per the contract.
        a = (T[])java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), size);
    }
    int i = 0;
    for (ShapeRecord sr : this)
    {
        a[i++] = (T)sr;
    }
    // Null-terminate when the caller's array is larger than the collection.
    if (i < a.length)
    {
        a[i] = null;
    }
    return a;
}
}
|
using System;
using System.Threading.Tasks;
namespace Outkeep.Grains.Tests.Fakes
{
/// <summary>
/// Test double for <see cref="IGrainLifecycle"/> that logs lifecycle
/// transitions to the console and completes immediately.
/// </summary>
public class FakeGrainLifecycle : IGrainLifecycle
{
    /// <summary>Simulates grain activation.</summary>
    public Task ActivateAsync()
    {
        Console.WriteLine("Grain activated");
        return Task.CompletedTask;
    }

    /// <summary>Simulates grain deactivation.</summary>
    public Task DeactivateAsync()
    {
        Console.WriteLine("Grain deactivated");
        return Task.CompletedTask;
    }
}
} |
package com.shadowolfyt.zander.commands;
import com.shadowolfyt.zander.ZanderMain;
import com.shadowolfyt.zander.guis.WhitelistGUI;
import com.shadowolfyt.zander.guis.WhitelistListGUI;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.OfflinePlayer;
import org.bukkit.Sound;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
public class whitelist implements CommandExecutor {

    private final String EN_ERR_PLAYER_DOES_NOT_EXIST = ChatColor.RED + "Player does not exist!";

    /**
     * Handles the /whitelist command. With no arguments a GUI opens
     * (players only). Sub-commands: "add &lt;player&gt;",
     * "remove &lt;player&gt;", and "list" (GUI, players only).
     *
     * @return always true; feedback is sent via chat messages
     */
    @Override
    public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
        if (!sender.hasPermission("zander.whitelist")) {
            // No permissions.
            sender.sendMessage(ChatColor.RED + "You don't have permission to use this command.");
            return true;
        }

        // Sender has 'zander.whitelist'
        if (args.length == 0 && (sender instanceof Player)) {
            new WhitelistGUI((Player) sender);
            return true;
        } else if (args.length == 0) {
            sender.sendMessage(ChatColor.YELLOW + "The GUI is only available for players only.");
            return true;
        }

        // args != 0
        if (args[0].equalsIgnoreCase("add")) {
            // Fix: guard against a missing player-name argument, which
            // previously threw ArrayIndexOutOfBoundsException.
            if (args.length < 2) {
                sender.sendMessage(ChatColor.RED + "Usage: /whitelist add <player>");
                return true;
            }
            OfflinePlayer target = Bukkit.getOfflinePlayer(args[1]);
            // NOTE(review): getOfflinePlayer never returns null for a name
            // lookup; the null check is retained for safety only.
            if (target != null) {
                if (target.isWhitelisted()) {
                    sender.sendMessage(ChatColor.RED + " This player is already whitelist.");
                } else {
                    target.setWhitelisted(true);
                    Bukkit.getServer().reloadWhitelist();
                    Bukkit.broadcastMessage(ChatColor.LIGHT_PURPLE + target.getName() + " has been whitelisted.");
                    // Announce the new whitelist entry audibly to everyone online.
                    for (Player pl : Bukkit.getOnlinePlayers()) {
                        pl.playSound(pl.getLocation(), Sound.ENTITY_ENDER_DRAGON_GROWL, 100000, 1);
                    }
                }
            } else {
                sender.sendMessage(EN_ERR_PLAYER_DOES_NOT_EXIST);
            }
            return true;
        }

        if (args[0].equalsIgnoreCase("remove")) {
            // Fix: same missing-argument guard as "add".
            if (args.length < 2) {
                sender.sendMessage(ChatColor.RED + "Usage: /whitelist remove <player>");
                return true;
            }
            OfflinePlayer target = Bukkit.getOfflinePlayer(args[1]);
            if (target != null) {
                if (!target.isWhitelisted()) {
                    sender.sendMessage( ChatColor.RED + "This player is not in the whitelist.");
                } else {
                    target.setWhitelisted(false);
                    Bukkit.getServer().reloadWhitelist();
                    Bukkit.broadcastMessage(ChatColor.RED + target.getName() + " has been removed from the whitelist.");
                }
            } else {
                sender.sendMessage(EN_ERR_PLAYER_DOES_NOT_EXIST);
            }
        } else if (args[0].equalsIgnoreCase("list")) {
            // Fix: guard the cast — console senders previously caused a
            // ClassCastException here.
            if (!(sender instanceof Player)) {
                sender.sendMessage(ChatColor.YELLOW + "The GUI is only available for players only.");
                return true;
            }
            new WhitelistListGUI((Player) sender);
            return true;
        }
        return true;
    }
}
|
#!/bin/sh
# Serve the saved "text" model with TensorFlow Serving in a detached,
# auto-removing container. Port 8501 = REST API, 8500 = gRPC API.
set -eu

docker run -t -d --rm \
    -p 8501:8501 \
    -p 8500:8500 \
    --name text \
    -v "$(pwd)/saved_model/text:/models/text" \
    -e MODEL_NAME=text \
    tensorflow/serving:2.3.0
|
// Function to generate success alert message and store it in the session
// Stores the given text under the 'alert-success' session key so the Blade
// block below can render it.
// NOTE(review): uses Session::put, so the message persists until removed —
// Session::flash (one-request lifetime) may be intended; confirm.
function generateSuccessAlert($message) {
    Session::put('alert-success', $message);
}

// Modified code snippet to display the success alert message
@if (Session::has('alert-success'))
    <div class="alert alert-success alert-dismissible" style="margin-top: 10px; margin-bottom: -10px;">
        <a href=""><button type="button" class="close" data-dismiss="alert">×</button></a>
        {{Session::get('alert-success')}}
    </div>
@endif
</section>
<!-- Main content -->
<reponame>uk-gov-mirror/hmrc.trader-services-route-one
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.traderservices.connectors
import java.time.format.DateTimeFormatter
import java.time.ZoneId
import java.{util => ju}
import java.time.ZonedDateTime
/** Provides PEGA API headers */
trait PegaConnector {

  /** HTTP-date formatter (RFC 1123 style), pinned to GMT and English locale. */
  final val httpDateFormat = DateTimeFormatter
    .ofPattern("EEE, dd MMM yyyy HH:mm:ss z", ju.Locale.ENGLISH)
    .withZone(ZoneId.of("GMT"))

  /** Headers required by the PEGA API */
  final def pegaApiHeaders(correlationId: String, environment: String, token: String): Seq[(String, String)] = {
    val requestDate = httpDateFormat.format(ZonedDateTime.now)
    Seq(
      "x-correlation-id"    -> correlationId,
      "CustomProcessesHost" -> "Digital",
      "date"                -> requestDate,
      "accept"              -> "application/json",
      "environment"         -> environment,
      "Authorization"       -> s"Bearer $token"
    )
  }
}
|
<gh_stars>0
// Lazy-loaded layout shells: the main app layout and the iframe wrapper view.
const Layout = () => import("/@/layout/index.vue");
const IFrame = () => import("/@/layout/frameView.vue");

// Route config that embeds the external PPTist slide editor in an iframe.
const pptRouter = {
  path: "/ppt",
  component: Layout,
  redirect: "/ppt/index",
  meta: {
    icon: "ppt",
    title: "PPT",
    i18n: false,
    rank: 3 // menu ordering position
  },
  children: [
    {
      path: "/ppt/index",
      name: "reFrameppt",
      component: IFrame,
      meta: {
        title: "PPT",
        i18n: false,
        // External site rendered inside the IFrame wrapper component.
        frameSrc: "https://pipipi-pikachu.github.io/PPTist/",
        extraIcon: {
          svg: true,
          name: "team-iconxinpin"
        }
      }
    }
  ]
};

export default pptRouter;
|
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed for reproducibility.
# -d 202: test number, -g 208: generator number, -S: seed.
# NOTE(review): confirm the test/generator ids against `dieharder -l` and
# `dieharder -g -1` for the installed dieharder version.
dieharder -d 202 -g 208 -S 171451136
|
# Start the Android emulator for AVD "and100" in the background; arguments
# after -qemu are passed straight to QEMU (none are given here).
emulator -avd and100 -qemu &
|
<gh_stars>0
import React from 'react'
import Test from '../common/Test'
const Home = () => (
<div className="text-center">
<h1>This is Home View.</h1>
<Test />
</div>
)
export default Home
|
<filename>js/manage_news.js
// Re-renders the admin news table for the given page, reusing the current
// search term (#inf_search) and page-size (#inf_limit) inputs.
// NOTE(review): `search` and `limit` are implicit globals — confirm nothing
// else reads them before making them local.
function drawpage_sad_news(page){
    search=$("#inf_search").val();
    limit=$("#inf_limit").val();
    redrawrs_admin_news(search,limit,page);
}
// Keyup handler for the quick-search box: triggers a search from page 0
// when Enter (keyCode 13) is pressed.
// NOTE(review): despite the names, `event` is the input ELEMENT and `e` is
// the keyboard event (call sites pass (this, event)) — confirm before renaming.
function drawsearch_news_qt(event,e){
    limit=$("#inf_limit").val();
    if(e.keyCode=='13'){
        search= $(event).val();
        redrawrs_admin_news(search,limit,0);
    }
}
// Search-button handler: re-renders from page 0 with the current search
// term and page size. `page` is read here but not used (search restarts at 0).
function drawsearch_news(){
    search=$("#search_mng_qt").val();
    limit=$("#inf_limit").val();
    page=$("#inf_page").val();
    redrawrs_admin_news(search,limit,0);
}
// Fetches one page of news items from the backend and rebuilds the admin
// table plus pagination controls inside #home1.
// Fix: the "tail window" pagination branch emitted a malformed attribute
// (`onclick=drawpage_sad_news(i)"` — missing opening quote), breaking the
// page links; it now matches the other branches.
// NOTE(review): `dt`, `html` and `i` are implicit globals, kept as-is in
// case other inline handlers depend on them — confirm before localizing.
function redrawrs_admin_news(search,limit,page){
    console.log(search,limit,page);
    dt=JSON.stringify({'search':search,
        'limit':limit,
        'page':page
    });
    $('#home1').empty();
    $.ajax({
        type: 'POST',
        url: site_url+"/sadmin/get_data_news/",
        data: dt,
        contentType: 'application/json',
        success: function(data) {
            console.log(data);
            // Table header row.
            html = '<table class="table table-bordered" style="font-size: 14px;width: 95%;"><tr style="background-color: rgba(60, 141, 188, 0.28);">';
            html+= '<th>#</th><th class="ta-c new_avatar" height="80" width="130" >Avatar</th><th height="80" width="130" >Tiêu đề tin</th><th height="80" width="380" >Mô tả</th><th>Thể loại</th><th>Ngày tạo</th><th>vị trí</th><th>Action</th>';
            // html+= '<th ><input id="search_mng_qt" style="width:70%;margin-left: 15%" onkeyup="drawsearch_news_qt(this,event)">';
            // html+= '<i class="pointer fa fa-search" onclick="drawsearch_news()"></i>';
            html+= '</tr>';
            console.log(data.list_news.length);
            // One row per news item with edit/delete actions.
            for(i=0;i<data.list_news.length;i++){
                html+= '<tr><td>'+data.list_news[i]['id']+'</td>';
                html+= '<td><img src="'+data.list_news[i]['avatar']+'"><img></td>';
                html+= '<td>'+data.list_news[i]['name']+'</td>';
                html+= '<td>'+data.list_news[i]['description']+'</td>';
                html+= '<td>'+data.list_news[i]['category']+'</td>';
                html+= '<td>'+data.list_news[i]['modify_date']+'</td>';
                html+= '<td class="ta-c new_pos"><input type="text" name="inp-pos" style="width: 50px;text-align: center;font-weight: 600;color: #3c8dbc;" value="'+data.list_news[i]['pos']+'"></td>';
                html+= '<td><a href="'+site_url+'/sadmin/edit_news/'+data.list_news[i]['id']+'"><i class="fa fa-pencil-square-o" title="Sửa" style="margin-left: 20%;"></i></a>';
                html+= '<a href="#" class="delete" id="'+data.list_news[i]['id']+'"><i class="fa fa-trash" style="margin-left: 20%;" onclick="delete_news('+data.list_news[i]['id']+')" title="Xóa"></i></a>';
                html+= '</td>';
                html+= '</tr>';
            }
            html+= '</table>';
            // "Showing X to Y of Z" summary.
            html+='<div class="col-md-12"><p>Đang xem <span id="beginqt">';
            html+= Math.min(limit*page+1,data.num_list);
            html+='</span>';
            html+=' đến <span id="endqt">'+Math.min(limit*(page+1),data.num_list)+'</span>';
            html+=' trong tổng số <span id="totalqt">'+data.num_list;
            html+='</span> mục tin tức</p></div>';
            html+='<center><div id="pagination" class="row">';
            html+='<div id="pagination_page" class="col-md-7">';
            html+='<ul class="pagination listpage pageqt">';
            if(data.num_page<7){
                // Few pages: show them all.
                for(i=0; i<data.num_page; i++){
                    html+='<li class="page-item';
                    if(i==page){
                        html+=' active';
                    }
                    html+='" onclick="drawpage_sad_news('+i+')"><a class="page-link">'+(i+1)+'</a></li>'
                }
            }
            else{
                if(page<=3){
                    // Near the start: 1..5, ellipsis, last.
                    for(i=0; i<5; i++){
                        html+='<li class="page-item';
                        if(i==page){
                            html+=' active';
                        }
                        html+='" onclick="drawpage_sad_news('+i+')"><a class="page-link">'+(i+1)+'</a></li>'
                    }
                    html+='<li class="page-item"><a class="page-link">...</a></li>';
                    html+='<li class="page-item" onclick="drawpage_sad_news('+(data.num_page-1)+')"><a class="page-link">'+data.num_page+'</a></li>';
                }
                else{
                    html+='<li class="page-item" onclick="drawpage_sad_news(0)"><a class="page-link">1</a></li>';
                    html+='<li class="page-item"><a class="page-link">...</a></li>';
                    if(page<data.num_page-4){
                        // Middle window: prev / current / next with ellipses.
                        html+='<li class="page-item" onclick="drawpage_sad_news('+(page-1)+')"><a class="page-link">'+page+'</a></li>';
                        html+='<li class="page-item active" onclick="drawpage_sad_news('+page+')"><a class="page-link">'+(page+1)+'</a></li>';
                        html+='<li class="page-item" onclick="drawpage_sad_news('+(page+1)+')"><a class="page-link">'+(page+2)+'</a></li>';
                        html+='<li class="page-item"><a class="page-link">...</a></li>';
                        html+='<li class="page-item" onclick="drawpage_sad_news('+(data.num_page-1)+')"><a class="page-link">'+data.num_page+'</a></li>';
                    }
                    else{
                        // Tail window: pages (page-2)..last.
                        for(i=page-2; i<data.num_page; i++){
                            html+='<li class="page-item';
                            if(i==page){
                                html+=" active";
                            }
                            // Fixed: opening quote before drawpage_sad_news was missing.
                            html+='" onclick="drawpage_sad_news('+i+')"><a class="page-link">'+(i+1)+'</a></li>';
                        }
                    }
                }
            }
            html+='</ul></div></div>';
            // Hidden state inputs read back by the other handlers.
            html+='<div style="display: none">';
            html+='<input type="text" id="inf_page" value="0">';
            html+='<input type="text" id="inf_limit" value="5">';
            html+='</div>';
            $("#home1").append(html);
        },
        error: function(data) {
            console.log(data);
        }
    });
}
// Delegated click handler for row delete icons: confirms with the user,
// calls the backend delete endpoint, then reloads the page.
$(document).on('click', '.delete', function(){
    var id = $(this).attr('id');
    if(confirm("Are you sure you want to Delete this data?"))
    {
        $.ajax({
            url:site_url+"/sadmin/delete_news",
            // Fix: the option was misspelled "mehtod" and silently ignored
            // (jQuery happened to default to GET, which matched the intent).
            method:"get",
            data:{id:id},
            success:function(data)
            {
                alert('xoa thanh cong');
                location.reload();
            }
        })
    }
    else
    {
        return false;
    }
});
// TODO(review): unimplemented stub — presumably meant to preview the news item
// with the given id; confirm intended behavior before wiring UI to it.
function preview_news(id){
}
// TODO(review): unimplemented stub — presumably meant to open an edit form for
// the news item with the given id; confirm intended behavior.
function edit_news(id){
}
|
<filename>src/todo/Todo.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package todo;
import com.sun.glass.events.KeyEvent;
import java.awt.AWTException;
import java.awt.Robot;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.io.File;
import java.io.IOException;
import java.net.BindException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.SQLException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.stage.Stage;
import jfx.messagebox.MessageBox;
import org.jnativehook.GlobalScreen;
import org.jnativehook.NativeHookException;
import org.jnativehook.keyboard.NativeKeyEvent;
import org.jnativehook.keyboard.NativeKeyListener;
import todo.hsqldb.DbUtils;
import todo.hsqldb.HSQLDBConfigurator;
import todo.hsqldb.HSQLDBConnector;
/**
 * JavaFX application entry point for "//TODO LazyList".
 *
 * <p>Besides the JavaFX UI, the app installs a system-wide keyboard hook
 * (jnativehook). Typing the trigger sequence {@code //TODO} in any application
 * starts capturing keystrokes; when the postfix sequence is typed, the captured
 * text is parsed into a {@code TodoEntry}, stored in the HSQLDB database, and
 * the typed characters are erased from the foreground window via
 * {@link Robot} backspaces.</p>
 *
 * @author <NAME>
 */
public class Todo extends Application {

    //to-do area here
    //TODO change ommit cdgDat and use Global configurator instead..
    //TODO finish custom connection methods
    //TODO restart pc to register nativehook and test

    // Accumulates the captured entry text while the scanner is active.
    private static StringBuilder todoStringBuilder = new StringBuilder();
    private static StringBuilder unicodeStringBuilder = new StringBuilder();
    // Rolling 6-character window of the latest keystrokes, matched against the trigger.
    private static char[] charBuffer = new char[0x6];
    // The trigger sequence ("//TODO") the rolling buffer is compared against.
    private static char[] charTemp = {'/', '/', 'T', 'O', 'D', 'O'};
    private static Robot robot;
    private static boolean isScan;     // true while capturing an entry
    private static boolean isBuffer;   // true while matching the trigger prefix
    private static boolean isRemoving; // true while backspaces are being replayed
    //dirs
    public static final String USER_HOME = System.getProperty("user.home");
    public static final String TODO_DIR = USER_HOME + File.separator + "TODO";
    public static final String TODO_CONFIG_FILE = TODO_DIR + File.separator + "todocfg.cfg";
    // Localhost port bound only as a single-instance lock.
    private static final int PORT = 17754;
    private static ServerSocket socket;
    public static DataModel datMod;
    public static String[] cfgDat;
    private static Pattern pattern;
    private static Matcher matcher;
    private static char current;
    public static TrayIcon icon;
    public static HSQLDBConfigurator globalConfigurator = HSQLDBConfigurator.getInstance();
    public static HSQLDBConnector connector;
    private static Stage stage;
    public static boolean isCustomConnection;
    public static String[] customConnectionData;
    private Tray tray;
    private boolean isTray = true;
    public static FXMLMainViewController fxc;

    static {
        try {
            robot = new Robot();
        } catch (AWTException ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Loads the FXML main view, wires the controller, configures and shows the
     * primary stage, and installs the system-tray integration on first showing.
     */
    @Override
    public void start(Stage primaryStage) throws Exception {
        fxc = new FXMLMainViewController();
        FXMLLoader loader = new FXMLLoader(
                getClass().getResource(
                        "FXMLMainView.fxml"
                )
        );
        loader.setController(fxc);
        Parent root = (Parent) loader.load();
        Scene scene = new Scene(root);
        stage = primaryStage;
        stage.getIcons().add(new javafx.scene.image.Image(Todo.class.getResourceAsStream("/todo/res/logobar.png")));
        stage.setWidth(1166);
        stage.setHeight(676);
        stage.setTitle("//TODO LazyList//");
        stage.setScene(scene);
        stage.show();
        if (isTray) {
            tray = new Tray(stage, this);
            isTray = false;
        }
        // Keep the JVM alive when the window is closed: the app lives in the tray.
        Platform.setImplicitExit(false);
        datMod.updateTable(fxc.tableView, fxc.getTcmns());
    }

    /**
     * Enforces single-instance via a localhost socket bind, prepares config and
     * database, starts the native key hook on a daemon thread, then launches
     * the JavaFX runtime.
     */
    public static void main(String[] args) {
        try {
            //Bind to localhost adapter with a zero connection queue
            socket = new ServerSocket(PORT, 0, InetAddress.getByAddress(new byte[]{127, 0, 0, 1}));
        } catch (BindException e) {
            // Port already bound: another instance is running.
            try {
                Platform.runLater(()->{
                    MessageBox.show(null,
                            "Another instance of this application is already running.",
                            "Already running",
                            MessageBox.ICON_INFORMATION);
                });
                Thread.sleep(3000);
                System.exit(1);
            } catch (InterruptedException ex) {
                ex.printStackTrace();
            }
        } catch (IOException e) {
            try {
                Platform.runLater(()->{
                    MessageBox.show(null,
                            "Error has occured",
                            "Exception",
                            MessageBox.ICON_ERROR);
                });
                Thread.sleep(3000);
                e.printStackTrace();
                System.exit(2);
            } catch (InterruptedException ex) {
                ex.printStackTrace();
            }
        }
        datMod = new DataModel();
        fileManagerCheck();
        globalConfigurator.setConfiguration(datMod.readCfg());
        if (globalConfigurator.isFtr()) {
            // First run: create the todos table and clear the first-time-run flag.
            connector = new HSQLDBConnector(isCustomConnection);
            try {
                connector.statement.executeUpdate("CREATE TABLE todos (todoId INTEGER IDENTITY,category VARCHAR(255) NOT NULL,"
                        + "message VARCHAR(255) NOT NULL,priority VARCHAR(255) NOT NULL);");
                datMod.overrideCfg(TODO_DIR, "file", "TODO", "TODODB", "false");
                connector.closeConnector();
            } catch (SQLException ex) {
                ex.printStackTrace();
                connector.closeConnector();
            }
            Platform.runLater(() -> {
                MessageBox.show(null,
                        "Read instructions!They help you get started.",
                        "Information",
                        MessageBox.ICON_INFORMATION | MessageBox.OK);
            });
        }
        //check if database uses custom conenction checks also for table if its included in there.
        if (Files.exists(Paths.get(FXMLMainViewController.TODO_CUSTOM_CONNECTION_CONFIG))) {
            try {
                DbUtils.checkCustomConnection();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        // Run the global key hook off the FX thread; daemon so it dies with the app.
        Thread t = new Thread(() -> {
            clearBuff();
            startListening();
        });
        t.setDaemon(true);
        t.start();
        launch(args);
    }

    /** Creates the TODO working directory and default config file if missing. */
    private static void fileManagerCheck() {
        if (!Files.exists(Paths.get(TODO_DIR))) {
            try {
                Files.createDirectory(Paths.get(TODO_DIR));
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
        if (!Files.exists(Paths.get(TODO_CONFIG_FILE))) {
            try {
                Files.createFile(Paths.get(TODO_CONFIG_FILE));
                //overrride with default
                datMod.overrideCfg(TODO_DIR, "file", "TODO", "TODODB", "true");
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }

    /** Registers the jnativehook global key hook and attaches our listener. */
    private static void startListening() {
        try {
            GlobalScreen.registerNativeHook();
        } catch (NativeHookException ex) {
            System.out.println(ex.getMessage());
            System.exit(1);
        }
        GlobalScreen.getInstance().addNativeKeyListener(new GlobalKeyListener());
        System.out.println("Hook state: " + GlobalScreen.isNativeHookRegistered());
    }

    /** @return the rolling key buffer joined into a single string. */
    private static String buffGetString() {
        String s = "";
        for (char c : charBuffer) {
            s += c;
        }
        return s;
    }

    /** @return true when the buffer holds exactly the trigger prefix. */
    private static boolean isInit() {
        return buffGetString().equals(TodoEntry.PREFIX);
    }

    /** Resets the rolling buffer to the filler char U+10BF (never typed normally). */
    private static void clearBuff() {
        for (int i = 0; i < charBuffer.length; i++) {
            charBuffer[i] = '\u10BF';
        }
    }

    /**
     * Replays {@code a} backspace key events to erase the typed entry from the
     * foreground application; runs asynchronously so the hook is not blocked.
     */
    private static void removeIntext(int a) {
        new Thread(() -> {
            isRemoving = true;
            for (int i = 0; i < a; i++) {
                robot.keyPress(KeyEvent.VK_BACKSPACE);
                robot.keyRelease(KeyEvent.VK_BACKSPACE);
            }
            isRemoving = false;
        }).start();
    }

    /**
     * @return true while the buffer content is still a plausible prefix of the
     * trigger sequence (filler char 4287 == U+10BF counts as "not typed yet").
     */
    private static boolean isFollowing() {
        boolean isFollowing = true;
        for (int i = 0; i < charTemp.length; i++) {
            if (charBuffer[i] == (charTemp[i]) || charBuffer[i] == 4287) {
                isFollowing = true;
            } else {
                return false;
            }
        }
        return isFollowing;
    }

    /**
     * Parses and validates a captured entry, stores it in the database, refreshes
     * the table view, and reports the outcome via tray balloon where supported.
     */
    private static void submitEntry(String entry) {
        TodoEntry todoEntry = processEntry(entry);
        if (todoEntry != null) {// check for validity of message .. length etc
            if (todoEntry.getMessage().length() >= TodoEntry.MAX_SIZE) {
                if (SystemTray.isSupported()) {
                    icon.displayMessage("TODO", "Todo failed-MAX size reached", TrayIcon.MessageType.ERROR);
                }
                return;
            }
            if (todoEntry.getMessage().length() <= TodoEntry.MIN_SIZE) {
                if (SystemTray.isSupported()) {
                    icon.displayMessage("TODO", "Todo failed-MIN size not exceeded", TrayIcon.MessageType.ERROR);
                }
                return;
            }
            DbUtils.addTodoIntoDb(todoEntry);
            datMod.updateTable(fxc.tableView, fxc.getTcmns());
            if (SystemTray.isSupported()) {
                icon.displayMessage("TODO", "Todo submitted", TrayIcon.MessageType.INFO);
            }
        } else {
            if (SystemTray.isSupported()) {
                icon.displayMessage("TODO", "Todo null", TrayIcon.MessageType.ERROR);
            }
        }
    }

    /**
     * Splits a raw captured entry into category ("/word" at the start), priority
     * ("/LOW|MEDIUM|HIGH" at the end) and the remaining message text.
     */
    private static TodoEntry processEntry(String entry) {
        String[] parsed = new String[3];
        // Fix: the priority regex was missing the backslash in "\\s*" and so
        // matched a run of literal 's' characters instead of optional whitespace.
        String[] regexes = {"^(\\s*/\\w+\\s)",
                "(\\s*/(LOW|MEDIUM|HIGH|low|medium|high)\\s*)$"};
        for (int i = 0; i < regexes.length; i++) {
            pattern = Pattern.compile(regexes[i], Pattern.DOTALL);
            matcher = pattern.matcher(entry);
            if (matcher.find()) {
                parsed[i] = matcher.group().replace("/", " ").trim();
                entry = entry.replaceAll(regexes[i], "");
            }
        }
        parsed[2] = entry;
        return new TodoEntry(parsed[0], parsed[2], parsed[1]);
    }

    /** Stops scanning and resets both the capture builder and the key buffer. */
    private static void closeScan() {
        todoStringBuilder.delete(0, todoStringBuilder.length());
        clearBuff();
        isScan = false;
    }

    /**
     * Global keyboard listener implementing the trigger-detect / capture state
     * machine. NOTE(review): keyReleased with raw key codes may misbehave on
     * non-English layouts; keyTyped would be more robust — confirmed working on
     * English layouts only.
     */
    private static class GlobalKeyListener implements NativeKeyListener {

        int c = 0; // index of the next free slot in charBuffer while matching

        public GlobalKeyListener() {
        }

        @Override
        public void nativeKeyReleased(NativeKeyEvent nke) {
            //TODO HAVE TO IGNORE SOME CHARACTERS WHEN SCANNER ENABLED AND SCANNING FOR MESSAGE
            //normal initialization
            current = nke.getKeyCode() == TodoEntry.SLASH ? '/' : (char) nke.getKeyCode();
            if (nke.paramString().contains("NumPad")) {
                // NumPad keys report a different code; recover the digit from paramString.
                current = nke.paramString().charAt(nke.paramString().indexOf("NumPad ") + 7);
            }
            if (isScan) {//scanning phase
                if (current == '\u0008') {
                    // Fix: guard against backspace with an empty capture buffer,
                    // which previously threw StringIndexOutOfBoundsException.
                    if (todoStringBuilder.length() > 0) {
                        todoStringBuilder.deleteCharAt(todoStringBuilder.length() - 1);
                    }
                } else {
                    todoStringBuilder.append(current);
                }
                if (todoStringBuilder.toString().endsWith(TodoEntry.POSTFIX)) {//successfull entry
                    // Strip the 2-char postfix before submitting; erase the whole
                    // typed text (entry + 6-char "//TODO" prefix) from the screen.
                    submitEntry(todoStringBuilder.toString().substring(0, todoStringBuilder.length() - 2));
                    int backupby = todoStringBuilder.toString().length() + 6;
                    removeIntext(backupby);
                    closeScan();
                }
            } else {
                if (isBuffer) {
                    if (current == '\u0008') {
                        return;
                    }
                    ///fill buffer
                    charBuffer[c] = current;
                    c++;
                    if (isFollowing()) {
                        if (c == charBuffer.length) {//if c is at the end of ... check if shoud start scanning
                            if (isInit()) {//check scanning
                                isScan = true;
                                isBuffer = false;
                            } else {
                                isBuffer = false;
                            }
                            c = 0;//if shoud not scan get buffer iterator at start
                        }
                    } else {
                        isBuffer = false;
                        clearBuff();
                    }
                } else {
                    if (current == '/') {//check if to start buffer
                        isBuffer = true;
                        charBuffer[0] = '/';
                        c = 1;
                    }
                }
            }
        }

        @Override
        public void nativeKeyPressed(NativeKeyEvent nke) {
        }

        @Override
        public void nativeKeyTyped(NativeKeyEvent nke) {
        }
    }
}
|
#!/bin/bash
echo "================================="
echo " Cytus II DB Build Tool V2.0 "
echo " A.R.C. Tech. "
echo "================================="
# Validate arguments FIRST: the old script wiped the cached output directories
# and only then discovered the version was missing, destroying the cache for nothing.
if [ -z "$1" ]; then
    echo "Version not defined!"
    exit 1
fi
# clean previous outputs
rm -rf ./res/export
rm -rf ./res/unitydata
rm -rf ./res/unitybundles
# recreate the output layout; -p creates parent directories, so listing the
# leaf directories is sufficient.
mkdir -p ./res/unitydata
mkdir -p ./res/unitybundles
mkdir -p ./res/export/audios/extra
mkdir -p ./res/export/audios/story
mkdir -p ./res/export/videos/extra
mkdir -p ./res/export/videos/titles
mkdir -p ./res/export/videos/story
mkdir -p ./res/export/videos/song_select
mkdir -p ./res/export/images/characters
echo "Cleaned cache."
# unzip files
unzip -q "./raw/$1.apk" -d ./res/apk
echo "Unziped APK."
unzip -q "./raw/$1.obb" -d ./res/obb
echo "Unziped OBB."
unzip -q "./raw/$1.patch.obb" -d ./res/vid
echo "Unziped Patch OBB."
# move unity engine data from both archives into one place
mv ./res/apk/assets/bin/Data/* ./res/unitydata
mv ./res/obb/assets/bin/Data/* ./res/unitydata
echo "Migrated Unity."
# move obb asset bundles and raw videos
mv ./res/obb/assets/AssetBundles/* ./res/unitybundles
mv ./res/vid/assets/RawAssets/EndPlay/* ./res/export/videos
mv ./res/vid/assets/RawAssets/OASystem/* ./res/export/videos
mv ./res/vid/assets/RawAssets/Story/* ./res/export/videos
mv ./res/vid/assets/RawAssets/System/* ./res/export/videos
mv ./res/vid/assets/RawAssets/TrueEnd/* ./res/export/videos
rm -rf ./res/vid/assets/RawAssets/EndPlay
rm -rf ./res/vid/assets/RawAssets/OASystem
rm -rf ./res/vid/assets/RawAssets/Story
rm -rf ./res/vid/assets/RawAssets/System
rm -rf ./res/vid/assets/RawAssets/TrueEnd
mv ./res/vid/assets/RawAssets/Title/* ./res/export/videos/titles
mv ./res/vid/assets/RawAssets/SongSelect/* ./res/export/videos/song_select
rm -rf ./res/vid/assets/RawAssets/Title
rm -rf ./res/vid/assets/RawAssets/SongSelect
# story videos are duplicated (copied, not moved) into the story subfolder
cp ./res/export/videos/*.mp4 ./res/export/videos/story
mv ./res/vid/assets/RawAssets/* ./res/export/videos
echo "Migrated OBB."
# clean source
rm -rf ./res/apk ./res/obb ./res/vid
echo "Finished."
|
// Generic wrapper interface: the type of `data` is supplied by the caller,
// e.g. DataTypes<string> or DataTypes<string[]>.
interface DataTypes<T> {
  data: T;
}
// A call signature whose argument and return types are fixed by the interface's
// own type parameter. Fix: the previous version declared a second <Type> on the
// call signature, shadowing the interface parameter — that made
// GenericIdentityFn<number> accept any argument type instead of only numbers.
interface GenericIdentityFn<Type> {
  (arg: Type): Type;
}
// Binding through the interface pins the type parameter: myIdentity only
// accepts and returns numbers, even though defineType itself is fully generic.
let myIdentity: GenericIdentityFn<number> = defineType;
// Generic identity function: returns exactly what it receives, and the return
// type mirrors whatever type was passed in.
function defineType<T>(value: T): T {
  return value;
}
// The type argument can be supplied explicitly...
const string = defineType<string>("hello");
// ...or inferred from the value (here T is inferred as number).
// NOTE(review): these constant names shadow the built-in `string`/`number`
// type names — consider renaming if this file grows beyond a demo.
const number = defineType(3);
// Array variant of the identity function: because the parameter is declared as
// T[], array members such as .length are available inside the body.
function loggingIdentity<T>(items: T[]): T[] {
  const count = items.length;
  console.log(count);
  return items;
}
// DataTypes instantiated with a scalar payload.
const strValues: DataTypes<string> = {
  data: "name",
};
// DataTypes instantiated with an array payload.
const arrayValues: DataTypes<string[]> = {
  data: ["name", "age"],
};
// A generic record: think of T as a parameter — callers must instantiate the
// interface with a concrete type (e.g. ResultTypes<string>), which becomes the
// type of the optional `info` field. All other fields are optional scalars.
interface ResultTypes<T> {
  name?: string;
  lastName?: string;
  age?: number;
  info?: T;
}
// Example: `info` typed as string via the type argument.
const resultStr: ResultTypes<string> = {
  name: "anthony",
  lastName: "amaro",
  age: 26,
  info: "",
};
// Same shape, but `info` typed as number.
const resultNum: ResultTypes<number> = {
  name: "anthony",
  lastName: "amaro",
  age: 26,
  info: 0,
};
// Builds a fixed example ResultTypes<string> value.
// NOTE(review): the type parameter T is never used in the signature or body —
// confirm whether it can be removed without breaking explicit call sites.
function defineObj<T>(): ResultTypes<string> {
  return {
    name: "anthony",
    lastName: "amaro",
    age: 26,
    info: "",
  };
}
// Root React component: static page shell for the TypeScript examples above.
function App() {
  return (
    <div className="App">
      <h1>TypeScript Basics</h1>
    </div>
  );
}
|
#split -l 30 ../mir_seed_2_8.txt
# Directory containing the split seed files ("x??" chunks produced by the
# commented-out split above).
path="/home/fux/fux/miRNASNP3/target/TargetScan/turn03"
find "$path" -name "x*" > track_files
# For each chunk, append one background TargetScan invocation to work_scr.sh.
while read -r file
do
    # Last three characters of the chunk path (e.g. ".../xaa" -> "xaa"),
    # used to label output and error files. Replaces the old `echo ${file:0-3}`
    # subshell with a direct parameter expansion.
    briefname=${file: -3}
    echo "nohup perl /home/fux/fux/miRNASNP3/TargetScan/targetscan_70.pl ${file} utr3.txt Targetscan_wildmir_result_03_${briefname} 2>err.${briefname} &">>work_scr.sh
done < track_files
|
import {useLinkTo} from '@react-navigation/native';
import {Col, makeContainerStyles, Row} from '../../containers';
import {useAuthLabor} from '../../providers/auth-labor';
import {IcoMoon, Link, Text} from '../UI';
import {LinearGradientIcon} from '../LinearGradientIcon';
import {TouchableOpacity} from 'react-native';
import * as React from 'react';
import {IcoMoonKeys} from '../../types';
import {makeStyles} from './styles';
import {useBunnyKit} from '../../hooks/bunny-kit';
// Props for InlineJump — a tappable row that can render as a real <Link>
// ('LINK'), perform imperative navigation ('LINK_TO'), or invoke a custom
// handler ('NAV').
export interface LineToProps {
    iconName?: IcoMoonKeys,  // optional leading icon
    iconSize?: number,       // icon size; defaults inside the component
    text?: string,           // row label
    to?: string,             // navigation target for LINK / LINK_TO
    type: 'LINK' | 'LINK_TO' | 'NAV',
    onNav?: () => void       // callback used when type === 'NAV'
}
export const InlineJump: React.FC<LineToProps> = (props) => {
const {sizeLabor, themeLabor, wp} = useBunnyKit();
const linkTo = useLinkTo();
const containerStyles = makeContainerStyles(sizeLabor, themeLabor);
const styles = makeStyles(sizeLabor, themeLabor);
const {authFunctions} = useAuthLabor();
const {iconName, iconSize = wp(22), text, to, type, onNav, children} = props;
const {colors} = themeLabor.theme;
const iconColor = {color: colors.buttonText};
return type === 'LINK'
?
to ? <Link to={to}>
{
children
? children
: <Row paddingVertical="xl">
<Col size={3}>
<Row>
{
iconName
? <LinearGradientIcon size={iconSize} name={iconName}/>
: null
}
<Text style={styles.label}>{text}</Text>
</Row>
</Col>
<Col size={1} style={styles.rightWrapper}>
<IcoMoon name="chevron-right1"/>
</Col>
</Row>
}
</Link> : null
: <TouchableOpacity onPress={() => {
switch (type) {
case 'LINK_TO':
if (to) {
linkTo(to);
}
break;
case 'NAV':
if (onNav) {
onNav();
}
break;
}
}
}>
<Row paddingVertical="xl">
<Col size={3}>
<Row>
{
iconName
? <LinearGradientIcon size={iconSize} name={iconName}/>
: null
}
<Text style={styles.label}>{text}</Text>
</Row>
</Col>
<Col size={1} style={styles.rightWrapper}>
<IcoMoon name="chevron-right1"/>
</Col>
</Row>
</TouchableOpacity>;
};
|
<filename>src/components/search/styled.ts
// ┌──────────────────────────────────────────────────────────────────────────────────────────────┐
// │ Copyright (c) 2021 by the author of the React-weather project. All rights reserved. │
// │ This owner-supplied source code has no limitations on the condition imposed on the │
// │ maintenance of the copyright notice. │
// │ For more information, read the LICENSE file at the root of the project. │
// │ Written by author <NAME> <<EMAIL>>. │
// └──────────────────────────────────────────────────────────────────────────────────────────────┘
import styled, { css } from 'styled-components';
import ImagePuff from '~/assets/img/puff.svg';
import ImageSearch from '~/assets/img/search.svg';
// Pill-shaped search form wrapper: semi-transparent grey background, made more
// transparent when the dark theme class is active on an ancestor.
export const Container = styled.form`
  display: flex;
  align-items: center;
  height: 40px;
  width: 100%;
  padding: 5px;
  border-radius: 30px;
  background-color: rgb(194 194 194 / 35%);
  .theme-dark & {
    background-color: rgb(194 194 194 / 20%);
  }
`;
// The search input itself, growing to fill the container.
// Fix: `flex-grow: 1 1 auto` is invalid CSS (flex-grow takes a single number)
// and was silently dropped by browsers; the three-value shorthand belongs to
// the `flex` property.
export const TextField = styled.input`
  flex: 1 1 auto;
  height: 100%;
  width: 100%;
  border: 0;
  color: var(--text-color);
  background-color: transparent;
  font-weight: bold;
`;
// Shared icon styling: fixed 30px box, faded; inverted to white-ish via filter
// in dark theme.
const cssIcon = css`
  height: 30px;
  width: 30px;
  margin: 0 10px;
  background-size: contain;
  opacity: 0.3;
  .theme-dark & {
    filter: invert(100%) sepia(0%) saturate(0%) hue-rotate(93deg) brightness(103%)
      contrast(103%);
  }
`;
// Magnifier icon, shown while the field is idle.
export const IconSearch = styled.i`
  ${cssIcon}
  background: url(${ImageSearch}) no-repeat center;
`;
// Animated puff spinner, shown while a search request is in flight.
export const IconLoading = styled.i`
  ${cssIcon}
  background: url(${ImagePuff}) no-repeat center;
`;
|
<gh_stars>100-1000
package dev.webfx.kit.mapper.peers.javafxcontrols.gwt.html;
import elemental2.dom.HTMLElement;
import elemental2.dom.HTMLInputElement;
import dev.webfx.kit.mapper.peers.javafxgraphics.gwt.html.layoutmeasurable.HtmlLayoutMeasurable;
import dev.webfx.kit.mapper.peers.javafxgraphics.gwt.html.HtmlRegionPeer;
import dev.webfx.kit.mapper.peers.javafxcontrols.base.SliderPeerBase;
import dev.webfx.kit.mapper.peers.javafxcontrols.base.SliderPeerMixin;
import dev.webfx.platform.shared.util.Numbers;
import dev.webfx.platform.shared.util.Strings;
import dev.webfx.kit.mapper.peers.javafxgraphics.gwt.util.HtmlUtil;
import javafx.scene.control.Slider;
/**
 * WebFX peer mapping a JavaFX {@code Slider} onto an HTML
 * {@code <input type="range">} element, keeping min/max/value in sync in both
 * directions.
 *
 * @author <NAME>
 */
public final class HtmlSliderPeer
        <N extends Slider, NB extends SliderPeerBase<N, NB, NM>, NM extends SliderPeerMixin<N, NB, NM>>
        extends HtmlRegionPeer<N, NB, NM>
        implements SliderPeerMixin<N, NB, NM>, HtmlLayoutMeasurable {

    public HtmlSliderPeer() {
        this((NB) new SliderPeerBase(), HtmlUtil.createInputElement("range"));
    }

    public HtmlSliderPeer(NB base, HTMLElement element) {
        super(base, element);
        HTMLInputElement inputElement = inputElement();
        // Fix: the vendor prefix is "-webkit-", not "-web-kit-"; the misspelled
        // property name was silently ignored by browsers.
        // NOTE(review): "slider-vertical" is a non-standard value — confirm a
        // vertical slider is actually intended here.
        setElementStyleAttribute("-webkit-appearance", "slider-vertical");
        // DOM -> JavaFX: propagate user input back to the Slider's value property.
        inputElement.oninput = e -> {
            updateNodeValue(Numbers.doubleValue(inputElement.value));
            return null;
        };
    }

    /** The backing DOM element, typed as the range input it was created as. */
    private HTMLInputElement inputElement() {
        return (HTMLInputElement) getElement();
    }

    // JavaFX -> DOM: mirror Slider property changes onto the input attributes.

    @Override
    public void updateMin(Number min) {
        inputElement().min = Strings.toString(min);
    }

    @Override
    public void updateMax(Number max) {
        inputElement().max = Strings.toString(max);
    }

    @Override
    public void updateValue(Number value) {
        inputElement().value = Strings.toString(value);
    }
}
|
#include<bcli/bcli.hpp>
using namespace bc;
// Hand-written usage text installed in place of bcli's generated help.
std::string help()
{
    std::string text;
    text += "Custom help\n";
    text += "-s short param\n";
    text += "--long long param\n";
    text += "-b, --both short and long\n";
    text += "-f, --flag no param\n";
    return text;
}
// Example: bcli parser with a custom help text.
// Parser<0> — presumably a parser expecting zero positional arguments; confirm
// against bcli's docs.
int main(int argc, char* argv[])
{
    Parser<0> cli("ex08",
                  "ex08 description",
                  "v0.0.1",
                  "<NAME>");
    cli.add_param("-s", "short param");
    // def() sets a default value; meta() the placeholder shown in the help text.
    cli.add_param("--long", "long param")->def("long_default")->meta("LONG");
    cli.add_param("-b/--both", "short and long")->def("both_default");
    // as_flag(): the option takes no value.
    cli.add_param("-f/--flag", "flag param, without value")->as_flag();
    cli.add_common();
    // Replace the generated help output with the custom help() above.
    cli.set_help(&help);
    // Macro performs argument parsing and error reporting.
    BCLI_PARSE(cli, argc, argv)
}
// Custom help
// -s short param
// --long long param
// -b, --both short and long
// -f, --flag no param |
#shell script to see if there are tags which are not declared in root.lexc or if tags are misspelled
echo 'Are there tags not declared in root.lexc or misspelled?'
# Collect every tag used in the affix/stem lexicons: strip "!" comments, keep
# entry lines (" ;"), take the analysis side before ":", isolate the first
# field, then break +TAG and @FLAG tokens onto their own lines (via the ¢
# sentinel), keep only tag-like tokens, deduplicate, and drop known structural
# tags. Result -> temporary file "lexctags".
cat src/fst/affixes/*lexc src/fst/stems/*lexc |cut -d '!' -f1 |grep ' ;' | cut -d ':' -f1 |tr -s ' ' |sed 's/^ //' | cut -d ' ' -f1 |sed 's/+/¢+/g' |sed 's/@/¢@/g'|tr '¢' '\n' | tr '#"' '\n'| egrep '(\+|@)' |sort -u | egrep -v '^(\+|\+%|\+\/\-|\+Cmp\-|\+Cmp%\-|\@0|\@%)$' > lexctags
# Same extraction for the tags declared in root.lexc -> temporary file "roottags".
cat src/fst/root.lexc |cut -d '!' -f1 |cut -d ':' -f1 |sed 's/+/¢+/g'|sed 's/@/¢@/g' |tr '¢' '\n' | egrep '(\+|@)' |tr -d ' ' | tr -d '\t'|sort -u > roottags
echo 'Have a look at these:'
# comm -23: lines only in lexctags, i.e. tags used but never declared in root.lexc.
comm -23 lexctags roottags
echo 'checked'
rm lexctags roottags
|
package com.trackorjargh.apirestcontrolers;
import java.util.LinkedList;
import java.util.List;
import org.apache.catalina.servlet4preview.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import com.fasterxml.jackson.annotation.JsonView;
import com.trackorjargh.commoncode.CommonCodeUser;
import com.trackorjargh.component.UserComponent;
import com.trackorjargh.javaclass.DeleteElementsOfBBDD;
import com.trackorjargh.javaclass.ForgotPassword;
import com.trackorjargh.javaclass.User;
import com.trackorjargh.javarepository.ForgotPasswordRepository;
import com.trackorjargh.javarepository.UserRepository;
/**
 * REST controller exposing user-management endpoints under {@code /api}:
 * CRUD on users, password-reset ("restablecer") flows, account activation,
 * and username/email existence checks.
 */
@RestController
@RequestMapping("/api")
public class ApiUserController {

    private final UserRepository userRepository;
    private final UserComponent userComponent;
    private final CommonCodeUser commonCodeUser;
    private final ForgotPasswordRepository forgotPasswordRepository;
    private final DeleteElementsOfBBDD deleteElementofBBDD;

    @Autowired
    public ApiUserController(UserRepository userRepository, CommonCodeUser commonCodeUser,
            DeleteElementsOfBBDD deleteElementofBBDD, ForgotPasswordRepository forgotPasswordRepository,
            UserComponent userComponent) {
        this.userRepository = userRepository;
        this.commonCodeUser = commonCodeUser;
        this.deleteElementofBBDD = deleteElementofBBDD;
        this.forgotPasswordRepository = forgotPasswordRepository;
        this.userComponent = userComponent;
    }

    /** Lists all users (admin view, name-level info only). */
    @RequestMapping(value = "/usuarios/administracion", method = RequestMethod.GET)
    @JsonView(User.NameUserInfo.class)
    public ResponseEntity<List<User>> getUsers() {
        return new ResponseEntity<>(userRepository.findAll(), HttpStatus.OK);
    }

    /** Fetches a single user by name (case-insensitive); 404 when absent. */
    @RequestMapping(value = "/usuarios/{name}", method = RequestMethod.GET)
    @JsonView(User.BasicInformation.class)
    public ResponseEntity<User> getUser(@PathVariable String name) {
        User user = userRepository.findByNameIgnoreCase(name);
        if (user == null) {
            return new ResponseEntity<>(HttpStatus.NOT_FOUND);
        } else {
            return new ResponseEntity<>(user, HttpStatus.OK);
        }
    }

    /**
     * Registers a new user when neither name nor email is taken; otherwise 226.
     * NOTE(review): @ResponseStatus declares CREATED but the ResponseEntity
     * returns OK (the entity status wins) — confirm which status clients expect.
     */
    @RequestMapping(value = "/usuarios", method = RequestMethod.POST)
    @ResponseStatus(HttpStatus.CREATED)
    @JsonView(User.BasicInformation.class)
    public ResponseEntity<User> addUser(HttpServletRequest request, @RequestBody User user) {
        if (userRepository.findByNameIgnoreCase(user.getName()) == null
                && userRepository.findByEmail(user.getEmail()) == null) {
            return new ResponseEntity<>(commonCodeUser.newUserApi(user, request), HttpStatus.OK);
        } else {
            return new ResponseEntity<>(HttpStatus.IM_USED);
        }
    }

    /**
     * Edits a user. Admins may change roles; a user may edit only their own
     * account (roles stripped via an empty list); anyone else gets 401.
     */
    @RequestMapping(value = "/usuarios/{name}", method = RequestMethod.PUT)
    @JsonView(User.BasicInformation.class)
    public ResponseEntity<User> editUser(@PathVariable String name, @RequestBody User user) {
        if (userRepository.findByNameIgnoreCase(name) == null) {
            return new ResponseEntity<>(HttpStatus.NOT_FOUND);
        } else {
            User editedUser = userRepository.findByNameIgnoreCase(name);
            User logedUser = userComponent.getLoggedUser();
            if (logedUser.getRoles().contains("ROLE_ADMIN")) {
                return new ResponseEntity<>(commonCodeUser.editUser(editedUser, user.getEmail(), user.getPassword(),
                        user.getRoles(), user.getImage(), true), HttpStatus.OK);
            } else if (name.equals(logedUser.getName())) {
                return new ResponseEntity<>(commonCodeUser.editUser(editedUser, user.getEmail(), user.getPassword(),
                        new LinkedList<>(), user.getImage(), true), HttpStatus.OK);
            } else {
                return new ResponseEntity<>(HttpStatus.UNAUTHORIZED);
            }
        }
    }

    /** Deletes a user and all dependent data; returns the removed entity. */
    @RequestMapping(value = "/usuarios/{name}", method = RequestMethod.DELETE)
    @JsonView(User.BasicInformation.class)
    public ResponseEntity<User> deleteUser(@PathVariable("name") String name) {
        User user = userRepository.findByNameIgnoreCase(name);
        if (user == null) {
            return new ResponseEntity<>(HttpStatus.NOT_FOUND);
        } else {
            deleteElementofBBDD.deleteUser(user);
            return new ResponseEntity<>(user, HttpStatus.OK);
        }
    }

    /** Combined JSON view for password-reset responses (reset info + user name). */
    public interface basicInfoForgotPassword extends ForgotPassword.BasicInformation, User.NameUserInfo {
    }

    /** Looks up a pending password reset by its secret code; 404 when unknown. */
    @RequestMapping(value = "/restablecer/{alphanumericCode}/", method = RequestMethod.GET)
    @JsonView(basicInfoForgotPassword.class)
    public ResponseEntity<ForgotPassword> forgotPassword(@PathVariable("alphanumericCode") String alphanumericCode) {
        ForgotPassword forgotPass = forgotPasswordRepository.findBySecretAlphanumeric(alphanumericCode);
        if (forgotPass == null) {
            return new ResponseEntity<>(HttpStatus.NOT_FOUND);
        } else {
            return new ResponseEntity<>(forgotPass, HttpStatus.OK);
        }
    }

    /** Starts a password reset for the account matching the given email. */
    @RequestMapping(value = "/restablecer", method = RequestMethod.PUT)
    @JsonView(basicInfoForgotPassword.class)
    public ResponseEntity<Boolean> forgotPasswordChange(HttpServletRequest request, @RequestBody User userEmail) {
        User user = userRepository.findByEmail(userEmail.getEmail());
        if (user == null) {
            return new ResponseEntity<>(HttpStatus.NOT_FOUND);
        } else {
            return new ResponseEntity<>(commonCodeUser.forgotPass(user, request), HttpStatus.OK);
        }
    }

    /**
     * Completes a password reset: applies the new password to the account tied
     * to the secret code and consumes (deletes) the reset entry.
     */
    @RequestMapping(value = "/restablecer/{alphanumericCode}/", method = RequestMethod.POST)
    @JsonView(basicInfoForgotPassword.class)
    public ResponseEntity<ForgotPassword> forgotPasswordDelete(
            @PathVariable("alphanumericCode") String alphanumericCode, @RequestBody User user) {
        ForgotPassword forgotPass = forgotPasswordRepository.findBySecretAlphanumeric(alphanumericCode);
        if (forgotPass == null) {
            return new ResponseEntity<>(HttpStatus.NOT_FOUND);
        } else {
            User userOld = forgotPass.getUser();
            userOld.setPasswordCodificate(user.getPassword());
            forgotPasswordRepository.delete(forgotPass);
            userRepository.save(userOld);
            return new ResponseEntity<>(forgotPass, HttpStatus.OK);
        }
    }

    /** Activates the named account; 404 when the user does not exist. */
    @RequestMapping(value = "/activar/{name}", method = RequestMethod.GET)
    @JsonView(User.ActiveInformation.class)
    public ResponseEntity<User> activatedUser(@PathVariable("name") String name) {
        User user = userRepository.findByNameIgnoreCase(name);
        if (user == null) {
            return new ResponseEntity<>(HttpStatus.NOT_FOUND);
        } else {
            return new ResponseEntity<>(commonCodeUser.activatedUser(user), HttpStatus.OK);
        }
    }

    /** @return true when a user with the given name exists (case-insensitive). */
    @RequestMapping(value = "/comprobarusuario/{name}/", method = RequestMethod.GET)
    public boolean checkUser(@PathVariable String name) {
        // Simplified from if/else returning boolean literals.
        return userRepository.findByNameIgnoreCase(name) != null;
    }

    /** @return true when a user with the given email exists. */
    @RequestMapping(value = "/comprobaremail/{email}/", method = RequestMethod.GET)
    public boolean checkEmail(@PathVariable String email) {
        return userRepository.findByEmail(email) != null;
    }
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -ex

# Default to plain "R" unless the caller supplies a specific binary.
: ${R_BIN:=R}

# The Dockerfile should have put this file here
if [ -f "/arrow/ci/etc/rprofile" ]; then
  # Ensure parallel R package installation, set CRAN repo mirror,
  # and use pre-built binaries where possible
  cat /arrow/ci/etc/rprofile >> $(${R_BIN} RHOME)/etc/Rprofile.site
fi

# Ensure parallel compilation of C/C++ code
echo "MAKEFLAGS=-j$(${R_BIN} -s -e 'cat(parallel::detectCores())')" >> $(R RHOME)/etc/Renviron.site

# Special hacking to try to reproduce quirks on fedora-clang-devel on CRAN
# which uses a bespoke clang compiled to use libc++
# https://www.stats.ox.ac.uk/pub/bdr/Rconfig/r-devel-linux-x86_64-fedora-clang
if [ "$RHUB_PLATFORM" = "linux-x86_64-fedora-clang" ]; then
  dnf install -y libcxx-devel
  # Append -stdlib=libc++ to every CXX definition in R's Makeconf.
  sed -i.bak -E -e 's/(CXX1?1? =.*)/\1 -stdlib=libc++/g' $(${R_BIN} RHOME)/etc/Makeconf
  rm -rf $(${R_BIN} RHOME)/etc/Makeconf.bak
  sed -i.bak -E -e 's/(CXXFLAGS = )(.*)/\1 -g -O3 -Wall -pedantic -frtti -fPIC/' $(${R_BIN} RHOME)/etc/Makeconf
  rm -rf $(${R_BIN} RHOME)/etc/Makeconf.bak
fi

# Special hacking to try to reproduce quirks on centos using non-default build
# tooling.
if [[ "$DEVTOOLSET_VERSION" -gt 0 ]]; then
  if [ "`which dnf`" ]; then
    dnf install -y centos-release-scl
    dnf install -y "devtoolset-$DEVTOOLSET_VERSION"
  else
    yum install -y centos-release-scl
    yum install -y "devtoolset-$DEVTOOLSET_VERSION"
  fi
fi

# Install openssl for S3 support
if [ "$ARROW_S3" == "ON" ] || [ "$ARROW_R_DEV" == "TRUE" ]; then
  # Pick whichever package manager this base image provides.
  if [ "`which dnf`" ]; then
    dnf install -y libcurl-devel openssl-devel
  elif [ "`which yum`" ]; then
    yum install -y libcurl-devel openssl-devel
  elif [ "`which zypper`" ]; then
    zypper install -y libcurl-devel libopenssl-devel
  else
    apt-get update
    apt-get install -y libcurl4-openssl-dev libssl-dev
  fi

  # The Dockerfile should have put this file here
  if [ -f "/arrow/ci/scripts/install_minio.sh" ] && [ "`which wget`" ]; then
    /arrow/ci/scripts/install_minio.sh latest /usr/local
  fi

  if [ -f "/arrow/ci/scripts/install_gcs_testbench.sh" ] && [ "`which pip`" ]; then
    /arrow/ci/scripts/install_gcs_testbench.sh default
  fi
fi

# Install patch if it doesn't already exist
if [ ! $(command -v patch) ]; then
  if [ "`which dnf`" ]; then
    dnf install -y patch
  elif [ "`which yum`" ]; then
    yum install -y patch
  elif [ "`which zypper`" ]; then
    zypper install -y patch
  else
    apt-get update
    apt-get install -y patch
  fi
fi

# Workaround for html help install failure; see https://github.com/r-lib/devtools/issues/2084#issuecomment-530912786
Rscript -e 'x <- file.path(R.home("doc"), "html"); if (!file.exists(x)) {dir.create(x, recursive=TRUE); file.copy(system.file("html/R.css", package="stats"), x)}'
|
<gh_stars>0
#include "Etterna/Globals/global.h"
#include "Etterna/Models/Misc/Difficulty.h"
#include "Etterna/Models/Misc/GameInput.h"
#include "Etterna/FileTypes/MsdFile.h"
#include "Etterna/Models/NoteData/NoteData.h"
#include "NotesLoader.h"
#include "NotesLoaderDWI.h"
#include "Etterna/Singletons/PrefsManager.h"
#include "RageUtil/Misc/RageLog.h"
#include "RageUtil/Utils/RageUtil.h"
#include "RageUtil/Utils/RageUtil_CharConversions.h"
#include "Etterna/Models/Songs/Song.h"
#include "Etterna/Models/StepsAndStyles/Steps.h"
#include <map>
Difficulty
DwiCompatibleStringToDifficulty(const RString& sDC);
/** @brief The different types of core DWI arrows and pads. */
enum DanceNotes
{
	DANCE_NOTE_NONE = 0, // no arrow on this pad
	// Pad 1 (player 1) directions, including the solo-mode diagonals.
	DANCE_NOTE_PAD1_LEFT,
	DANCE_NOTE_PAD1_UPLEFT,
	DANCE_NOTE_PAD1_DOWN,
	DANCE_NOTE_PAD1_UP,
	DANCE_NOTE_PAD1_UPRIGHT,
	DANCE_NOTE_PAD1_RIGHT,
	// Pad 2 (player 2 / doubles) directions, same ordering as pad 1.
	DANCE_NOTE_PAD2_LEFT,
	DANCE_NOTE_PAD2_UPLEFT,
	DANCE_NOTE_PAD2_DOWN,
	DANCE_NOTE_PAD2_UP,
	DANCE_NOTE_PAD2_UPRIGHT,
	DANCE_NOTE_PAD2_RIGHT
};
/**
 * @brief Turn an individual DWI step character into up to two notes.
 * @param c The character in question.
 * @param i The controller (pad) the character belongs to.
 * @param note1Out The first decoded note (DANCE_NOTE_NONE if unused).
 * @param note2Out The second decoded note (DANCE_NOTE_NONE if unused).
 * @param sPath the path to the file, for error reporting.
 */
static void
DWIcharToNote(char c,
	      GameController i,
	      int& note1Out,
	      int& note2Out,
	      const RString& sPath)
{
	// Each valid DWI character maps onto one or two simultaneous pad-1
	// inputs; a jump fills both slots, a single step leaves slot 2 NONE.
	struct CharNotes
	{
		char chr;
		int note1;
		int note2;
	};
	static const CharNotes kCharMap[] = {
		{ '0', DANCE_NOTE_NONE, DANCE_NOTE_NONE },
		{ '1', DANCE_NOTE_PAD1_DOWN, DANCE_NOTE_PAD1_LEFT },
		{ '2', DANCE_NOTE_PAD1_DOWN, DANCE_NOTE_NONE },
		{ '3', DANCE_NOTE_PAD1_DOWN, DANCE_NOTE_PAD1_RIGHT },
		{ '4', DANCE_NOTE_PAD1_LEFT, DANCE_NOTE_NONE },
		{ '5', DANCE_NOTE_NONE, DANCE_NOTE_NONE },
		{ '6', DANCE_NOTE_PAD1_RIGHT, DANCE_NOTE_NONE },
		{ '7', DANCE_NOTE_PAD1_UP, DANCE_NOTE_PAD1_LEFT },
		{ '8', DANCE_NOTE_PAD1_UP, DANCE_NOTE_NONE },
		{ '9', DANCE_NOTE_PAD1_UP, DANCE_NOTE_PAD1_RIGHT },
		{ 'A', DANCE_NOTE_PAD1_UP, DANCE_NOTE_PAD1_DOWN },
		{ 'B', DANCE_NOTE_PAD1_LEFT, DANCE_NOTE_PAD1_RIGHT },
		{ 'C', DANCE_NOTE_PAD1_UPLEFT, DANCE_NOTE_NONE },
		{ 'D', DANCE_NOTE_PAD1_UPRIGHT, DANCE_NOTE_NONE },
		{ 'E', DANCE_NOTE_PAD1_LEFT, DANCE_NOTE_PAD1_UPLEFT },
		{ 'F', DANCE_NOTE_PAD1_UPLEFT, DANCE_NOTE_PAD1_DOWN },
		{ 'G', DANCE_NOTE_PAD1_UPLEFT, DANCE_NOTE_PAD1_UP },
		{ 'H', DANCE_NOTE_PAD1_UPLEFT, DANCE_NOTE_PAD1_RIGHT },
		{ 'I', DANCE_NOTE_PAD1_LEFT, DANCE_NOTE_PAD1_UPRIGHT },
		{ 'J', DANCE_NOTE_PAD1_DOWN, DANCE_NOTE_PAD1_UPRIGHT },
		{ 'K', DANCE_NOTE_PAD1_UP, DANCE_NOTE_PAD1_UPRIGHT },
		{ 'L', DANCE_NOTE_PAD1_UPRIGHT, DANCE_NOTE_PAD1_RIGHT },
		{ 'M', DANCE_NOTE_PAD1_UPLEFT, DANCE_NOTE_PAD1_UPRIGHT },
	};

	note1Out = DANCE_NOTE_NONE;
	note2Out = DANCE_NOTE_NONE;
	bool bFound = false;
	for (const CharNotes& entry : kCharMap) {
		if (entry.chr == c) {
			note1Out = entry.note1;
			note2Out = entry.note2;
			bFound = true;
			break;
		}
	}
	if (!bFound)
		LOG->UserLog(
		  "Song file", sPath, "has an invalid DWI note character '%c'.", c);

	switch (i) {
		case GameController_1:
			break;
		case GameController_2:
			// Shift each pad-1 note to its pad-2 counterpart
			// (the enum lays PAD2 entries 6 past PAD1).
			if (note1Out != DANCE_NOTE_NONE)
				note1Out += 6;
			if (note2Out != DANCE_NOTE_NONE)
				note2Out += 6;
			break;
		default:
			FAIL_M(ssprintf("Invalid GameController: %i", i));
	}
}
/**
 * @brief Determine the note column[s] in which to place notes.
 * @param c The character in question.
 * @param i The controller (pad) the character belongs to.
 * @param col1Out The first column, or -1 when the slot is unused.
 * @param col2Out The second column, or -1 when the slot is unused.
 * @param sPath the path to the file, for error reporting.
 * @param mapDanceNoteToColumn maps DANCE_NOTE_* ids to track columns.
 */
static void
DWIcharToNoteCol(char c,
		 GameController i,
		 int& col1Out,
		 int& col2Out,
		 const RString& sPath,
		 map<int, int>& mapDanceNoteToColumn)
{
	// Decode the character into up to two notes, then translate each note
	// into its NoteData column; unused slots come back as -1.
	int note1, note2;
	DWIcharToNote(c, i, note1, note2, sPath);
	col1Out = (note1 == DANCE_NOTE_NONE) ? -1 : mapDanceNoteToColumn[note1];
	col2Out = (note2 == DANCE_NOTE_NONE) ? -1 : mapDanceNoteToColumn[note2];
}
/**
* @brief Determine if the note in question is a 192nd note.
*
* DWI used to use <...> to indicate 1/192nd notes; at some
* point, <...> was changed to indicate jumps, and `' was used for
* 1/192nds. So, we have to do a check to figure out what it really
* means. If it contains 0s, it's most likely 192nds; otherwise,
* it's most likely a jump. Search for a 0 before the next >:
* @param sStepData the step data.
* @param pos the position of the step data.
* @return true if it's a 192nd note, false otherwise.
*/
static bool
Is192(const RString& sStepData, size_t pos)
{
while (pos < sStepData.size()) {
if (sStepData[pos] == '>')
return false;
if (sStepData[pos] == '0')
return true;
++pos;
}
return false;
}
/** @brief All DWI files use 4 beats per measure.
 * Used below to convert note-fraction incrementers (1/8, 1/16, ...) into
 * beat offsets. */
const int BEATS_PER_MEASURE = 4;
/* We prefer the normal names; recognize a number of others, too. (They'll get
 * normalized when written to SMs, etc.) */
Difficulty
DwiCompatibleStringToDifficulty(const RString& sDC)
{
	// Every difficulty spelling DWI files are known to use, compared
	// after lower-casing the input.
	struct NameDifficulty
	{
		const char* name;
		Difficulty difficulty;
	};
	static const NameDifficulty kNames[] = {
		{ "beginner", Difficulty_Beginner },
		{ "easy", Difficulty_Easy },
		{ "basic", Difficulty_Easy },
		{ "light", Difficulty_Easy },
		{ "medium", Difficulty_Medium },
		{ "another", Difficulty_Medium },
		{ "trick", Difficulty_Medium },
		{ "standard", Difficulty_Medium },
		{ "difficult", Difficulty_Medium },
		{ "hard", Difficulty_Hard },
		{ "ssr", Difficulty_Hard },
		{ "maniac", Difficulty_Hard },
		{ "heavy", Difficulty_Hard },
		{ "smaniac", Difficulty_Challenge },
		{ "challenge", Difficulty_Challenge },
		{ "expert", Difficulty_Challenge },
		{ "oni", Difficulty_Challenge },
		{ "edit", Difficulty_Edit },
	};

	RString s2 = sDC;
	s2.MakeLower();
	for (const NameDifficulty& entry : kNames)
		if (s2 == entry.name)
			return entry.difficulty;
	return Difficulty_Invalid;
}
static StepsType
GetTypeFromMode(const RString& mode)
{
	// DWI mode tags map one-to-one onto dance StepsTypes; the comparison
	// is case-sensitive, matching the original parser.
	struct ModeType
	{
		const char* name;
		StepsType type;
	};
	static const ModeType kModes[] = {
		{ "SINGLE", StepsType_dance_single },
		{ "DOUBLE", StepsType_dance_double },
		{ "SOLO", StepsType_dance_solo },
	};
	for (const ModeType& m : kModes)
		if (mode == m.name)
			return m.type;
	return StepsType_Invalid; // unrecognized mode tag
}
/* Convert the raw DWI step strings into NoteData. step1 holds pad-1 data;
 * step2 holds pad-2 data (doubles) and may be empty. `out` supplies the
 * StepsType, which fixes the column layout, and the difficulty used in
 * error messages. */
static NoteData
ParseNoteData(RString& step1, RString& step2, Steps& out, const RString& path)
{
	// Column layout for this StepsType: DANCE_NOTE_* id -> NoteData track.
	std::map<int, int> g_mapDanceNoteToNoteDataColumn;
	switch (out.m_StepsType) {
		case StepsType_dance_single:
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_LEFT] = 0;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_DOWN] = 1;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_UP] = 2;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_RIGHT] = 3;
			break;
		case StepsType_dance_double:
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_LEFT] = 0;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_DOWN] = 1;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_UP] = 2;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_RIGHT] = 3;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD2_LEFT] = 4;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD2_DOWN] = 5;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD2_UP] = 6;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD2_RIGHT] = 7;
			break;
		case StepsType_dance_solo:
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_LEFT] = 0;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_UPLEFT] = 1;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_DOWN] = 2;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_UP] = 3;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_UPRIGHT] = 4;
			g_mapDanceNoteToNoteDataColumn[DANCE_NOTE_PAD1_RIGHT] = 5;
			break;
			DEFAULT_FAIL(out.m_StepsType);
	}

	NoteData newNoteData;
	newNoteData.SetNumTracks(g_mapDanceNoteToNoteDataColumn.size());

	for (int pad = 0; pad < 2; pad++) // foreach pad
	{
		RString sStepData;
		switch (pad) {
			case 0:
				sStepData = step1;
				break;
			case 1:
				if (step2 == "") // no data
					continue; // skip
				sStepData = step2;
				break;
				DEFAULT_FAIL(pad);
		}

		// Strip all whitespace; DWI step data may be wrapped arbitrarily.
		sStepData.Replace("\n", "");
		sStepData.Replace("\r", "");
		sStepData.Replace("\t", "");
		sStepData.Replace(" ", "");

		double fCurrentBeat = 0;
		// Default spacing between characters is 8th notes.
		double fCurrentIncrementer = 1.0 / 8 * BEATS_PER_MEASURE;

		for (size_t i = 0; i < sStepData.size();) {
			char c = sStepData[i++];
			switch (c) {
				// begins a series: ( = 16ths, [ = 24ths, { = 64ths, ` = 192nds
				case '(':
					fCurrentIncrementer = 1.0 / 16 * BEATS_PER_MEASURE;
					break;
				case '[':
					fCurrentIncrementer = 1.0 / 24 * BEATS_PER_MEASURE;
					break;
				case '{':
					fCurrentIncrementer = 1.0 / 64 * BEATS_PER_MEASURE;
					break;
				case '`':
					fCurrentIncrementer = 1.0 / 192 * BEATS_PER_MEASURE;
					break;
				// ends a series: revert to 8th-note spacing
				case ')':
				case ']':
				case '}':
				case '\'':
				case '>':
					fCurrentIncrementer = 1.0 / 8 * BEATS_PER_MEASURE;
					break;
				default: // this is a note character
				{
					if (c == '!') {
						// '!' is only valid after a note (hold marker);
						// a bare one is reported and skipped.
						LOG->UserLog("Song file",
							     path,
							     "has an unexpected character: '!'.");
						continue;
					}

					bool jump = false;
					if (c == '<') {
						/* Arr. Is this a jump or a 1/192 marker? */
						if (Is192(sStepData, i)) {
							fCurrentIncrementer = 1.0 / 192 * BEATS_PER_MEASURE;
							break;
						}
						/* It's a jump.
						 * We need to keep reading notes until we hit a >. */
						jump = true;
						i++;
					}

					const int iIndex =
					  BeatToNoteRow(static_cast<float>(fCurrentBeat));
					// Step back so the do-loop re-reads the current character.
					i--;
					do {
						c = sStepData[i++];
						if (jump && c == '>')
							break;

						int iCol1, iCol2;
						DWIcharToNoteCol(c,
								 (GameController)pad,
								 iCol1,
								 iCol2,
								 path,
								 g_mapDanceNoteToNoteDataColumn);

						if (iCol1 != -1)
							newNoteData.SetTapNote(
							  iCol1, iIndex, TAP_ORIGINAL_TAP);
						if (iCol2 != -1)
							newNoteData.SetTapNote(
							  iCol2, iIndex, TAP_ORIGINAL_TAP);

						if (i >= sStepData.length()) {
							break;
							// we ran out of data
							// while looking for the ending > mark
						}

						// A trailing '!' marks the next character's
						// columns as hold heads at this row.
						if (sStepData[i] == '!') {
							i++;
							const char holdChar = sStepData[i++];

							DWIcharToNoteCol(holdChar,
									 (GameController)pad,
									 iCol1,
									 iCol2,
									 path,
									 g_mapDanceNoteToNoteDataColumn);

							if (iCol1 != -1)
								newNoteData.SetTapNote(
								  iCol1, iIndex, TAP_ORIGINAL_HOLD_HEAD);
							if (iCol2 != -1)
								newNoteData.SetTapNote(
								  iCol2, iIndex, TAP_ORIGINAL_HOLD_HEAD);
						}
					} while (jump);
					fCurrentBeat += fCurrentIncrementer;
				} break;
			}
		}
	}

	/* Fill in iDuration: each hold head extends to the next non-empty tap
	 * in its track, which is consumed as the hold's tail. */
	for (int t = 0; t < newNoteData.GetNumTracks(); ++t) {
		FOREACH_NONEMPTY_ROW_IN_TRACK(newNoteData, t, iHeadRow)
		{
			TapNote tn = newNoteData.GetTapNote(t, iHeadRow);
			if (tn.type != TapNoteType_HoldHead)
				continue;

			int iTailRow = iHeadRow;
			bool bFound = false;
			while (!bFound &&
			       newNoteData.GetNextTapNoteRowForTrack(t, iTailRow)) {
				const TapNote& TailTap = newNoteData.GetTapNote(t, iTailRow);
				if (TailTap.type == TapNoteType_Empty)
					continue;

				// Consume the tail note and store the hold length.
				newNoteData.SetTapNote(t, iTailRow, TAP_EMPTY);
				tn.iDuration = iTailRow - iHeadRow;
				newNoteData.SetTapNote(t, iHeadRow, tn);
				bFound = true;
			}

			if (!bFound) {
				/* The hold was never closed. */
				LOG->UserLog(
				  "Song file",
				  path,
				  "failed to close a hold note in \"%s\" on track %i",
				  DifficultyToString(out.GetDifficulty()).c_str(),
				  t);

				newNoteData.SetTapNote(t, iHeadRow, TAP_EMPTY);
			}
		}
	}

	ASSERT(newNoteData.GetNumTracks() > 0);
	return newNoteData;
}
/**
 * @brief Build one Steps object from the fields of a DWI difficulty tag.
 * @param sMode the steps type tag (SINGLE/DOUBLE/SOLO).
 * @param sDescription the difficulty name.
 * @param sNumFeet the meter.
 * @param sStepData1 the guaranteed step data.
 * @param sStepData2 used if sMode is double or couple.
 * @param out the step data.
 * @param sPath the path to the file.
 * @return true on success, false when the mode is unrecognized.
 */
static bool
LoadFromDWITokens(RString sMode,
		  RString sDescription,
		  RString sNumFeet,
		  RString sStepData1,
		  RString sStepData2,
		  Steps& out,
		  const RString& sPath)
{
	CHECKPOINT_M("DWILoader::LoadFromDWITokens()");

	out.m_StepsType = GetTypeFromMode(sMode);
	if (out.m_StepsType == StepsType_Invalid)
		return false;

	// An empty meter field is treated as a meter of 1.
	out.SetMeter(StringToInt(sNumFeet.empty() ? RString("1") : sNumFeet));
	out.SetDifficulty(DwiCompatibleStringToDifficulty(sDescription));
	out.SetNoteData(ParseNoteData(sStepData1, sStepData2, out, sPath));
	out.TidyUpData();

	// We're loading from disk, so this chart is by definition already saved.
	out.SetSavedToDisk(true);

	return true;
}
/**
 * @brief Turn the DWI style timestamp into a compatible time for our system.
 *
 * This value can be in either "HH:MM:SS.sssss", "MM:SS.sssss", "SSS.sssss"
 * or milliseconds.
 * @param arg1 Either hours, minutes, or seconds, depending on other args.
 * @param arg2 Either minutes or seconds, depending on other args.
 * @param arg3 Seconds if not empty.
 * @return the timestamp in seconds.
 */
static float
ParseBrokenDWITimestamp(const RString& arg1,
			const RString& arg2,
			const RString& arg3)
{
	if (arg1.empty())
		return 0;

	/* 1 arg: a bare value is seconds when it contains a decimal point,
	 * milliseconds otherwise. */
	if (arg2.empty()) {
		const bool bHasPeriod = arg1.find_first_of(".") != arg1.npos;
		return bHasPeriod ? StringToFloat(arg1)
				  : StringToFloat(arg1) / 1000.f;
	}

	/* 2 args: "MM:SS". */
	if (arg3.empty())
		return HHMMSSToSeconds(arg1 + ":" + arg2);

	/* 3 args: "HH:MM:SS". */
	return HHMMSSToSeconds(arg1 + ":" + arg2 + ":" + arg3);
}
// Collect every *.dwi file in the song directory sPath into `out`.
void
DWILoader::GetApplicableFiles(const RString& sPath, vector<RString>& out)
{
	GetDirListing(sPath + RString("*.dwi"), out);
}
// Re-parse only the note data for an already-loaded chart: find the tag
// whose mode/difficulty/meter match `out` and rebuild its NoteData.
// Returns false when the file can't be read or no tag matches.
bool
DWILoader::LoadNoteDataFromSimfile(const RString& path, Steps& out)
{
	MsdFile msd;
	if (!msd.ReadFile(path, false)) // don't unescape
	{
		LOG->UserLog(
		  "Song file", path, "couldn't be opened: %s", msd.GetError().c_str());
		return false;
	}
	for (unsigned i = 0; i < msd.GetNumValues(); i++) {
		int iNumParams = msd.GetNumParams(i);
		const MsdFile::value_t& params = msd.GetValue(i);
		RString valueName = params[0];
		if (valueName.EqualsNoCase("SINGLE") ||
		    valueName.EqualsNoCase("DOUBLE") ||
		    valueName.EqualsNoCase("COUPLE") ||
		    valueName.EqualsNoCase("SOLO")) {
			// Match on StepsType first.
			// NOTE(review): GetTypeFromMode() has no COUPLE case, so a
			// COUPLE tag can never match here — confirm this is intended.
			if (out.m_StepsType != GetTypeFromMode(valueName))
				continue;
			// Then difficulty, also accepting "<Difficulty> Edit"
			// descriptions; then the meter.
			if (out.GetDifficulty() !=
			      DwiCompatibleStringToDifficulty(params[1]) &&
			    out.GetDescription().find(
			      DifficultyToString(
				DwiCompatibleStringToDifficulty(params[1])) +
			      " Edit") == RString::npos)
				continue;
			if (out.GetMeter() != StringToInt(params[2]))
				continue;
			RString step1 = params[3];
			// The 5th parameter (second pad) is only present for doubles.
			RString step2 = (iNumParams == 5) ? params[4] : RString("");
			out.SetNoteData(ParseNoteData(step1, step2, out, path));
			return true;
		}
	}
	return false;
}
// Load the single .dwi file in sPath_ into `out`: song metadata, timing
// data (BPM/freezes), and one Steps object per difficulty tag. Image names
// referenced by unsupported DISPLAY* tags are added to BlacklistedImages.
bool
DWILoader::LoadFromDir(const RString& sPath_,
		       Song& out,
		       set<RString>& BlacklistedImages)
{
	vector<RString> aFileNames;
	GetApplicableFiles(sPath_, aFileNames);

	if (aFileNames.size() > 1) {
		LOG->UserLog("Song",
			     sPath_,
			     "has more than one DWI file. There should be only one!");
		return false;
	}

	/* We should have exactly one; if we had none, we shouldn't have been called
	 * to begin with. */
	ASSERT(aFileNames.size() == 1);

	const RString sPath = sPath_ + aFileNames[0];

	LOG->Trace("Song::LoadFromDWIFile(%s)", sPath.c_str());

	MsdFile msd;
	if (!msd.ReadFile(sPath, false)) // don't unescape
	{
		LOG->UserLog(
		  "Song file", sPath, "couldn't be opened: %s", msd.GetError().c_str());
		return false;
	}

	out.m_sSongFileName = sPath;

	for (unsigned i = 0; i < msd.GetNumValues(); i++) {
		int iNumParams = msd.GetNumParams(i);
		const MsdFile::value_t& sParams = msd.GetValue(i);
		// NOTE(review): sParams[0] is read before the iNumParams check
		// below; this relies on MsdFile::value_t tolerating out-of-range
		// access — confirm against MsdFile's operator[].
		RString sValueName = sParams[0];

		if (iNumParams < 1) {
			LOG->UserLog("Song file",
				     sPath,
				     "has tag \"%s\" with no parameters.",
				     sValueName.c_str());
			continue;
		}

		// handle the data
		if (sValueName.EqualsNoCase("FILE"))
			out.m_sMusicFile = sParams[1];

		else if (sValueName.EqualsNoCase("TITLE")) {
			NotesLoader::GetMainAndSubTitlesFromFullTitle(
			  sParams[1], out.m_sMainTitle, out.m_sSubTitle);

			/* As far as I know, there's no spec on the encoding of this text.
			 * (I didn't look very hard, though.) I've seen at least one file
			 * in ISO-8859-1. */
			ConvertString(out.m_sMainTitle, "utf-8,english");
			ConvertString(out.m_sSubTitle, "utf-8,english");
		}

		else if (sValueName.EqualsNoCase("ARTIST")) {
			out.m_sArtist = sParams[1];
			ConvertString(out.m_sArtist, "utf-8,english");
		}

		else if (sValueName.EqualsNoCase("GENRE")) {
			out.m_sGenre = sParams[1];
			ConvertString(out.m_sGenre, "utf-8,english");
		}

		else if (sValueName.EqualsNoCase("CDTITLE"))
			out.m_sCDTitleFile = sParams[1];

		else if (sValueName.EqualsNoCase("BPM")) {
			// Initial BPM, anchored at beat 0.
			const float fBPM = StringToFloat(sParams[1]);

			if (unlikely(fBPM <= 0.0f)) {
				LOG->UserLog("Song file",
					     sPath,
					     "has an invalid BPM change at beat %f, BPM %f.",
					     0.0f,
					     fBPM);
			} else {
				out.m_SongTiming.AddSegment(BPMSegment(0, fBPM));
			}
		} else if (sValueName.EqualsNoCase("DISPLAYBPM")) {
			// #DISPLAYBPM:[xxx..xxx]|[xxx]|[*];
			int iMin, iMax;
			/* We can't parse this as a float with sscanf, since '.' is a valid
			 * character in a float. (We could do it with a regex, but it's not
			 * worth bothering with since we don't display fractional BPM
			 * anyway.) */
			if (sscanf(sParams[1], "%i..%i", &iMin, &iMax) == 2) {
				out.m_DisplayBPMType = DISPLAY_BPM_SPECIFIED;
				out.m_fSpecifiedBPMMin = static_cast<float>(iMin);
				out.m_fSpecifiedBPMMax = static_cast<float>(iMax);
			} else if (sscanf(sParams[1], "%i", &iMin) == 1) {
				out.m_DisplayBPMType = DISPLAY_BPM_SPECIFIED;
				out.m_fSpecifiedBPMMin = out.m_fSpecifiedBPMMax =
				  static_cast<float>(iMin);
			} else {
				// "*" or anything unparsable: random display BPM.
				out.m_DisplayBPMType = DISPLAY_BPM_RANDOM;
			}
		}

		else if (sValueName.EqualsNoCase("GAP"))
			// the units of GAP is 1/1000 second
			out.m_SongTiming.m_fBeat0OffsetInSeconds =
			  -StringToInt(sParams[1]) / 1000.0f;

		else if (sValueName.EqualsNoCase("SAMPLESTART"))
			out.m_fMusicSampleStartSeconds =
			  ParseBrokenDWITimestamp(sParams[1], sParams[2], sParams[3]);

		else if (sValueName.EqualsNoCase("SAMPLELENGTH")) {
			float sampleLength =
			  ParseBrokenDWITimestamp(sParams[1], sParams[2], sParams[3]);
			if (sampleLength > 0 && sampleLength < 1) {
				// there were multiple versions of this tag allegedly: ensure a
				// decent length if requested.
				sampleLength *= 1000;
			}
			out.m_fMusicSampleLengthSeconds = sampleLength;
		}

		else if (sValueName.EqualsNoCase("FREEZE")) {
			// Comma-separated "position=milliseconds" stop segments;
			// positions are divided by 4 to convert to beats.
			vector<RString> arrayFreezeExpressions;
			split(sParams[1], ",", arrayFreezeExpressions);

			for (unsigned f = 0; f < arrayFreezeExpressions.size(); f++) {
				vector<RString> arrayFreezeValues;
				split(arrayFreezeExpressions[f], "=", arrayFreezeValues);
				if (arrayFreezeValues.size() != 2) {
					LOG->UserLog("Song file",
						     sPath,
						     "has an invalid FREEZE: '%s'.",
						     arrayFreezeExpressions[f].c_str());
					continue;
				}
				int iFreezeRow =
				  BeatToNoteRow(StringToFloat(arrayFreezeValues[0]) / 4.0f);
				float fFreezeSeconds =
				  StringToFloat(arrayFreezeValues[1]) / 1000.0f;

				out.m_SongTiming.AddSegment(
				  StopSegment(iFreezeRow, fFreezeSeconds));
				// LOG->Trace( "Adding a freeze segment: beat: %f,
				// seconds = %f", fFreezeBeat, fFreezeSeconds );
			}
		}

		else if (sValueName.EqualsNoCase("CHANGEBPM") ||
			 sValueName.EqualsNoCase("BPMCHANGE")) {
			// Comma-separated "position=bpm" BPM changes; positions are
			// divided by 4 to convert to beats.
			vector<RString> arrayBPMChangeExpressions;
			split(sParams[1], ",", arrayBPMChangeExpressions);

			for (unsigned b = 0; b < arrayBPMChangeExpressions.size(); b++) {
				vector<RString> arrayBPMChangeValues;
				split(arrayBPMChangeExpressions[b], "=", arrayBPMChangeValues);
				if (arrayBPMChangeValues.size() != 2) {
					LOG->UserLog("Song file",
						     sPath,
						     "has an invalid CHANGEBPM: '%s'.",
						     arrayBPMChangeExpressions[b].c_str());
					continue;
				}

				int iStartIndex =
				  BeatToNoteRow(StringToFloat(arrayBPMChangeValues[0]) / 4.0f);
				float fBPM = StringToFloat(arrayBPMChangeValues[1]);
				if (fBPM > 0.0f)
					out.m_SongTiming.AddSegment(BPMSegment(iStartIndex, fBPM));
				else
					LOG->UserLog(
					  "Song file",
					  sPath,
					  "has an invalid BPM change at beat %f, BPM %f.",
					  NoteRowToBeat(iStartIndex),
					  fBPM);
			}
		}

		else if (sValueName.EqualsNoCase("SINGLE") ||
			 sValueName.EqualsNoCase("DOUBLE") ||
			 sValueName.EqualsNoCase("COUPLE") ||
			 sValueName.EqualsNoCase("SOLO")) {
			// Each of these tags defines one chart.
			Steps* pNewNotes = out.CreateSteps();
			LoadFromDWITokens(sParams[0],
					  sParams[1],
					  sParams[2],
					  sParams[3],
					  (iNumParams == 5) ? sParams[4] : RString(""),
					  *pNewNotes,
					  sPath);
			if (pNewNotes->m_StepsType != StepsType_Invalid) {
				pNewNotes->SetFilename(sPath);
				out.AddSteps(pNewNotes);
			} else
				delete pNewNotes;
		} else if (sValueName.EqualsNoCase("DISPLAYTITLE") ||
			   sValueName.EqualsNoCase("DISPLAYARTIST")) {
			/* We don't want to support these tags. However, we don't want
			 * to pick up images used here as song images (eg. banners). */
			RString param = sParams[1];
			/* "{foo} ... {foo2}" */
			size_t pos = 0;
			while (pos < RString::npos) {
				size_t startpos = param.find('{', pos);
				if (startpos == RString::npos)
					break;
				size_t endpos = param.find('}', startpos);
				if (endpos == RString::npos)
					break;

				RString sub = param.substr(startpos + 1, endpos - startpos - 1);

				pos = endpos + 1;
				sub.MakeLower();
				BlacklistedImages.insert(sub);
			}
		} else {
			// do nothing. We don't care about this value name
		}
	}

	return true;
}
|
package jblocks;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import javax.imageio.ImageIO;
import org.apache.batik.dom.svg.SAXSVGDocumentFactory;
import org.apache.batik.util.XMLResourceDescriptor;
import org.jsfml.graphics.Image;
import org.jsfml.graphics.Texture;
import org.jsfml.graphics.TextureCreationException;
import org.jsfml.system.Vector2f;
import org.w3c.dom.svg.SVGDocument;
import com.fasterxml.jackson.databind.JsonNode;
/**
 * A Scratch costume asset: downloads (and optionally caches) the image
 * referenced by the costume's MD5 hash, rasterizes SVGs, and exposes the
 * result as a JSFML {@link Image}/{@link Texture}.
 */
public class Costume extends Asset {

	Image image;
	Texture texture;
	Vector2f center;
	float resolution;

	/**
	 * Builds a costume from its Scratch project JSON node and immediately
	 * attempts to download/decode the referenced asset. Load failures are
	 * reported to stdout and leave the image/texture empty.
	 *
	 * @param config the costume object from a Scratch project JSON
	 */
	public Costume(JsonNode config) {
		super(config);
		name = config.path("costumeName").textValue();
		hash = config.path("baseLayerMD5").textValue();
		center = new Vector2f(config.path("rotationCenterX").floatValue(), config.path("rotationCenterY").floatValue());
		resolution = config.path("bitmapResolution").floatValue();
		try {
			loadAsset();
		} catch (IOException e) {
			System.out.println("Could not load asset: " + e.getMessage());
		}
	}

	/** When true, downloaded assets are kept on disk under {@code .cache/}. */
	static final boolean CACHE = true;

	/**
	 * Downloads (or reads from the cache) the asset named by {@code hash},
	 * decodes it (rasterizing SVGs), and uploads it into the texture.
	 *
	 * @throws IOException if the asset cannot be fetched or decoded
	 */
	@Override
	public void loadAsset() throws IOException {
		image = new Image();
		texture = new Texture();
		System.out.println("Loading " + hash);

		String url = "http://cdn.assets.scratch.mit.edu/internalapi/asset/" + hash + "/get/";
		InputStream stream;
		if (CACHE) {
			File directory = new File(".cache");
			if (!directory.exists()) {
				directory.mkdir();
			}
			File f = new File(".cache", hash);
			if (!f.exists()) {
				download(url, f);
			}
			stream = new FileInputStream(f);
		} else {
			stream = new URL(url).openStream();
		}

		// Fix: the input stream was previously never closed.
		try {
			if (hash.endsWith(".svg")) {
				SVGDocument document = (SVGDocument) new SAXSVGDocumentFactory(XMLResourceDescriptor.getXMLParserClassName()).createDocument("http://scratch.mit.edu/", stream);
				BufferedImage img = new SvgImage(document).getImage();
				if (CACHE) {
					// Debug dump of the rasterized SVG; only written when the
					// cache directory is guaranteed to exist (fixes an IOException
					// when CACHE is disabled and .cache/ is absent).
					ImageIO.write(img, "png", new File(".cache", hash + ".png"));
				}
				image.create(img);
			} else {
				image.create(ImageIO.read(stream));
			}
		} finally {
			stream.close();
		}

		try {
			texture.loadFromImage(image);
		} catch (TextureCreationException e) {
			System.err.println(e.getMessage());
		}
	}

	/**
	 * Downloads {@code url} into {@code target}, closing both streams even
	 * on failure (the original left the HTTP stream open, and the output
	 * stream open on error).
	 */
	private static void download(String url, File target) throws IOException {
		try (InputStream http = new URL(url).openStream();
				OutputStream output = new FileOutputStream(target)) {
			byte[] buffer = new byte[1024];
			int n;
			while ((n = http.read(buffer)) != -1) {
				output.write(buffer, 0, n);
			}
		}
	}

	/** @return the GPU texture holding this costume's pixels */
	public Texture getTexture() {
		return texture;
	}

	/** @return the decoded CPU-side image */
	public Image getImage() {
		return image;
	}

	/** @return the rotation center read from the project JSON */
	public Vector2f getCenter() {
		return center;
	}

	/** @return the bitmap resolution read from the project JSON */
	public float getResolution() {
		return resolution;
	}
}
|
<reponame>xcorail/OTB
/*
* Copyright (C) 2005-2017 by Centre National d'Etudes Spatiales (CNES)
*
* This file is licensed under MIT license:
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <cmath>
#include <cstdio>
#include <ossim/base/ossimTrace.h>
#include <ossim/base/ossimKeywordNames.h>
#include <ossimEnvisatAsarModel.h>
#include <otb/GalileanEphemeris.h>
#include <otb/GeographicEphemeris.h>
#include <otb/GMSTDateTime.h>
#include <otb/CivilDateTime.h>
#include <otb/MJDDateTime.h>
#include <otb/PlatformPosition.h>
#include <otb/SensorParams.h>
#include <otb/RefPoint.h>
#include <otb/SarSensor.h>
namespace ossimplugins
{
RTTI_DEF1(ossimEnvisatAsarModel, "ossimEnvisatAsarModel", ossimGeometricSarSensorModel);
// Static trace for debugging
static ossimTrace traceDebug("ossimEnvisatAsarModel:debug");
// Default constructor: no SRGR coefficient sets, zero pixel spacing, and no
// parsed product yet (_EnvisatAsarData is populated by open()).
ossimEnvisatAsarModel::ossimEnvisatAsarModel():
  _n_srgr(0),
  _pixel_spacing(0),
  _EnvisatAsarData(NULL)
{
}
ossimEnvisatAsarModel::~ossimEnvisatAsarModel()
{
  // Release the parsed product data; deleting a null pointer is a no-op,
  // so no guard is required.
  delete _EnvisatAsarData;
}
// Class name used by the ossim RTTI/factory machinery.
ossimString ossimEnvisatAsarModel::getClassName() const
{
  return ossimString("ossimEnvisatAsarModel");
}
// Polymorphic copy; the caller owns the returned object.
ossimObject* ossimEnvisatAsarModel::dup() const
{
  return new ossimEnvisatAsarModel(*this);
}
// Convert an image column into a slant range using the SRGR polynomial
// set valid at the reference point's date. Returns -1 when no SRGR
// coefficient sets are available.
// (Fix: removed the dead CLUM/slantrangetime computation — the slant-range
// time was computed but never used or returned.)
double ossimEnvisatAsarModel::getSlantRangeFromGeoreferenced(double col) const
{
  if (_n_srgr == 0)
    return -1;

  std::vector<double> coefset = FindSRGRSetNumber((_refPoint->get_ephemeris())->get_date());

  // Ground range of the requested column, relative to the set's origin
  // (coefset[0] is the ground-range origin).
  const double GR = _sensor->get_col_direction() * (col) * _pixel_spacing;
  const double relativeGroundRange = GR - coefset[0];

  // 4th-degree polynomial in relative ground range (coefset[1..5]).
  const double slantRange = coefset[1]
    + coefset[2] * relativeGroundRange
    + coefset[3] * pow(relativeGroundRange, 2)
    + coefset[4] * pow(relativeGroundRange, 3)
    + coefset[5] * pow(relativeGroundRange, 4);

  return slantRange;
}
// Parse the ENVISAT ASAR product `file` into _EnvisatAsarData. Returns
// false when the file cannot be opened or its MPH record is not a valid
// ENVISAT data set header.
bool ossimEnvisatAsarModel::open(const ossimFilename& file)
{
  bool retValue = true;

  if (traceDebug())
  {
    ossimNotify(ossimNotifyLevel_DEBUG) << "ossimEnvisatAsarTileSource::open(filename) DEBUG: entered..." << std::endl
      << "Attempting to open file " << file << std::endl;
  }

  /*
   * Creation of the class allowing to store EnvisatAsarData file metadata
   */
  if (_EnvisatAsarData != NULL)
  {
    delete _EnvisatAsarData;
    _EnvisatAsarData = NULL;
  }

  /*
   * Opening and test of the file
   */
  ossimFilename Filename = file;
  ifstream dataFile(Filename.c_str(), ios::in | ios::binary);
  // Fix: also check the stream state. eof() alone is false for a stream
  // that failed to open, which previously deferred the failure to the MPH
  // parse below instead of rejecting the file immediately.
  if (!dataFile || dataFile.eof())
  {
    dataFile.close();
    retValue = false;
  }
  else
  {
    mph mph_rec;
    // test if it is an ENVISAT Data file
    if (dataFile >> mph_rec && mph_rec.is_valid())
    {
      if (traceDebug())
      {
        ossimNotify(ossimNotifyLevel_DEBUG) << "Begin reading EnvisatAsar file" << std::endl;
      }
      /*
       * Reading EnvisatAsarData file data
       */
      dataFile.seekg(0);
      _EnvisatAsarData = new EnvisatAsarData();
      dataFile >> *_EnvisatAsarData;
      dataFile.close();
      if (traceDebug())
      {
        ossimNotify(ossimNotifyLevel_DEBUG) << "End reading EnvisatAsar file" << std::endl;
      }
    }
    else
    {
      dataFile.close();
      retValue = false;
    }
  }
  if (traceDebug())
  {
    ossimNotify(ossimNotifyLevel_DEBUG) << "ossimEnvisatAsarTileSource::open() DEBUG: returning..." << std::endl;
  }
  return retValue;
}
// Serialize all metadata the sensor model needs into `kwl` under `prefix`.
// Returns false when a required record (SPH, MPP, geolocation grids) is
// missing from the parsed EnvisatAsarData.
// (Fix: per-index SRGR keyword names are now built with snprintf, bounding
// writes to the fixed-size `name` buffer instead of unbounded sprintf.)
bool ossimEnvisatAsarModel::saveState(ossimKeywordlist& kwl,
                                      const char* prefix) const
{
  // Scratch buffer for the per-index SRGR keyword names built below.
  char name[64];

  kwl.add(prefix, ossimKeywordNames::TYPE_KW, "ossimEnvisatAsarModel", true);

  /*
   * Adding metadata necessary to the sensor model into the keywordlist
   * Data derived from the SPH record
   */
  sph* sph_rec = _EnvisatAsarData->get_sph();
  if(sph_rec != NULL)
  {
    kwl.add(prefix, "pixel_spacing", sph_rec->get_range_spacing().c_str(), true );
    kwl.add(prefix, "line_time_interval", sph_rec->get_line_time_interval().c_str(), true );
  }
  else
  {
    return false;
  }
  /*
   * Adding metadata necessary to the sensor model into the keywordlist
   * Data derived from the Main Processing Parameters record
   */
  MainProcessingParameters* MPP_rec = _EnvisatAsarData->get_MainProcessingParameters();
  if(MPP_rec != NULL)
  {
    kwl.add(prefix, "num_pix", (double)MPP_rec->get_num_samples_per_line(), true );
    kwl.add(prefix, "num_lines", (double)MPP_rec->get_num_output_lines(), true );
    kwl.add(prefix, "wave_length", MPP_rec->get_radar_freq(), true );
    kwl.add(prefix, "range_sampling_rate", MPP_rec->get_range_samp_rate(), true );
    kwl.add(prefix, "prf", MPP_rec->get_prf(), true );
    kwl.add(prefix, "avg_scene_height", MPP_rec->get_avg_scene_height_ellpsoid(), true );
    kwl.add(prefix, "is_groundrange", MPP_rec->get_srgr_flag(), true );
    // Five platform state vectors (time, position, velocity).
    kwl.add(prefix, "state_vector_time_1_day", (double)MPP_rec->get_state_vector_time_1_day(), true );
    kwl.add(prefix, "state_vector_time_1_sec", (double)MPP_rec->get_state_vector_time_1_sec(), true );
    kwl.add(prefix, "state_vector_time_1_microsec", (double)MPP_rec->get_state_vector_time_1_microsec(), true );
    kwl.add(prefix, "x_pos_1", (double)MPP_rec->get_x_pos_1(), true );
    kwl.add(prefix, "y_pos_1", (double)MPP_rec->get_y_pos_1(), true );
    kwl.add(prefix, "z_pos_1", (double)MPP_rec->get_z_pos_1(), true );
    kwl.add(prefix, "x_vel_1", (double)MPP_rec->get_x_vel_1(), true );
    kwl.add(prefix, "y_vel_1", (double)MPP_rec->get_y_vel_1(), true );
    kwl.add(prefix, "z_vel_1", (double)MPP_rec->get_z_vel_1(), true );
    kwl.add(prefix, "state_vector_time_2_day",(double) MPP_rec->get_state_vector_time_2_day(), true );
    kwl.add(prefix, "state_vector_time_2_sec", (double)MPP_rec->get_state_vector_time_2_sec(), true );
    kwl.add(prefix, "state_vector_time_2_microsec", (double)MPP_rec->get_state_vector_time_2_microsec(), true );
    kwl.add(prefix, "x_pos_2", (double)MPP_rec->get_x_pos_2(), true );
    kwl.add(prefix, "y_pos_2", (double)MPP_rec->get_y_pos_2(), true );
    kwl.add(prefix, "z_pos_2", (double)MPP_rec->get_z_pos_2(), true );
    kwl.add(prefix, "x_vel_2", (double)MPP_rec->get_x_vel_2(), true );
    kwl.add(prefix, "y_vel_2", (double)MPP_rec->get_y_vel_2(), true );
    kwl.add(prefix, "z_vel_2", (double)MPP_rec->get_z_vel_2(), true );
    kwl.add(prefix, "state_vector_time_3_day", (double)MPP_rec->get_state_vector_time_3_day(), true );
    kwl.add(prefix, "state_vector_time_3_sec", (double)MPP_rec->get_state_vector_time_3_sec(), true );
    kwl.add(prefix, "state_vector_time_3_microsec", (double)MPP_rec->get_state_vector_time_3_microsec(), true );
    kwl.add(prefix, "x_pos_3", (double)MPP_rec->get_x_pos_3(), true );
    kwl.add(prefix, "y_pos_3", (double)MPP_rec->get_y_pos_3(), true );
    kwl.add(prefix, "z_pos_3", (double)MPP_rec->get_z_pos_3(), true );
    kwl.add(prefix, "x_vel_3", (double)MPP_rec->get_x_vel_3(), true );
    kwl.add(prefix, "y_vel_3", (double)MPP_rec->get_y_vel_3(), true );
    kwl.add(prefix, "z_vel_3", (double)MPP_rec->get_z_vel_3(), true );
    kwl.add(prefix, "state_vector_time_4_day", (double)MPP_rec->get_state_vector_time_4_day(), true );
    kwl.add(prefix, "state_vector_time_4_sec", (double)MPP_rec->get_state_vector_time_4_sec(), true );
    kwl.add(prefix, "state_vector_time_4_microsec", (double)MPP_rec->get_state_vector_time_4_microsec(), true );
    kwl.add(prefix, "x_pos_4", (double)MPP_rec->get_x_pos_4(), true );
    kwl.add(prefix, "y_pos_4", (double)MPP_rec->get_y_pos_4(), true );
    kwl.add(prefix, "z_pos_4", (double)MPP_rec->get_z_pos_4(), true );
    kwl.add(prefix, "x_vel_4", (double)MPP_rec->get_x_vel_4(), true );
    kwl.add(prefix, "y_vel_4", (double)MPP_rec->get_y_vel_4(), true );
    kwl.add(prefix, "z_vel_4", (double)MPP_rec->get_z_vel_4(), true );
    kwl.add(prefix, "state_vector_time_5_day", (double)MPP_rec->get_state_vector_time_5_day(), true );
    kwl.add(prefix, "state_vector_time_5_sec", (double)MPP_rec->get_state_vector_time_5_sec(), true );
    kwl.add(prefix, "state_vector_time_5_microsec", (double)MPP_rec->get_state_vector_time_5_microsec(), true );
    kwl.add(prefix, "x_pos_5", (double)MPP_rec->get_x_pos_5(), true );
    kwl.add(prefix, "y_pos_5",(double) MPP_rec->get_y_pos_5(), true );
    kwl.add(prefix, "z_pos_5", (double)MPP_rec->get_z_pos_5(), true );
    kwl.add(prefix, "x_vel_5", (double)MPP_rec->get_x_vel_5(), true );
    kwl.add(prefix, "y_vel_5", (double)MPP_rec->get_y_vel_5(), true );
    kwl.add(prefix, "z_vel_5", (double)MPP_rec->get_z_vel_5(), true );
  }
  else
  {
    return false;
  }
  /*
   * Adding metadata necessary to the sensor model into the keywordlist
   * Data derived from the Geolocation Grid record - Reference Point
   */
  GeolocationGrid* GG_rec = _EnvisatAsarData->get_GeolocationGrid(0);
  if(GG_rec != NULL)
  {
    kwl.add(prefix, "first_zero_doppler_time_day", (double)GG_rec->get_first_zero_doppler_time_day(), true );
    kwl.add(prefix, "first_zero_doppler_time_sec", (double)GG_rec->get_first_zero_doppler_time_sec(), true );
    kwl.add(prefix, "first_zero_doppler_time_microsec", (double)GG_rec->get_first_zero_doppler_time_microsec(), true );
    kwl.add(prefix, "line_num", (double)GG_rec->get_line_num(), true );
    kwl.add(prefix, "samp_num", (double)(GG_rec->get_samp_numbers())[0], true );
    kwl.add(prefix, "slant_range_time", (double)(GG_rec->get_slant_range_times())[0], true );
  }
  else
  {
    return false;
  }
  /*
   * Adding metadata necessary to the sensor model into the keywordlist
   * Data derived from the Geolocation Grid record - Corners
   * (first grid record gives the upper corners, grid record 10 the lower)
   */
  GG_rec = _EnvisatAsarData->get_GeolocationGrid(0);
  if(GG_rec != NULL)
  {
    kwl.add(prefix, "UL_line", (double)GG_rec->get_line_num(), true );
    kwl.add(prefix, "UL_col", (double)(GG_rec->get_samp_numbers())[0], true );
    kwl.add(prefix, "UL_lat", (double)(GG_rec->get_lats())[0], true );
    kwl.add(prefix, "UL_lon", (double)(GG_rec->get_longs())[0], true );
    kwl.add(prefix, "UR_line", (double)GG_rec->get_line_num(), true );
    kwl.add(prefix, "UR_col", (double)(GG_rec->get_samp_numbers())[10], true );
    kwl.add(prefix, "UR_lat", (double)(GG_rec->get_lats())[10], true );
    kwl.add(prefix, "UR_lon", (double)(GG_rec->get_longs())[10], true );
  }
  else
  {
    return false;
  }
  GG_rec = _EnvisatAsarData->get_GeolocationGrid(10);
  if(GG_rec != NULL)
  {
    kwl.add(prefix, "LL_line", (double)GG_rec->get_line_num(), true );
    kwl.add(prefix, "LL_col", (double)(GG_rec->get_samp_numbers())[0], true );
    kwl.add(prefix, "LL_lat", (double)(GG_rec->get_lats())[0], true );
    kwl.add(prefix, "LL_lon", (double)(GG_rec->get_longs())[0], true );
    kwl.add(prefix, "LR_line", (double)GG_rec->get_line_num(), true );
    kwl.add(prefix, "LR_col", (double)(GG_rec->get_samp_numbers())[10], true );
    kwl.add(prefix, "LR_lat", (double)(GG_rec->get_lats())[10], true );
    kwl.add(prefix, "LR_lon", (double)(GG_rec->get_longs())[10], true );
  }
  else
  {
    return false;
  }
  /*
   * Adding metadata necessary to the sensor model into the keywordlist
   * (SRGR conversion coefficient sets; optional, n_srgr may stay 0)
   */
  int n_srgr = 0;
  SRGRConversionParameters * SRGRParameters = _EnvisatAsarData->get_SRGRConversionParameters(0);
  if(SRGRParameters != NULL)
  {
    n_srgr = _EnvisatAsarData->get_num_ds(SRGRParameters);

    for (int i=0;i<n_srgr;i++)
    {
      SRGRConversionParameters * SRGR = _EnvisatAsarData->get_SRGRConversionParameters(i);

      snprintf(name, sizeof(name), "srgr_update_day%i",i);
      kwl.add(prefix, name, (double)SRGR->get_first_zero_doppler_time_day(),true);

      snprintf(name, sizeof(name), "srgr_update_sec%i",i);
      kwl.add(prefix, name, (double)SRGR->get_first_zero_doppler_time_sec(),true);

      snprintf(name, sizeof(name), "srgr_update_microsec%i",i);
      kwl.add(prefix, name, (double)SRGR->get_first_zero_doppler_time_microsec(),true);

      snprintf(name, sizeof(name), "srgr_coef%iA",i);
      kwl.add(prefix, name,(double)SRGR->get_ground_range_origin(),true);

      snprintf(name, sizeof(name), "srgr_coef%iB",i);
      kwl.add(prefix, name,(double)SRGR->get_srgr_coef()[0],true);
      snprintf(name, sizeof(name), "srgr_coef%iC",i);
      kwl.add(prefix, name,(double)SRGR->get_srgr_coef()[1],true);
      snprintf(name, sizeof(name), "srgr_coef%iD",i);
      kwl.add(prefix, name,(double)SRGR->get_srgr_coef()[2],true);
      // The last two coefficients are stored scaled by 1e10.
      snprintf(name, sizeof(name), "srgr_coef%iE",i);
      kwl.add(prefix, name,(double)SRGR->get_srgr_coef()[3]*1e10,true);
      snprintf(name, sizeof(name), "srgr_coef%iF",i);
      kwl.add(prefix, name,(double)SRGR->get_srgr_coef()[4]*1e10,true);
    }
  }
  kwl.add(prefix, "n_srgr" , n_srgr,true);

  return true;
}
/**
 * Restores the full model state from a keywordlist previously produced by
 * saveState: sensor parameters, platform ephemerides, the reference point
 * and the SRGR coefficient sets.
 *
 * Returns false if any sub-initialization fails (the previous version
 * ignored the Init* results and always returned true, silently masking
 * a partially-initialized model). All four stages still run so that as
 * much state as possible is restored.
 */
bool ossimEnvisatAsarModel::loadState (const ossimKeywordlist &kwl,
                                       const char *prefix)
{
   bool result = InitSensorParams(kwl, prefix);
   result = InitPlatformPosition(kwl, prefix) && result;
   result = InitRefPoint(kwl, prefix) && result;
   result = InitSRGR(kwl, prefix) && result;
   return result;
}
/**
 * Prints this model's own data members (pixel spacing, SRGR set count)
 * followed by the base-class state, restoring the stream's formatting
 * flags before returning.
 */
std::ostream& ossimEnvisatAsarModel::print(std::ostream& out) const
{
// Capture the original flags so fixed/precision changes do not leak.
std::ios_base::fmtflags f = out.flags();
out << setprecision(15) << setiosflags(ios::fixed)
<< "\nossimEnvisatAsarModel data members:\n"
<< "_pixel_spacing: " << _pixel_spacing << "\n"
<< "_n_srgr: " << _n_srgr << "\n";
// Delegate the shared SAR-model members to the base class.
ossimGeometricSarSensorModel::print(out);
// Reset flags.
out.setf(f);
return out;
}
/**
 * Builds the SensorParams object from keywordlist metadata: radar
 * wavelength, range sampling frequency, azimuth frequency (PRF for slant
 * range products, 1/line_time_interval for georeferenced ones) and the
 * column/line time directions.
 *
 * NOTE(review): kwl.find() results are passed straight to atof/atoi with
 * no null check -- a missing key would dereference NULL. Confirm the
 * keywordlist is always populated by saveState before loadState runs.
 */
bool ossimEnvisatAsarModel::InitSensorParams(const ossimKeywordlist &kwl, const char *prefix)
{
const double CLUM = 2.99792458e+8;
// NOTE(review): dividing c by the stored value implies the "wave_length"
// key actually holds the carrier frequency in Hz -- confirm on the writer side.
const char* wave_length_str = kwl.find(prefix,"wave_length");
double wave_length = CLUM / atof(wave_length_str);
const char* fr_str = kwl.find(prefix,"range_sampling_rate");
double fr = atof(fr_str);
// product type : Slant Range or Ground Range
_isProductGeoreferenced = atoi(kwl.find(prefix,"is_groundrange"));
// Azimuth sampling frequency: PRF directly, or derived from line timing.
double fa;
if (!_isProductGeoreferenced)
{
const char* fa_str = kwl.find(prefix,"prf");
fa = atof(fa_str);
}
else
{
const char* fa_str = kwl.find(prefix,"line_time_interval");
fa = 1.0/atof(fa_str);
}
// TODO check that this is OK for IMP products
const char* time_dir_pix = "INCREASE";
const char* time_dir_lin = "INCREASE";
// Replace any previously-built sensor object.
if(_sensor != NULL)
{
delete _sensor;
}
_sensor = new SensorParams();
// Both directions are currently hard-coded to "INCREASE" above, so the
// else branches are effectively dead until the TODO is resolved.
if(strcmp(time_dir_pix, "INCREASE") == 0)
{
_sensor->set_col_direction(1);
}
else
{
_sensor->set_col_direction(-1);
}
if(strcmp(time_dir_lin, "INCREASE") == 0)
{
_sensor->set_lin_direction(1);
}
else
{
_sensor->set_lin_direction(-1);
}
_sensor->set_prf(fa);
_sensor->set_sf(fr);
_sensor->set_rwl(wave_length);
return true;
}
/**
 * Builds the PlatformPosition interpolator from five state vectors stored
 * in the keywordlist. Each state vector is an MJD timestamp plus position
 * and velocity triplets.
 *
 * NOTE(review): positions/velocities are parsed with atoi() then scaled by
 * 1e-2 / 1e-5 -- this assumes the stored values are integers in scaled
 * units; any fractional part written by saveState would be truncated.
 * Confirm against the writer side.
 */
bool ossimEnvisatAsarModel::InitPlatformPosition(const ossimKeywordlist &kwl, const char *prefix)
{
// Fixed number of ephemeris samples stored in the metadata (1-based keys).
const int neph = 5;
// long val_long;
// unsigned long val_ulong1;
// unsigned long val_ulong2;
CivilDateTime ref_civil_date;
Ephemeris** ephemeris = new Ephemeris*[neph];
for (int i=1;i<=neph;i++)
{
double pos[3];
double vel[3];
char name[64];
/*
 * Ephemeris timestamp (MJD day / seconds / microseconds).
 */
sprintf(name,"state_vector_time_%i_day",i);
long day = (long) atof(kwl.find(prefix,name));
sprintf(name,"state_vector_time_%i_sec",i);
unsigned long sec = (unsigned long) atof(kwl.find(prefix,name));
sprintf(name,"state_vector_time_%i_microsec",i);
unsigned long microsec = (unsigned long) atof(kwl.find(prefix,name));
MJDDateTime mjdDate(day, sec, microsec);
JSDDateTime jsd_date(mjdDate);
/*
 * Ephemeris position (scaled by 1e-2 -- presumably cm to m; TODO confirm).
 */
sprintf(name,"x_pos_%i",i);
pos[0] = (double) atoi(kwl.find(prefix,name)) * 1.0e-2;
sprintf(name,"y_pos_%i",i);
pos[1] = (double) atoi(kwl.find(prefix,name)) * 1.0e-2;
sprintf(name,"z_pos_%i",i);
pos[2] = (double) atoi(kwl.find(prefix,name)) * 1.0e-2;
/*
 * Ephemeris velocity (scaled by 1e-5; units to be confirmed).
 */
sprintf(name,"x_vel_%i",i);
vel[0] = (double) atoi(kwl.find(prefix,name)) * 1.0e-5;
sprintf(name,"y_vel_%i",i);
vel[1] = (double) atoi(kwl.find(prefix,name)) * 1.0e-5;
sprintf(name,"z_vel_%i",i);
vel[2] = (double) atoi(kwl.find(prefix,name)) * 1.0e-5;
GeographicEphemeris* eph = new GeographicEphemeris(jsd_date, pos, vel);
ephemeris[i-1] = eph;
}
// Replace any previously-built platform position.
if (_platformPosition != NULL)
{
delete _platformPosition;
}
_platformPosition = new PlatformPosition(ephemeris,neph);
/*
 * Free the temporary ephemeris list:
 * the PlatformPosition constructor copies the ephemerides.
 */
for (int i=0;i<neph;i++)
{
delete ephemeris[i];
}
delete[] ephemeris;
return true;
}
/**
 * Initializes the reference point (image position, slant-range distance,
 * interpolated ephemeris) and then refines the model by optimizing over
 * the four corner ground control points stored in the keywordlist.
 *
 * NOTE(review): the corner/height lookups below call kwl.find() WITHOUT
 * the prefix argument, unlike every other lookup in this file -- confirm
 * whether corner keys are intentionally stored unprefixed.
 */
bool ossimEnvisatAsarModel::InitRefPoint(const ossimKeywordlist &kwl, const char *prefix)
{
// Reference image position
if(_refPoint == NULL)
{
_refPoint = new RefPoint();
}
// Stored values are 1-based; the model uses 0-based pixel coordinates.
double sc_pix = atof(kwl.find(prefix,"samp_num")) - 1.0;
_refPoint->set_pix_col(sc_pix);
double sc_lin = atof(kwl.find(prefix,"line_num")) - 1.0;
_refPoint->set_pix_line(sc_lin);
// Reference Point distance
// Two-way slant-range time (stored in ns, hence 1e-9) converted to
// one-way distance via c/2.
double c = 2.99792458e+8;
double distance = atof(kwl.find(prefix,"slant_range_time")) * (c/2.0) * 1.0e-9;
_refPoint->set_distance(distance);
// Reference Point Ephemeris
long day_ref = (long)atof(kwl.find(prefix,"first_zero_doppler_time_day"));
unsigned long sec_ref = (unsigned long)atof(kwl.find(prefix,"first_zero_doppler_time_sec"));
unsigned long microsec_ref = (unsigned long)atof(kwl.find(prefix,"first_zero_doppler_time_microsec"));
MJDDateTime mjdDate_ref(day_ref, sec_ref, microsec_ref);
JSDDateTime jsd_date_ref(mjdDate_ref);
if(_platformPosition != NULL)
{
Ephemeris * ephemeris = _platformPosition->Interpolate(jsd_date_ref);
if (ephemeris == NULL) return false ;
_refPoint->set_ephemeris(ephemeris);
// Presumably set_ephemeris copies; the interpolated object is freed here.
delete ephemeris;
}
else return false;
// Adjustement of the model using the four corner points
// in order to use ossimSensorModel::lineSampleToWorld
const char* nbCol_str = kwl.find(prefix,"num_pix");
const char* nbLin_str = kwl.find(prefix,"num_lines");
theImageSize.x = atoi(nbCol_str);
theImageSize.y = atoi(nbLin_str);
theImageClipRect = ossimDrect(0, 0, theImageSize.x-1, theImageSize.y-1);
// Ground Control Points extracted from the model : corner points
// Lat/lon are stored as micro-degrees (hence 1e-6); longitudes are
// normalized from [0,360) to (-180,180].
std::list<ossimGpt> groundGcpCoordinates ;
std::list<ossimDpt> imageGcpCoordinates ;
double h = atof(kwl.find("avg_scene_height"));
// first line first pix
double line = atof(kwl.find("UL_line")) - 1;
double col = atof(kwl.find("UL_col")) - 1;
double lon = atof(kwl.find("UL_lon")) * 1e-6;
double lat = atof(kwl.find("UL_lat")) * 1e-6;
if (lon > 180.0) lon -= 360.0;
ossimDpt imageGCP1(col,line);
ossimGpt groundGCP1(lat, lon, h);
groundGcpCoordinates.push_back(groundGCP1) ;
imageGcpCoordinates.push_back(imageGCP1) ;
// first line last pix
line = atof(kwl.find("UR_line")) - 1;
col = atof(kwl.find("UR_col")) - 1;
lon = atof(kwl.find("UR_lon")) * 1e-6;
lat = atof(kwl.find("UR_lat")) * 1e-6;
if (lon > 180.0) lon -= 360.0;
ossimDpt imageGCP2(col,line);
ossimGpt groundGCP2(lat, lon, h);
groundGcpCoordinates.push_back(groundGCP2) ;
imageGcpCoordinates.push_back(imageGCP2) ;
// last line last pix
line = atof(kwl.find("LR_line")) - 1;
col = atof(kwl.find("LR_col")) - 1;
lon = atof(kwl.find("LR_lon")) * 1e-6;
lat = atof(kwl.find("LR_lat")) * 1e-6;
if (lon > 180.0) lon -= 360.0;
ossimDpt imageGCP3(col,line);
ossimGpt groundGCP3(lat, lon, h);
groundGcpCoordinates.push_back(groundGCP3) ;
imageGcpCoordinates.push_back(imageGCP3) ;
// last line first pix
line = atof(kwl.find("LL_line")) - 1;
col = atof(kwl.find("LL_col")) - 1;
lon = atof(kwl.find("LL_lon")) * 1e-6;
lat = atof(kwl.find("LL_lat")) * 1e-6;
if (lon > 180.0) lon -= 360.0;
ossimDpt imageGCP4(col,line);
ossimGpt groundGCP4(lat, lon, h);
groundGcpCoordinates.push_back(groundGCP4) ;
imageGcpCoordinates.push_back(imageGCP4) ;
// Default optimization
optimizeModel(groundGcpCoordinates, imageGcpCoordinates) ;
return true;
}
/**
 * Loads the slant-range-to-ground-range (SRGR) conversion data: for each
 * of the _n_srgr sets, an update timestamp plus six polynomial
 * coefficients A..F. Coefficients E and F are rescaled by 1e-10, mirroring
 * the 1e10 scaling applied when they were written into the keywordlist.
 */
bool ossimEnvisatAsarModel::InitSRGR(const ossimKeywordlist &kwl, const char *prefix)
{
// product type : Slant Range or Ground Range
_isProductGeoreferenced = atoi(kwl.find(prefix,"is_groundrange"));
// pixel spacing
_pixel_spacing = atof(kwl.find(prefix,"pixel_spacing"));
// number of SRGR sets
_n_srgr = atoi(kwl.find(prefix,"n_srgr"));
char name[64];
for (int i=0;i<_n_srgr;i++)
{
// SRGR update time
sprintf(name,"srgr_update_day%i",i);
long day_ref = (long)atof(kwl.find(prefix,name));
sprintf(name,"srgr_update_sec%i",i);
unsigned long sec_ref = (unsigned long)atof(kwl.find(prefix,name));
sprintf(name,"srgr_update_microsec%i",i);
unsigned long microsec_ref = (unsigned long)atof(kwl.find(prefix,name));
MJDDateTime mjdDate_ref(day_ref, sec_ref, microsec_ref);
JSDDateTime jsd_date_ref(mjdDate_ref);
_srgr_update.push_back(jsd_date_ref);
std::vector<double> srgr_set;
// SRGR coefficients
sprintf(name,"srgr_coef%iA",i);
const char* srgr_coef_strA = kwl.find(prefix,name);
srgr_set.push_back(atof(srgr_coef_strA));
sprintf(name,"srgr_coef%iB",i);
const char* srgr_coef_strB = kwl.find(prefix,name);
srgr_set.push_back(atof(srgr_coef_strB));
sprintf(name,"srgr_coef%iC",i);
const char* srgr_coef_strC = kwl.find(prefix,name);
srgr_set.push_back(atof(srgr_coef_strC));
sprintf(name,"srgr_coef%iD",i);
const char* srgr_coef_strD = kwl.find(prefix,name);
srgr_set.push_back(atof(srgr_coef_strD));
// E and F were stored pre-multiplied by 1e10; undo that here.
sprintf(name,"srgr_coef%iE",i);
const char* srgr_coef_strE = kwl.find(prefix,name);
srgr_set.push_back(atof(srgr_coef_strE)*1e-10);
sprintf(name,"srgr_coef%iF",i);
const char* srgr_coef_strF = kwl.find(prefix,name);
srgr_set.push_back(atof(srgr_coef_strF)*1e-10);
_srgr_coefset.push_back(srgr_set);
}
return true;
}
/**
 * Returns the SRGR coefficient set whose update time is closest to `date`.
 * Closeness is measured on the seconds + decimal part of the JSD date only
 * (the day component is ignored -- presumably all sets share the same day;
 * TODO confirm).
 *
 * Returns an empty vector when no SRGR set is loaded. The previous version
 * read delays[0] unconditionally, which is undefined behavior when
 * _n_srgr == 0 (the commented-out guard suggests this was known).
 */
std::vector<double> ossimEnvisatAsarModel::FindSRGRSetNumber(JSDDateTime date) const
{
   if (_n_srgr == 0) return std::vector<double>();

   // Absolute time offset between `date` and each SRGR update time.
   std::vector<double> delays;
   for (int i = 0; i < _n_srgr; i++)
   {
      JSDDateTime datetmp(_srgr_update.at(i));
      double delay = date.get_second() + date.get_decimal()
                   - (datetmp.get_second() + datetmp.get_decimal());
      delays.push_back(fabs(delay));
   }

   // Pick the set with the smallest offset.
   int setNumber = 0;
   double min_delay = delays[0];
   for (int i = 1; i < _n_srgr; i++)
   {
      if (delays[i] < min_delay)
      {
         setNumber = i;
         min_delay = delays[i];
      }
   }
   return _srgr_coefset[setNumber];
}
}
|
# Yogo Data Management Toolkit
# Copyright (c) 2010 Montana State University
#
# License -> see license.txt
#
# FILE: preinitilizer.rb
#
#
require File.join(File.dirname(__FILE__), '..', 'lib', 'slash_path')

begin
  # Prefer the locked bundle environment generated by `bundle lock`.
  require File.dirname(__FILE__) / '..' / :'.bundle' / :environment
rescue LoadError
  # Fallback on doing the resolve at runtime.
  require "rubygems"
  require "bundler"
  # Compare as version objects, not strings: lexicographically
  # "0.10.0" <= "0.9.10" is true, which would wrongly reject newer bundlers.
  if Gem::Version.new(Bundler::VERSION) <= Gem::Version.new("0.9.10")
    raise RuntimeError, "Bundler incompatible.\n" +
      "Your bundler version is incompatible with Rails 2.3 and an unlocked bundle.\n" +
      "Run `gem install bundler` to upgrade or `bundle lock` to lock."
  else
    Bundler.setup
  end
end

# This ensures the gems load in the proper order.
require 'dm-core'
|
<filename>src/inlineParsers/html/captureInlineHTML.ts
import { exec } from 'utils'
import { INodeHTML, IParsed } from 'models'
// Matcher for a single whitelisted phrase-level HTML element at the start
// of the input: either self-closing, or with a close tag enforced by the
// back-reference \1.
const execInlineHTML = exec(/^(?:<(a|em|strong|small|s|cite|q|dfn|abbr|data|time|code|var|samp|kbd|sub|sup|i|b|u|mark|ruby|rt|rp|bdi|bdo|span|br|wbr|ins|del|img))(?: *\S+=['"].*['"])*(?: *\/>|>.*?<\/\1>)/) // tslint:disable-line max-line-length

/**
 * Tries to capture one inline HTML element at the start of `source`.
 * Returns an `html` token plus the remaining source, or null if the input
 * does not begin with a recognized inline element.
 */
const captureInlineHTML = (source: string): IParsed<INodeHTML> | null => {
  // Cheap pre-check before running the regex.
  if (source[0] !== '<') {
    return null
  }

  const match = execInlineHTML(source)
  if (!match) {
    return null
  }

  const value = match[0]
  return {
    token: {
      type: 'html',
      value
    },
    newSource: source.slice(value.length)
  }
}

export { captureInlineHTML }
|
<filename>src/server/apiProxy.js
import request from 'request'
import { parse } from 'url'
const { API_HOST } = process.env
const apiHostname = parse(API_HOST).hostname
/**
 * Express middleware that proxies /noo and /admin/kue requests to the
 * upstream API host, streaming the request body up and the response back.
 * Everything else falls through to the next handler.
 */
export default function apiProxy (req, res, next) {
  const { originalUrl } = req
  const shouldProxy = originalUrl.startsWith('/noo') ||
    originalUrl.startsWith('/admin/kue')
  if (!shouldProxy) return next()

  const url = API_HOST + originalUrl
  console.log(`${req.method} ${url}`)

  // Some versions of `request` only expose `del`; alias it so the
  // method-name lookup below works for DELETE too.
  request.delete = request.delete || request.del
  const makeRequest = request[req.method.toLowerCase()]

  // Rewrite the Host header so the upstream sees its own hostname.
  const headers = { ...req.headers, host: apiHostname }
  const upstreamReq = makeRequest(url, { headers, followRedirect: false })

  req.pipe(upstreamReq)
  .on('error', err => console.error('✗ ' + err.message))
  .pipe(res)
}
|
#!/bin/bash
# Digital-signage loop: wait for the system to come up, sync the image
# repository, then show the slideshow and re-sync every 30 minutes.

sleep 30                       # give the network/display time to come up
cd /opt/spaceinfo
git pull origin master

#feh --file list.txt -F -D 1 -z -Z
while true; do
    echo "New loop started"
    # Show each image for 20s on tty1; -u updates the file list each pass.
    sudo fbi -T 1 -t 20 /opt/spaceinfo/imgs/*.jpg -a -noverbose -u
    sleep 1800                 # 30 minutes between refreshes
    git pull origin master
    sudo killall fbi
done
|
// Print the second university acronym ("CUET") from the list.
var myArray = ["BUET", "CUET", "RUET", "KUET", "DUET", "BUTEX"];
var second = myArray[1];
console.log(second);
<reponame>secoya/hablar.js<gh_stars>0
import { builders as b } from 'ast-types';
import * as ASTTypes from 'ast-types/gen/kinds';
/**
 * Code-generation context holding the canonical identifier AST nodes that
 * emitted code refers to. Sharing one identifier node name per concept
 * keeps every generated reference consistent.
 */
export default class Context {
// Runtime translation-context object passed to generated functions.
public ctxExpr: ASTTypes.IdentifierKind = b.identifier('ctx');
// Helper that HTML-encodes a value only when it is a string.
public encodeIfStringExpr: ASTTypes.IdentifierKind = b.identifier('encodeIfString');
// Object holding user-supplied translation functions.
public functionsExpr: ASTTypes.IdentifierKind = b.identifier('fns');
// Helper implementing the '+' operator semantics of the template language.
public plusOpExpr: ASTTypes.IdentifierKind = b.identifier('plusOp');
// Scratch variable used by emitted type guards.
public typeGuardScratchVarExpr: ASTTypes.IdentifierKind = b.identifier('_');
// Mutable flag: set when code generation actually emits the scratch var,
// so callers know whether to declare it.
public usesTypeGuardScratchVariable: boolean = false;
// Object holding the interpolation variables.
public varsExpr: ASTTypes.IdentifierKind = b.identifier('vars');
}
|
# -*- coding: utf-8 -*-
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
class ZeveraCom(MultiHoster):
    """Zevera.com multi-hoster plugin for pyLoad."""
    __name__    = "ZeveraCom"
    __type__    = "hoster"
    __version__ = "0.25"

    __pattern__ = r'http://(?:www\.)?zevera\.com/.+'

    __description__ = """Zevera.com hoster plugin"""
    __license__     = "GPLv3"
    __authors__     = [("zoidberg", "zoidberg@mujmail.cz")]

    def handlePremium(self):
        """Resolve a premium download link via the Zevera account API.

        Fails the download when the link is reported offline or when no
        redirect location is returned.
        """
        if self.account.getAPIData(self.req, cmd="checklink", olink=self.pyfile.url) != "Alive":
            self.fail(_("Offline or not downloadable"))

        header = self.account.getAPIData(self.req, just_header=True, cmd="generatedownloaddirect", olink=self.pyfile.url)
        if "location" not in header:
            self.fail(_("Unable to initialize download"))
        self.link = header['location']

    def checkFile(self):
        """Post-download check: detect Zevera's HTML error page."""
        super(ZeveraCom, self).checkFile()

        # BUGFIX: the original used `is "error"`, which compares object
        # identity, not equality, and is never reliably true. Use `==`.
        if self.checkDownload({"error": 'action="ErrorDownload.aspx'}) == "error":
            self.fail(_("Error response received - contact Zevera support"))


getInfo = create_getInfo(ZeveraCom)
const path = require('path');

// Aggregate this directory's public classes into a single export.
const Rule = require(path.join(__dirname, 'Rule'));
const State = require(path.join(__dirname, 'State'));

module.exports = { Rule, State };
|
# ----------------------------- kiwi common commands start ------------------------------
# Print each whitespace-separated word on its own line, to make long
# argument lists readable. Reads stdin when called without arguments. ##
function WordsToLines()
{
    if [ $# -eq 0 ]; then
        # stdin: no arguments, transform standard input ##
        cat /dev/stdin | sed 's/\s\s*/\n/g'
    else
        # args: arguments given, transform them directly ##
        echo "$@" | sed 's/\s\s*/\n/g'
    fi
}
# Colorized cat: cycles through terminal colors line by line so long,
# wrapped lines remain visually distinguishable. Reads stdin when called
# without arguments. ##
function ColorLines()
{
    local colors=(31 32 33 34 35 36 37)
    local count=0
    [ $# -eq 0 ] && local files=/dev/stdin   # stdin: no files given ##
    [ $# -ne 0 ] && local files="$@"         # files: process the named files ##
    for file in $files
    do
        # BUGFIX: read via redirection instead of `cat | while`, which ran
        # the loop in a subshell and reset `count` for every file, so the
        # color cycle restarted per file instead of continuing across files.
        while read line
        do
            local color="${colors[$((count++ % ${#colors[@]}))]}"
            echo -e "\e[${color}m${line}\e[0m"
        done < $file
    done
}
# Follow a log file while mirroring new lines into tee.<name> next to it. ##
function Tailf()
{
    [ $# -ne 1 ] && echo "Usage: $FUNCNAME <logFileName>" && return 1
    local log=$(readlink -m $1)
    local teeLog=$(dirname $log)/tee.$(basename $log)
    [ ! -f $log ] && echo "[$log] is not a file!" && return 1
    [ -e $teeLog -a ! -f $teeLog ] && echo "[$teeLog] is not a file!" && return 1
    # Truncate the mirror file before starting a new session.
    >$teeLog
    # `tailf` is deprecated/removed in modern util-linux; `tail -f` is the
    # portable equivalent.
    tail -f $log | tee -a $teeLog
}
# Shorthand find: case-insensitive filename search under the current
# directory with the match highlighted. ##
function Find()
{
    if [ $# -ne 1 ]; then
        echo "Usage: $FUNCNAME <searchString>"
        return 1
    fi
    find . -iname "*$1*" | grep -i --color "$1"
}
# Shorthand grep: recursive, case-insensitive, with line numbers and
# color highlighting. ##
function Grep()
{
    if [ $# -ne 1 ]; then
        echo "Usage: $FUNCNAME <searchString>"
        return 1
    fi
    grep -inr --color "$1" .
    # Alternative approach: ##
    # echo "alias Grep='find . | xargs grep -inr --color'" >>$HOME/.bashrc##
}
# Shorthand du: report disk usage one directory level deep. ##
function Du()
{
    du --max-depth=1 "$@"
    # Alternative approach: ##
    # echo "alias Du='du --max-depth=1'" >>$HOME/.bashrc##
}
# Shorthand ps: full process list filtered case-insensitively, with the
# grep process itself excluded. ##
function Ps()
{
    if [ $# -ne 1 ]; then
        echo "Usage: $FUNCNAME <searchString>"
        return 1
    fi
    ps -efww | grep -v grep | grep -i --color "$1"
}
# Shorthand netstat: all sockets, numeric, with owning process, filtered
# case-insensitively. ##
function Netstat()
{
    if [ $# -ne 1 ]; then
        echo "Usage: $FUNCNAME <searchString>"
        return 1
    fi
    netstat -anp | grep -i --color "$1"
}
# Show live RX/TX throughput of a network interface, sampled once per
# second from /proc/net/dev, until interrupted (Ctrl-C). ##
function NicRate()
{
[ $# -ne 1 ] && echo "Usage: $FUNCNAME <ethX>" && return 1
while [ "1" ]
do
eth=$1
# Byte counters before and after a 1-second sleep; in /proc/net/dev
# field 2 is RX bytes and field 10 is TX bytes.
RXpre=$(cat /proc/net/dev | grep $eth | tr : " " | awk '{print $2}')
TXpre=$(cat /proc/net/dev | grep $eth | tr : " " | awk '{print $10}')
sleep 1
RXnext=$(cat /proc/net/dev | grep $eth | tr : " " | awk '{print $2}')
TXnext=$(cat /proc/net/dev | grep $eth | tr : " " | awk '{print $10}')
clear
# Per-second deltas, then human-readable scaling (B/KB/MB per second).
RX=$((${RXnext} - ${RXpre}))
TX=$((${TXnext} - ${TXpre}))
if [[ $RX -lt 1024 ]]; then
RX="${RX}B/s"
elif [[ $RX -gt 1048576 ]]; then
RX=$(echo $RX | awk '{print $1/1048576 "MB/s"}')
else
RX=$(echo $RX | awk '{print $1/1024 "KB/s"}')
fi
if [[ $TX -lt 1024 ]]; then
TX="${TX}B/s"
elif [[ $TX -gt 1048576 ]]; then
TX=$(echo $TX | awk '{print $1/1048576 "MB/s"}')
else
TX=$(echo $TX | awk '{print $1/1024 "KB/s"}')
fi
# -e makes \n and \t escapes effective ##
echo -e "$(date +%k:%M:%S) RX(receive) TX(transport) \n$eth $RX $TX" | column -t
done
}
# Count TCP connections per state (ESTABLISHED, TIME_WAIT, ...) to help
# diagnose connection-load problems. ##
function TcpConnectionStateCounter()
{
# awk buckets each `tcp` line by its last field (the connection state),
# then prints "state count" pairs, sorted by count descending.
netstat -an | awk '/^tcp/ {++status_count_array[$NF]} END{for(status in status_count_array) print status, status_count_array[status]}' | column -t | sort -nr -k2
}
# Shorthand tcpdump: capture full packets (-s 0) with hex+ASCII dump on a
# port, optionally restricted to one interface. Echoes the exact command
# before running it. ##
function Tcpdump()
{
    local cmd
    if [ $# -eq 1 ]; then
        cmd="tcpdump -nSX -s 0 port $1"
    elif [ $# -eq 2 ]; then
        cmd="tcpdump -nSX -s 0 -i $1 port $2"
    else
        echo "Usage1: $FUNCNAME <port>"
        echo "Usage2: $FUNCNAME <ethX> <port>"
        return 1
    fi
    echo "$cmd"
    $cmd
}
# Shorthand zip: archive a file or directory into
# <name>-<YYYYmmdd-HHMMSS>.zip. ##
function Zip()
{
    # Validate arguments. ##
    if [ $# -ne 1 ]; then
        echo "Usage: $FUNCNAME <file or dir>"
        return 1
    fi
    # Strip any trailing slash so directory names archive cleanly. ##
    local file=${1%/}
    # Make sure the target exists. ##
    if [ ! -e $file ]; then
        echo "[$file] does not exist!"
        return 1
    fi
    # Create the timestamped archive. ##
    zip -r $file-$(date "+%Y%m%d-%H%M%S").zip $file
}
# Print a summary of the helper commands defined in this file. ##
function HelpLinux()
{
    # A quoted heredoc keeps the text verbatim (no expansion).
    cat <<'EOF'
WordsToLines 按行输出参数,方便人眼查看过多的参数
ColorLines 彩色版cat命令,方便人眼区分不同的行
Tailf tailf日志并用tee重定向
Find 简化find用法
Grep 简化grep用法
Du 简化du用法
Ps 简化ps用法
Netstat 简化netstat用法
NicRate 查看网卡速率
TcpConnectionStateCounter 统计各个TCP连接状态的个数
Tcpdump 简化tcpdump用法
Zip 简化zip用法

HelpLinux 展示帮助信息
EOF
}
# Miscellaneous convenience aliases ##
alias l='ls -alF'
alias ll='ls -l'
# Quick directory-climbing shortcuts.
alias ..='cd ..'
alias ...='cd ../..'
alias ....='cd ../../..'
alias cd..='cd ..'
alias cd...='cd ../..'
alias cd....='cd ../../..'
alias vi='vim'
# `tailf` is deprecated on modern systems; map it to `tail -f`.
alias tailf='tail -f'
alias grep='grep --color=auto'
# ----------------------------- kiwi common commands end --------------------------------
|
<reponame>istxing/kgo<filename>encrypt_test.go
package kgo
import (
"crypto/aes"
"fmt"
"strings"
"testing"
)
// TestBase64Encode checks that a 26-byte input encodes to a padded
// base64 string (standard encoding must end with "=").
func TestBase64Encode(t *testing.T) {
	input := []byte("This is an string to encod")
	encoded := KEncr.Base64Encode(input)
	if !strings.HasSuffix(encoded, "=") {
		t.Error("Base64Encode fail")
		return
	}
}
// BenchmarkBase64Encode measures Base64Encode on a short input.
func BenchmarkBase64Encode(b *testing.B) {
	str := []byte("This is an string to encod")
	// Reset AFTER setup so input allocation is excluded from the measurement
	// (the old code reset first, timing the setup too).
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		KEncr.Base64Encode(str)
	}
}
// TestBase64Decode checks that valid base64 decodes without error and
// that malformed input is rejected.
func TestBase64Decode(t *testing.T) {
	str := "VGhpcyBpcyBhbiBlbmNvZGVkIHN0cmluZw=="
	if _, err := KEncr.Base64Decode(str); err != nil {
		t.Error("Base64Decode fail")
		return
	}
	if _, err := KEncr.Base64Decode("#iu3498r"); err == nil {
		t.Error("Base64Decode fail")
		return
	}
	// Coverage-only calls for inputs with truncated padding; results are
	// deliberately discarded (the old `_, err =` assignments were dead).
	_, _ = KEncr.Base64Decode("VGhpcy")
	_, _ = KEncr.Base64Decode("VGhpcyB")
}
// BenchmarkBase64Decode measures Base64Decode on a short valid input.
func BenchmarkBase64Decode(b *testing.B) {
	str := "VGhpcyBpcyBhbiBlbmNvZGVkIHN0cmluZw=="
	// Reset after setup so only the decode loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.Base64Decode(str)
	}
}
// TestBase64UrlEncodeDecode round-trips a string through the URL-safe
// base64 variant and verifies that no '=' padding is emitted.
func TestBase64UrlEncodeDecode(t *testing.T) {
	input := []byte("This is an string to encod")
	encoded := KEncr.Base64UrlEncode(input)
	if strings.HasSuffix(encoded, "=") {
		t.Error("Base64UrlEncode fail")
		return
	}
	if _, err := KEncr.Base64UrlDecode(encoded); err != nil {
		t.Error("Base64UrlDecode fail")
		return
	}
}
// BenchmarkBase64UrlEncode measures the URL-safe base64 encoder.
func BenchmarkBase64UrlEncode(b *testing.B) {
	str := []byte("This is an string to encod")
	// Reset after setup so only the encode loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		KEncr.Base64UrlEncode(str)
	}
}
// BenchmarkBase64UrlDecode measures the URL-safe base64 decoder.
func BenchmarkBase64UrlDecode(b *testing.B) {
	str := "VGhpcyBpcyBhbiBzdHJpbmcgdG8gZW5jb2Q"
	// Reset after setup so only the decode loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.Base64UrlDecode(str)
	}
}
// TestAuthCode round-trips a string through the reversible AuthCode
// encode/decode, then exercises edge inputs (expired timestamp, empty
// strings, malformed payloads) purely for coverage.
func TestAuthCode(t *testing.T) {
	key := "123456"
	plain := "hello world"

	encoded, _ := KEncr.AuthCode(plain, key, true, 0)
	if encoded == "" {
		t.Error("AuthCode Encode fail")
		return
	}
	decoded, _ := KEncr.AuthCode(encoded, key, false, 0)
	if decoded == "" {
		t.Error("AuthCode Decode fail")
		return
	}

	// Coverage-only calls; return values intentionally ignored.
	encoded, _ = KEncr.AuthCode(plain, key, true, -3600) // already expired
	KEncr.AuthCode(encoded, key, false, 0)
	KEncr.AuthCode("", key, true, 0)
	KEncr.AuthCode("", "", true, 0)
	KEncr.AuthCode("7caeNfPt/N1zHdj5k/7i7pol6NHsVs0Cji7c15h4by1RYcrBoo7EEw==", key, false, 0)
	KEncr.AuthCode("<KEY>", key, false, 0)
	KEncr.AuthCode("123456", "", false, 0)
	KEncr.AuthCode("1234#iu3498r", "", false, 0)
}
// BenchmarkAuthCodeEncode measures AuthCode in encode mode.
func BenchmarkAuthCodeEncode(b *testing.B) {
	key := "123456"
	str := "hello world"
	// Reset after setup so only the encode loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		KEncr.AuthCode(str, key, true, 0)
	}
}
// BenchmarkAuthCodeDecode measures AuthCode in decode mode.
func BenchmarkAuthCodeDecode(b *testing.B) {
	key := "123456"
	str := "a79b5do3C9nbaZsAz5j3NQRj4e/L6N+y5fs2U9r1mO0LinOWtxmscg=="
	// Reset after setup so only the decode loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		KEncr.AuthCode(str, key, false, 0)
	}
}
// TestPasswordHashVerify hashes a password and verifies it against the
// produced hash, then exercises assorted cost parameters.
func TestPasswordHashVerify(t *testing.T) {
	pwd := []byte("<PASSWORD>")
	hashed, err := KEncr.PasswordHash(pwd)
	if err != nil {
		t.Error("PasswordHash fail")
		return
	}
	if !KEncr.PasswordVerify(pwd, hashed) {
		t.Error("PasswordVerify fail")
		return
	}
	// Cost-parameter coverage; costs above ~20 are far too slow to run here.
	_, _ = KEncr.PasswordHash(pwd, 1)
	_, _ = KEncr.PasswordHash(pwd, 15)
	_, _ = KEncr.PasswordHash(pwd, 33)
}
// BenchmarkPasswordHash measures PasswordHash, capped at 10 iterations
// because the hash is intentionally slow.
func BenchmarkPasswordHash(b *testing.B) {
	pwd := []byte("<PASSWORD>")
	// Reset after setup so only the hashing loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		if i > 10 {
			break
		}
		_, _ = KEncr.PasswordHash(pwd)
	}
}
// BenchmarkPasswordVerify measures PasswordVerify, capped at 10 iterations
// because verification is intentionally slow.
func BenchmarkPasswordVerify(b *testing.B) {
	pwd := []byte("<PASSWORD>")
	has := []byte("$2a$10$kCv6ljsVuTSI54oPkWulreEm<PASSWORD>/zj0Dgh<PASSWORD>")
	// Reset after setup so only the verification loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		if i > 10 {
			break
		}
		KEncr.PasswordVerify(pwd, has)
	}
}
// TestEasyEncryptDecrypt round-trips a mixed ASCII/UTF-8 string through
// EasyEncrypt/EasyDecrypt and checks the failure paths for bad input.
func TestEasyEncryptDecrypt(t *testing.T) {
	key := "123456"
	plain := "hello world你好!hello world你好!hello world你好!hello world你好!"

	cipher := KEncr.EasyEncrypt(plain, key)
	if cipher == "" {
		t.Error("EasyEncrypt fail")
		return
	}
	if KEncr.EasyDecrypt(cipher, key) != plain {
		t.Error("EasyDecrypt fail")
		return
	}
	// Decrypting text that was never encrypted must yield "".
	if KEncr.EasyDecrypt("你好,世界!", key) != "" {
		t.Error("EasyDecrypt fail")
		return
	}

	// Coverage-only calls; return values intentionally ignored.
	KEncr.EasyEncrypt("", key)
	KEncr.EasyEncrypt("", "")
	KEncr.EasyDecrypt(cipher, "1qwer")
	KEncr.EasyDecrypt("123", key)
	KEncr.EasyDecrypt("1234#iu3498r", key)
}
// BenchmarkEasyEncrypt measures EasyEncrypt on a short UTF-8 input.
func BenchmarkEasyEncrypt(b *testing.B) {
	key := "123456"
	str := "hello world你好"
	// Reset after setup so only the encryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		KEncr.EasyEncrypt(str, key)
	}
}
// BenchmarkEasyDecrypt measures EasyDecrypt on a short ciphertext.
func BenchmarkEasyDecrypt(b *testing.B) {
	key := "123456"
	str := "e10azZaczdODqqimpcY"
	// Reset after setup so only the decryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		KEncr.EasyDecrypt(str, key)
	}
}
// TestHmacShaX checks that all three supported HMAC-SHA variants
// (SHA-1/256/512) produce non-empty digests.
func TestHmacShaX(t *testing.T) {
	msg := []byte("hello world")
	key := []byte("123456")
	d1 := KEncr.HmacShaX(msg, key, 1)
	d256 := KEncr.HmacShaX(msg, key, 256)
	d512 := KEncr.HmacShaX(msg, key, 512)
	if d1 == "" || d256 == "" || d512 == "" {
		t.Error("HmacShaX fail")
		return
	}
}
// TestHmacShaXPanic verifies that an unsupported SHA variant panics and
// that the panic is recoverable.
func TestHmacShaXPanic(t *testing.T) {
	defer func() {
		if r := recover(); r != nil {
			fmt.Println("recover...:", r)
		}
	}()
	msg := []byte("hello world")
	key := []byte("123456")
	// 4 is not a valid variant; this call is expected to panic.
	KEncr.HmacShaX(msg, key, 4)
}
// BenchmarkHmacShaX measures the HMAC-SHA256 variant.
func BenchmarkHmacShaX(b *testing.B) {
	str := []byte("hello world")
	key := []byte("123456")
	// Reset after setup so only the HMAC loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		KEncr.HmacShaX(str, key, 256)
	}
}
// TestPkcs7PaddingUnPadding table-tests pkcs7Padding/pkcs7UnPadding over
// nil/empty input, a zero block size, and both the PKCS#7 and the
// zero-fill padding variants.
func TestPkcs7PaddingUnPadding(t *testing.T) {
var emp1 []byte
var emp2 = []byte("")
key1 := []byte("1234")
// "1234" padded to one 16-byte AES block: 0x0c PKCS#7 fill vs zero fill.
dat1 := []byte{49, 50, 51, 52, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12}
dat2 := []byte{49, 50, 51, 52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
// Each row drives both directions: pad `cipher` -> expected1 and
// unpad `orig` -> expected2.
var tests = []struct {
cipher []byte
orig []byte
size int
zero bool
expected1 []byte
expected2 []byte
}{
{nil, nil, aes.BlockSize, false, nil, nil},
{emp1, emp1, aes.BlockSize, false, nil, nil},
{emp2, emp2, aes.BlockSize, false, nil, nil},
{key1, key1, 0, false, nil, nil},
{key1, dat1, aes.BlockSize, false, dat1, key1},
{key1, dat2, aes.BlockSize, true, dat2, nil},
{key1, dat2, aes.BlockSize, false, dat1, emp1},
}
for _, test := range tests {
actual1 := pkcs7Padding(test.cipher, test.size, test.zero)
if !KArr.IsEqualArray(actual1, test.expected1) {
t.Errorf("Expected pkcs7Padding(%v, %d, %t) to be %v, got %v", test.cipher, test.size, test.zero, test.expected1, actual1)
}
actual2 := pkcs7UnPadding(test.orig, test.size)
if !KArr.IsEqualArray(actual2, test.expected2) {
t.Errorf("Expected pkcs7UnPadding(%v, %d) to be %v, got %v", test.orig, test.size, test.expected2, actual2)
}
}
}
// BenchmarkPkcs7Padding measures PKCS#7 padding of a short input.
func BenchmarkPkcs7Padding(b *testing.B) {
	str := []byte("1234")
	// Reset after setup so only the padding loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		pkcs7Padding(str, 16, false)
	}
}
// BenchmarkPkcs7UnPadding measures PKCS#7 unpadding of one padded block.
func BenchmarkPkcs7UnPadding(b *testing.B) {
	data := []byte{49, 50, 51, 52, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12}
	// Reset after setup so only the unpadding loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		pkcs7UnPadding(data, 16)
	}
}
// TestZeroPaddingUnPadding round-trips a short key through zero padding
// and back.
func TestZeroPaddingUnPadding(t *testing.T) {
	key := []byte("hello")
	padded := zeroPadding(key, 16)
	unpadded := zeroUnPadding(padded)
	if padded == nil {
		t.Error("zeroPadding fail")
		return
	}
	if !KArr.IsEqualArray(key, unpadded) {
		t.Error("zeroUnPadding fail")
		return
	}
}
// BenchmarkZeroPadding measures zero padding of a short input.
func BenchmarkZeroPadding(b *testing.B) {
	key := []byte("hello")
	// Reset after setup so only the padding loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		zeroPadding(key, 16)
	}
}
// BenchmarkZeroUnPadding measures zero unpadding of one padded block.
func BenchmarkZeroUnPadding(b *testing.B) {
	ori := []byte{104, 101, 108, 108, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
	// Reset after setup so only the unpadding loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		zeroUnPadding(ori)
	}
}
// TestAesCBCEncryptDecrypt round-trips data through AES-CBC with the
// default, PKCS#7 and zero-padding modes, and checks the error paths for
// invalid keys and malformed ciphertext.
func TestAesCBCEncryptDecrypt(t *testing.T) {
	ori := []byte("hello")
	key := []byte("1234567890123456")
	emp := []byte("")
	var err error
	var enc, des []byte

	// A key that is not 16/24/32 bytes must be rejected.
	_, err = KEncr.AesCBCEncrypt(ori, []byte("123"))
	if err == nil {
		t.Error("AesCBCEncrypt fail")
		return
	}

	// Happy-path round trips: the round-trip comparison is the real check,
	// so errors are discarded explicitly here (the old `err` re-assignments
	// were dead stores that were never inspected).
	enc, _ = KEncr.AesCBCEncrypt(ori, key)
	des, _ = KEncr.AesCBCDecrypt(enc, key)
	if !KArr.IsEqualArray(ori, des) {
		t.Error("AesCBCEncrypt fail")
		return
	}
	enc, _ = KEncr.AesCBCEncrypt(ori, key, PKCS_SEVEN)
	des, _ = KEncr.AesCBCDecrypt(enc, key, PKCS_SEVEN)
	if !KArr.IsEqualArray(ori, des) {
		t.Error("AesCBCEncrypt fail")
		return
	}
	enc, _ = KEncr.AesCBCEncrypt(emp, key, PKCS_SEVEN)
	des, _ = KEncr.AesCBCDecrypt(enc, key, PKCS_SEVEN)
	if !KArr.IsEqualArray(emp, des) {
		t.Error("AesCBCEncrypt fail")
		return
	}
	enc, _ = KEncr.AesCBCEncrypt(ori, key, PKCS_ZERO)
	des, _ = KEncr.AesCBCDecrypt(enc, key, PKCS_ZERO)
	if !KArr.IsEqualArray(ori, des) {
		t.Error("AesCBCEncrypt fail")
		return
	}

	// Hand-built malformed ciphertext must fail to decrypt.
	enc = []byte{83, 28, 170, 254, 29, 174, 21, 129, 241, 233, 243, 84, 1, 250, 95, 122, 104, 101, 108, 108, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
	des, err = KEncr.AesCBCDecrypt(enc, key, PKCS_ZERO)
	if err == nil {
		t.Error("AesCBCDecrypt fail")
		return
	}
	_, err = KEncr.AesCBCDecrypt(enc, []byte("1"))
	if err == nil {
		t.Error("AesCBCDecrypt fail")
		return
	}
	_, err = KEncr.AesCBCDecrypt([]byte("1234"), key)
	if err == nil {
		t.Error("AesCBCDecrypt fail")
		return
	}
}
// BenchmarkAesCBCEncrypt measures AES-CBC encryption of a short input.
func BenchmarkAesCBCEncrypt(b *testing.B) {
	ori := []byte("hello")
	key := []byte("1234567890123456")
	// Reset after setup so only the encryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesCBCEncrypt(ori, key)
	}
}
// BenchmarkAesCBCDecryptZero measures AES-CBC decryption with zero padding.
func BenchmarkAesCBCDecryptZero(b *testing.B) {
	enc := []byte{214, 214, 97, 208, 185, 68, 246, 40, 124, 3, 155, 58, 5, 84, 136, 10, 104, 101, 108, 108, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
	key := []byte("1234567890123456")
	// Reset after setup so only the decryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesCBCDecrypt(enc, key, PKCS_ZERO)
	}
}
// BenchmarkAesCBCDecryptSeven measures AES-CBC decryption with PKCS#7 padding.
func BenchmarkAesCBCDecryptSeven(b *testing.B) {
	enc := []byte{17, 195, 8, 206, 231, 183, 143, 246, 244, 137, 216, 185, 120, 175, 90, 111, 104, 101, 108, 108, 111, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11}
	key := []byte("1234567890123456")
	// Reset after setup so only the decryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesCBCDecrypt(enc, key, PKCS_SEVEN)
	}
}
// TestAesCFBEncryptDecrypt round-trips data through AES-CFB and checks
// the error paths for invalid keys and short ciphertext.
func TestAesCFBEncryptDecrypt(t *testing.T) {
	ori := []byte("hello")
	key := []byte("1234567890123456")
	emp := []byte("")
	var err error
	var enc, des []byte

	// A key that is not 16/24/32 bytes must be rejected.
	_, err = KEncr.AesCFBEncrypt(ori, []byte("123"))
	if err == nil {
		t.Error("AesCFBEncrypt fail")
		return
	}

	// Happy-path round trips; errors discarded explicitly (the old `err`
	// re-assignments here were dead stores).
	enc, _ = KEncr.AesCFBEncrypt(ori, key)
	des, _ = KEncr.AesCFBDecrypt(enc, key)
	if !KArr.IsEqualArray(ori, des) {
		t.Error("AesCFBEncrypt fail")
		return
	}
	enc, _ = KEncr.AesCFBEncrypt(emp, key)
	des, _ = KEncr.AesCFBDecrypt(enc, key)
	if !KArr.IsEqualArray(emp, des) {
		t.Error("AesCFBEncrypt fail")
		return
	}

	_, err = KEncr.AesCFBDecrypt(enc, []byte("1"))
	if err == nil {
		t.Error("AesCFBDecrypt fail")
		return
	}
	_, err = KEncr.AesCFBDecrypt([]byte("1234"), key)
	if err == nil {
		t.Error("AesCFBDecrypt fail")
		return
	}
}
// BenchmarkAesCFBEncrypt measures AES-CFB encryption of a short input.
func BenchmarkAesCFBEncrypt(b *testing.B) {
	ori := []byte("hello")
	key := []byte("1234567890123456")
	// Reset after setup so only the encryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesCFBEncrypt(ori, key)
	}
}
// BenchmarkAesCFBDecrypt measures AES-CFB decryption.
func BenchmarkAesCFBDecrypt(b *testing.B) {
	enc := []byte{150, 234, 226, 46, 34, 206, 171, 155, 186, 66, 116, 201, 63, 67, 227, 217, 104, 101, 108, 108, 111}
	key := []byte("1234567890123456")
	// Reset after setup so only the decryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesCFBDecrypt(enc, key)
	}
}
// TestAesCTREncryptDecrypt round-trips data through AES-CTR and checks
// the error paths for invalid keys and short ciphertext.
func TestAesCTREncryptDecrypt(t *testing.T) {
	ori := []byte("hello")
	key := []byte("1234567890123456")
	emp := []byte("")
	var err error
	var enc, des []byte

	// A key that is not 16/24/32 bytes must be rejected.
	_, err = KEncr.AesCTREncrypt(ori, []byte("123"))
	if err == nil {
		t.Error("AesCTREncrypt fail")
		return
	}

	// Happy-path round trips; errors discarded explicitly (the old `err`
	// re-assignments here were dead stores).
	enc, _ = KEncr.AesCTREncrypt(ori, key)
	des, _ = KEncr.AesCTRDecrypt(enc, key)
	if !KArr.IsEqualArray(ori, des) {
		t.Error("AesCTREncrypt fail")
		return
	}
	enc, _ = KEncr.AesCTREncrypt(emp, key)
	des, _ = KEncr.AesCTRDecrypt(enc, key)
	if !KArr.IsEqualArray(emp, des) {
		t.Error("AesCTREncrypt fail")
		return
	}

	_, err = KEncr.AesCTRDecrypt(enc, []byte("1"))
	if err == nil {
		t.Error("AesCTRDecrypt fail")
		return
	}
	_, err = KEncr.AesCTRDecrypt([]byte("1234"), key)
	if err == nil {
		t.Error("AesCTRDecrypt fail")
		return
	}
}
// BenchmarkAesCTREncrypt measures AES-CTR encryption of a short input.
func BenchmarkAesCTREncrypt(b *testing.B) {
	ori := []byte("hello")
	key := []byte("1234567890123456")
	// Reset after setup so only the encryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesCTREncrypt(ori, key)
	}
}
// BenchmarkAesCTRDecrypt measures AES-CTR decryption.
func BenchmarkAesCTRDecrypt(b *testing.B) {
	enc := []byte{225, 187, 161, 145, 117, 191, 229, 20, 164, 43, 242, 23, 138, 241, 74, 27, 104, 101, 108, 108, 111}
	key := []byte("1234567890123456")
	// Reset after setup so only the decryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesCTRDecrypt(enc, key)
	}
}
// TestAesOFBEncryptDecrypt round-trips data through AES-OFB and checks
// the error paths for invalid keys and short ciphertext.
func TestAesOFBEncryptDecrypt(t *testing.T) {
	ori := []byte("hello")
	key := []byte("1234567890123456")
	emp := []byte("")
	var err error
	var enc, des []byte

	// A key that is not 16/24/32 bytes must be rejected.
	_, err = KEncr.AesOFBEncrypt(ori, []byte("123"))
	if err == nil {
		t.Error("AesOFBEncrypt fail")
		return
	}

	// Happy-path round trips; errors discarded explicitly (the old `err`
	// re-assignments here were dead stores).
	enc, _ = KEncr.AesOFBEncrypt(ori, key)
	des, _ = KEncr.AesOFBDecrypt(enc, key)
	if !KArr.IsEqualArray(ori, des) {
		t.Error("AesOFBEncrypt fail")
		return
	}
	enc, _ = KEncr.AesOFBEncrypt(emp, key)
	des, _ = KEncr.AesOFBDecrypt(enc, key)
	if !KArr.IsEqualArray(emp, des) {
		t.Error("AesOFBEncrypt fail")
		return
	}

	_, err = KEncr.AesOFBDecrypt(enc, []byte("1"))
	if err == nil {
		t.Error("AesOFBDecrypt fail")
		return
	}
	_, err = KEncr.AesOFBDecrypt([]byte("1234"), key)
	if err == nil {
		t.Error("AesOFBDecrypt fail")
		return
	}
}
// BenchmarkAesOFBEncrypt measures AES-OFB encryption of a short input.
func BenchmarkAesOFBEncrypt(b *testing.B) {
	ori := []byte("hello")
	key := []byte("1234567890123456")
	// Reset after setup so only the encryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesOFBEncrypt(ori, key)
	}
}
// BenchmarkAesOFBDecrypt measures AES-OFB decryption.
func BenchmarkAesOFBDecrypt(b *testing.B) {
	enc := []byte{66, 87, 29, 157, 2, 128, 196, 94, 141, 224, 221, 41, 162, 41, 159, 207, 104, 101, 108, 108, 111}
	key := []byte("1234567890123456")
	// Reset after setup so only the decryption loop is measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.AesOFBDecrypt(enc, key)
	}
}
// TestGenerateRsaKeys checks RSA key pair generation: a too-small key
// size must fail, and a 2048-bit request must yield PEM blobs of a
// plausible minimum size.
func TestGenerateRsaKeys(t *testing.T) {
	// Key sizes below the library minimum must be rejected.
	_, _, err := KEncr.GenerateRsaKeys(1)
	if err == nil {
		t.Error("GenerateRsaKeys fail")
		return
	}

	pri, pub, err := KEncr.GenerateRsaKeys(2048)
	// The original only printed this error after already inspecting the
	// results; a generation failure now fails the test explicitly.
	if err != nil {
		t.Error("GenerateRsaKeys fail")
		return
	}
	if len(pri) < 100 || len(pub) < 100 {
		t.Error("GenerateRsaKeys fail")
		return
	}
}
// BenchmarkGenerateRsaKeys measures 1024-bit RSA key pair generation.
func BenchmarkGenerateRsaKeys(b *testing.B) {
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _, _ = KEncr.GenerateRsaKeys(1024)
	}
}
// TestRsaPublicEncryptPrivateDecrypt round-trips data through RSA
// public-key encryption and private-key decryption, then verifies that
// malformed keys and mismatched key types are rejected.
func TestRsaPublicEncryptPrivateDecrypt(t *testing.T) {
	var enc, des []byte
	var err error
	word := []byte("hello world")
	pubkey1, _ := KFile.ReadFile("testdata/rsa/public_key.pem")
	prikey1, _ := KFile.ReadFile("testdata/rsa/private_key.pem")
	// NOTE(review): pubkey2 carries a PRIVATE KEY header and prikey2 a
	// PUBLIC KEY header — presumably deliberate, so each is the wrong
	// key type for the call it is passed to below; confirm.
	pubkey2 := `-----BEGIN RSA PRIVATE KEY-----
<KEY>`
	prikey2 := `-----BEGIN RSA PUBLIC KEY-----
<KEY>`

	// Happy-path round trip with the on-disk key pair.
	enc, err = KEncr.RsaPublicEncrypt(word, pubkey1)
	if err != nil {
		t.Error("RsaPublicEncrypt fail")
		return
	}
	des, err = KEncr.RsaPrivateDecrypt(enc, prikey1)
	if err != nil {
		t.Error("RsaPrivateDecrypt fail")
		return
	}
	if !KArr.IsEqualArray(word, des) {
		t.Error("RsaPrivateDecrypt fail")
		return
	}

	// Malformed public key must fail.
	_, err = KEncr.RsaPublicEncrypt(word, []byte("123"))
	if err == nil {
		t.Error("RsaPublicEncrypt fail")
		return
	}
	// Wrong key type (private header) must fail.
	_, err = KEncr.RsaPublicEncrypt(word, []byte(pubkey2))
	if err == nil {
		t.Error("RsaPublicEncrypt fail")
		return
	}
	// Malformed private key must fail.
	_, err = KEncr.RsaPrivateDecrypt(enc, []byte("123"))
	if err == nil {
		t.Error("RsaPrivateDecrypt fail")
		return
	}
	// Wrong key type (public header) must fail.
	_, err = KEncr.RsaPrivateDecrypt(enc, []byte(prikey2))
	if err == nil {
		t.Error("RsaPrivateDecrypt fail")
		return
	}
}
// BenchmarkRsaPublicEncrypt measures RSA public-key encryption using
// the on-disk test public key.
func BenchmarkRsaPublicEncrypt(b *testing.B) {
	word := []byte("hello world")
	pubkey, _ := KFile.ReadFile("testdata/rsa/public_key.pem")
	// Reset after setup so the key-file read is not measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.RsaPublicEncrypt(word, pubkey)
	}
}
// BenchmarkRsaPrivateDecrypt measures RSA private-key decryption of a
// fixed ciphertext using the on-disk test private key.
func BenchmarkRsaPrivateDecrypt(b *testing.B) {
	data := []byte{143, 167, 230, 243, 173, 106, 253, 203, 191, 77, 142, 78, 116, 8, 81, 120, 197, 206, 141, 219, 255, 210, 42, 71, 202, 47, 153, 60, 152, 163, 160, 226, 110, 102, 50, 20, 165, 181, 236, 160, 109, 229, 1, 11, 80, 164, 9, 56, 188, 66, 199, 227, 69, 88, 88, 143, 159, 211, 41, 169, 231, 215, 241, 35, 79, 208, 44, 43, 143, 163, 64, 107, 166, 128, 101, 106, 73, 248, 161, 36, 201, 161, 171, 241, 227, 114, 137, 28, 156, 63, 147, 52, 189, 230, 136, 90, 123, 21, 73, 172, 188, 8, 53, 98, 36, 185, 131, 171, 222, 52, 124, 48, 207, 82, 123, 234, 5, 97, 53, 47, 234, 6, 81, 118, 81, 161, 130, 172}
	prikey, _ := KFile.ReadFile("testdata/rsa/private_key.pem")
	// Reset after setup so the key-file read is not measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.RsaPrivateDecrypt(data, prikey)
	}
}
// TestRsaPrivateEncryptPublicDecrypt round-trips data through RSA
// private-key encryption (signing-style) and public-key decryption,
// then verifies malformed/mismatched keys are rejected. The plaintext
// is repeated to exceed a single block.
func TestRsaPrivateEncryptPublicDecrypt(t *testing.T) {
	var enc, des []byte
	var err error
	str := strings.Repeat("hello world", 10)
	word := []byte(str)
	pubkey1, _ := KFile.ReadFile("testdata/rsa/public_key.pem")
	prikey1, _ := KFile.ReadFile("testdata/rsa/private_key.pem")
	// Deliberately unusable key material for the negative cases.
	pubkey2 := `-----<KEY>`
	prikey2 := `-----<KEY>`

	// Happy-path round trip.
	enc, err = KEncr.RsaPrivateEncrypt(word, prikey1)
	if err != nil {
		println(err.Error())
		t.Error("RsaPrivateEncrypt fail")
		return
	}
	des, err = KEncr.RsaPublicDecrypt(enc, pubkey1)
	if err != nil {
		t.Error("RsaPublicDecrypt fail")
		return
	}
	if !KArr.IsEqualArray(word, des) {
		t.Error("RsaPublicDecrypt fail")
		return
	}

	// Malformed private key must fail.
	_, err = KEncr.RsaPrivateEncrypt(word, []byte("123"))
	if err == nil {
		t.Error("RsaPrivateEncrypt fail")
		return
	}
	// Invalid key material must fail.
	_, err = KEncr.RsaPrivateEncrypt(word, []byte(prikey2))
	if err == nil {
		t.Error("RsaPrivateEncrypt fail")
		return
	}
	// Malformed public key must fail.
	_, err = KEncr.RsaPublicDecrypt(enc, []byte("123"))
	if err == nil {
		t.Error("RsaPublicDecrypt fail")
		return
	}
	// Invalid key material must fail.
	_, err = KEncr.RsaPublicDecrypt(enc, []byte(pubkey2))
	if err == nil {
		t.Error("RsaPublicDecrypt fail")
		return
	}
}
// BenchmarkRsaPrivateEncrypt measures RSA private-key encryption using
// the on-disk test private key.
func BenchmarkRsaPrivateEncrypt(b *testing.B) {
	word := []byte("hello world")
	prikey, _ := KFile.ReadFile("testdata/rsa/private_key.pem")
	// Reset after setup so the key-file read is not measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.RsaPrivateEncrypt(word, prikey)
	}
}
// BenchmarkRsaPublicDecrypt measures RSA public-key decryption of a
// fixed ciphertext using the on-disk test public key.
func BenchmarkRsaPublicDecrypt(b *testing.B) {
	data := []byte{134, 85, 170, 196, 249, 255, 241, 73, 245, 105, 254, 226, 205, 183, 69, 1, 214, 60, 209, 162, 8, 50, 87, 148, 215, 2, 198, 212, 82, 5, 49, 39, 219, 182, 194, 12, 198, 23, 0, 99, 99, 145, 32, 138, 182, 104, 0, 190, 69, 46, 213, 2, 243, 139, 161, 15, 0, 69, 242, 145, 240, 86, 173, 242, 7, 71, 151, 160, 145, 21, 15, 117, 7, 202, 243, 70, 11, 105, 247, 198, 192, 213, 152, 56, 85, 76, 237, 38, 155, 78, 81, 212, 160, 223, 41, 54, 143, 110, 214, 97, 138, 180, 139, 240, 178, 14, 67, 77, 19, 169, 103, 222, 34, 172, 5, 141, 64, 8, 63, 17, 72, 180, 54, 59, 20, 105, 124, 221}
	pubkey, _ := KFile.ReadFile("testdata/rsa/public_key.pem")
	// Reset after setup so the key-file read is not measured.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		_, _ = KEncr.RsaPublicDecrypt(data, pubkey)
	}
}
|
# Termux build recipe for the SDL_image library.
TERMUX_PKG_HOMEPAGE=https://www.libsdl.org/projects/SDL_image/
TERMUX_PKG_DESCRIPTION="A simple library to load images of various formats as SDL surfaces"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="Leonid Plyushch <leonid.plyushch@gmail.com>"
TERMUX_PKG_VERSION=1.2.12
# Bumped when the package is rebuilt without a version change.
TERMUX_PKG_REVISION=2
TERMUX_PKG_SRCURL=https://www.libsdl.org/projects/SDL_image/release/SDL_image-$TERMUX_PKG_VERSION.tar.gz
TERMUX_PKG_SHA256=0b90722984561004de84847744d566809dbb9daf732a9e503b91a1b5a84e5699
# Runtime dependencies: image codecs plus SDL itself.
TERMUX_PKG_DEPENDS="libjpeg-turbo, libpng, libtiff, sdl, zlib"
|
import android.location.Location;
import android.location.LocationManager;
import android.os.Bundle;
/**
 * Screen that finds nearby restaurants based on the device's last known
 * GPS position.
 */
public class RestaurantFinderActivity extends AppCompatActivity {

    /** System service used to read the device location. */
    private LocationManager locationManager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_restaurant_finder);
        locationManager = (LocationManager)
                getSystemService(LOCATION_SERVICE);
    }

    // Method to find the nearest restaurants
    public void getNearestRestaurants() {
        // Get user’s current location
        // NOTE(review): getLastKnownLocation can return null and requires
        // a location permission (SecurityException otherwise) — confirm
        // both cases are handled before this stub is completed.
        Location location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
        // Make a network call
        // to get restaurants
        // in the vicinity
    }
}
import { ChannelId, UserId } from "../../types"
import { ChannelReadStateEntity } from "../../entity/ChannelReadState"
/**
 * Read-side repository for per-user channel read states.
 */
export interface IChannelReadStateQueryRepository {
    /**
     * Look up the read state a user has for a channel.
     * Resolves to null when no state is recorded for the pair.
     */
    find(channelId: ChannelId, userId: UserId): Promise<ChannelReadStateEntity | null>
}
|
#!/usr/bin/env bash
# The cleanup hook ensures these containers are removed when the script exits.
# Name of the docker container running postgres.
POSTGRES_CONTAINER=test-container
# host:port the indexer daemon listens on.
NET=localhost:8981
# Scratch file that base_curl writes response bodies into.
CURL_TEMPFILE=curl_out.txt
# File holding the indexer daemon's PID (used by kill_indexer).
PIDFILE=testindexerpidfile
# Template connection string; DB_NAME_HERE is substituted per test database.
CONNECTION_STRING="host=localhost user=algorand password=algorand dbname=DB_NAME_HERE port=5434 sslmode=disable"
# Default per-query time limit (seconds) enforced by sql_test/rest_test.
MAX_TIME=20
###################
## Print Helpers ##
###################
# Print a highlighted banner message.
# $1 - message to print
function print_alert() {
  # Use a %s placeholder so messages containing printf specifiers
  # (e.g. "100%") are printed literally; the original interpolated $1
  # directly into the format string.
  printf "\n=====\n===== %s\n=====\n" "$1"
}
##################
## Test Helpers ##
##################
# Print a failure banner and abort the whole script with status 1.
# $1 - test description
# $2 - query that was run
# $3 - reason for the failure
function fail_and_exit {
  print_alert "Failed test - $1 ($2): $3"
  exit 1
}
# Run a SQL query with psql inside the postgres container.
# $1 - database
# $2 - query
function base_query() {
  #export PGPASSWORD=algorand
  #psql -XA -h localhost -p 5434 -h localhost -U algorand $1 -c "$2"
  docker exec $POSTGRES_CONTAINER psql -XA -Ualgorand $1 -c "$2"
}
# SQL Test - query and verify results, with a custom time limit.
# $1 - max runtime in seconds, default value = 20
# $2 - test description.
# $3 - database
# $4 - query
# $5 - substring that should be in the response
function sql_test_timeout {
  # Save the current limit so it can be restored afterwards.
  # BUG FIX: the original wrote MAX_TIME_BEFORE=MAX_TIME (missing "$"),
  # restoring the literal string "MAX_TIME" and breaking every later
  # numeric comparison against the limit.
  local MAX_TIME_BEFORE=$MAX_TIME
  MAX_TIME=$1
  shift
  sql_test "$@"
  MAX_TIME=$MAX_TIME_BEFORE
}
# SQL Test - query and verify results.
# Fails the whole run if psql errors, a substring is missing, or the
# query takes longer than $MAX_TIME seconds.
# $1 - test description.
# $2 - database
# $3 - query
# $4... - substring(s) that should be in the response
function sql_test {
  local DESCRIPTION=$1
  shift
  local DATABASE=$1
  shift
  local QUERY=$1
  shift
  local SUBSTRING
  local START=$SECONDS

  set +e
  RES=$(base_query $DATABASE "$QUERY")
  if [[ $? != 0 ]]; then
    # BUG FIX: the original echoed $RESULT, an unset variable; the
    # captured query output lives in $RES.
    echo "ERROR from psql: $RES"
    fail_and_exit "$DESCRIPTION" "$QUERY" "psql had a non-zero exit code."
  fi
  set -e

  # Check results
  for SUBSTRING in "$@"; do
    if [[ "$RES" != *"$SUBSTRING"* ]]; then
      fail_and_exit "$DESCRIPTION" "$QUERY" "unexpected response. should contain '$SUBSTRING', actual: '$RES'"
    fi
  done

  local ELAPSED=$(($SECONDS - $START))
  if [[ $ELAPSED -gt $MAX_TIME ]]; then
    fail_and_exit "$DESCRIPTION" "$QUERY" "query duration too long, $ELAPSED > $MAX_TIME"
  fi

  print_alert "Passed test: $DESCRIPTION"
}
# rest_test helper: GET $NET$1; response body goes to $CURL_TEMPFILE,
# the HTTP status code is printed to stdout.
function base_curl() {
  curl -o "$CURL_TEMPFILE" -w "%{http_code}" -q -s "$NET$1"
}
# CURL Test - query and verify results, with a custom time limit.
# $1 - max runtime in seconds, default value = 20
# $2 - test description.
# $3 - query
# $4 - expected status code
# $5... - substring that should be in the response
function rest_test_timeout {
  # BUG FIX: the original wrote MAX_TIME_BEFORE=MAX_TIME (missing "$"),
  # restoring the literal string "MAX_TIME" and breaking every later
  # numeric comparison against the limit.
  local MAX_TIME_BEFORE=$MAX_TIME
  MAX_TIME=$1
  shift
  rest_test "$@"
  MAX_TIME=$MAX_TIME_BEFORE
}
# CURL Test - query and verify results.
# Fails the whole run if curl errors, the status code mismatches, the
# call exceeds $MAX_TIME seconds, or a substring is missing.
# $1 - test description.
# $2 - query
# $3 - expected status code
# $4... - substring(s) that should be in the response
function rest_test {
  local DESCRIPTION=$1
  shift
  local QUERY=$1
  shift
  local EXPECTED_CODE=$1
  shift
  local SUBSTRING
  local START=$SECONDS

  set +e
  local CODE
  # BUG FIX: split declaration from assignment. With the original
  # "local CODE=$(base_curl ...)", $? reflected the (always-successful)
  # "local" builtin, so the curl-failure branch below was dead code.
  CODE=$(base_curl "$QUERY")
  if [[ $? != 0 ]]; then
    cat $CURL_TEMPFILE
    fail_and_exit "$DESCRIPTION" "$QUERY" "curl had a non-zero exit code."
  fi
  set -e

  local RES=$(cat "$CURL_TEMPFILE")
  if [[ "$CODE" != "$EXPECTED_CODE" ]]; then
    fail_and_exit "$DESCRIPTION" "$QUERY" "unexpected HTTP status code expected $EXPECTED_CODE (actual $CODE): $RES"
  fi

  local ELAPSED=$(($SECONDS - $START))
  if [[ $ELAPSED -gt $MAX_TIME ]]; then
    fail_and_exit "$DESCRIPTION" "$QUERY" "query duration too long, $ELAPSED > $MAX_TIME"
  fi

  # Check result substrings
  for SUBSTRING in "$@"; do
    if [[ "$RES" != *"$SUBSTRING"* ]]; then
      fail_and_exit "$DESCRIPTION" "$QUERY" "unexpected response. should contain '$SUBSTRING', actual: $RES"
    fi
  done

  print_alert "Passed test: $DESCRIPTION"
}
#####################
## Indexer Helpers ##
#####################
# Suppresses output if the command succeeds; dumps the captured output
# when the command fails.
# $1... - command (and arguments) to run
function suppress() {
  # NOTE(review): "--force" is a GNU rm long option; not portable to BSD rm.
  /bin/rm --force /tmp/suppress.out 2> /dev/null
  ${1+"$@"} > /tmp/suppress.out 2>&1 || cat /tmp/suppress.out
  /bin/rm /tmp/suppress.out
}
# Launch the indexer daemon in the background.
# $1 - connection string
# $2 - if set, puts in read-only mode
function start_indexer_with_connection_string() {
  # Local so a stale value never leaks between calls.
  local RO=""
  if [ ! -z "$2" ]; then
    RO="--no-algod"
  fi
  # $RO is deliberately unquoted: when empty it must expand to no
  # argument at all. The original passed a quoted "$RO" twice, handing
  # the daemon an empty-string positional argument in the normal case
  # and a duplicated flag in the read-only case.
  ALGORAND_DATA= ../cmd/algorand-indexer/algorand-indexer daemon \
    -S $NET $RO \
    -P "$1" \
    --pidfile $PIDFILE 2>&1 > /dev/null &
}
# Start the indexer against a database built from CONNECTION_STRING.
# $1 - postgres dbname
# $2 - if set, halts execution: prints the daemon arguments and sleeps
#      forever so a developer can run/debug the daemon by hand
function start_indexer() {
  if [ ! -z $2 ]; then
    echo "daemon -S $NET -P \"${CONNECTION_STRING/DB_NAME_HERE/$1}\""
    sleep infinity
  fi
  start_indexer_with_connection_string "${CONNECTION_STRING/DB_NAME_HERE/$1}"
}
# Create a database, import an e2e block archive into it, then start
# the indexer daemon against it.
# $1 - postgres dbname
# $2 - e2edata tar.bz2 archive
# $3 - if set, halts execution (debug aid: prints the import args)
function start_indexer_with_blocks() {
  if [ ! -f $2 ]; then
    echo "Cannot find $2"
    # BUG FIX: a bare "exit" here returned the status of the preceding
    # successful command (0), so a missing archive looked like success.
    exit 1
  fi

  create_db $1

  # Unpack the archive into a throwaway directory.
  local TEMPDIR=$(mktemp -d -t ci-XXXXXXX)
  tar -xf "$2" -C $TEMPDIR

  if [ ! -z $3 ]; then
    echo "Start args 'import -P \"${CONNECTION_STRING/DB_NAME_HERE/$1}\" --genesis \"$TEMPDIR/algod/genesis.json\" $TEMPDIR/blocktars/*'"
    sleep infinity
  fi

  ALGORAND_DATA= ../cmd/algorand-indexer/algorand-indexer import \
    -P "${CONNECTION_STRING/DB_NAME_HERE/$1}" \
    --genesis "$TEMPDIR/algod/genesis.json" \
    $TEMPDIR/blocktars/*

  rm -rf $TEMPDIR

  start_indexer $1 $3
}
# Wait until /health reports a round, i.e. the daemon is up.
# $1 - number of attempts
function wait_for_started() {
  wait_for '"round":' "$1"
}
# Wait until /health reports that no DB migration is pending.
# $1 - number of attempts
function wait_for_migrated() {
  wait_for '"migration-required":false' "$1"
}
# Wait until /health reports the database as available.
# $1 - number of attempts
function wait_for_available() {
  wait_for '"db-available":true' "$1"
}
# Poll the indexer /health endpoint (1s apart) until it reports the
# given string. Exit with error if still not ready after all attempts.
# $1 - string to look for
# $2 - number of attempts (optional, default = 20)
function wait_for() {
  local n=0
  # Grep failures must not abort the script while polling.
  set +e
  local READY
  until [ "$n" -ge ${2:-20} ] || [ ! -z $READY ]; do
    curl -q -s "$NET/health" | grep "$1" > /dev/null 2>&1 && READY=1
    n=$((n+1))
    sleep 1
  done
  set -e
  if [ -z $READY ]; then
    echo "Error: timed out waiting for $1."
    # Dump the health output to aid debugging.
    curl -q -s "$NET/health"
    exit 1
  fi
}
# Kill indexer using the PIDFILE; a no-op when no pidfile exists.
function kill_indexer() {
  if test -f "$PIDFILE"; then
    # "|| true" keeps the script alive if the process already exited.
    kill -9 $(cat "$PIDFILE") > /dev/null 2>&1 || true
    rm $PIDFILE
  fi
}
####################
## Docker helpers ##
####################
# Force-remove a docker container; a no-op when it does not exist.
# $1 - name of docker container to kill.
function kill_container() {
  print_alert "Killing container - $1"
  docker rm -f $1 > /dev/null 2>&1 || true
}
# Start a fresh postgres container, removing any leftover one first.
# Takes no arguments.
function start_postgres() {
  if [ $# -ne 0 ]; then
    print_alert "Unexpected number of arguments to start_postgres."
    exit 1
  fi

  # Cleanup from last time
  kill_container $POSTGRES_CONTAINER

  print_alert "Starting - $POSTGRES_CONTAINER"
  # Start postgres container...
  docker run \
    -d \
    --name $POSTGRES_CONTAINER \
    -e POSTGRES_USER=algorand \
    -e POSTGRES_PASSWORD=algorand \
    -e PGPASSWORD=algorand \
    -p 5434:5432 \
    postgres

  # Fixed wait for the server to accept connections.
  # NOTE(review): polling pg_isready would be more reliable than sleep 5.
  sleep 5

  print_alert "Started - $POSTGRES_CONTAINER"
}
# Create an empty database inside the postgres container.
# $1 - postgres database name.
function create_db() {
  local DATABASE=$1

  # Create DB
  # NOTE(review): "-it" requests a TTY; this can fail when the script
  # runs without one (e.g. in CI) — confirm.
  docker exec -it $POSTGRES_CONTAINER psql -Ualgorand -c "create database $DATABASE"
}
# Create a database and load a pg_dump file into it.
# $1 - postgres database name.
# $2 - pg_dump file to import into the database.
function initialize_db() {
  local DATABASE=$1
  local DUMPFILE=$2

  print_alert "Initializing database ($DATABASE) with $DUMPFILE"

  # load some data into it.
  create_db $DATABASE
  #docker exec -i $POSTGRES_CONTAINER psql -Ualgorand -c "\\l"
  docker exec -i $POSTGRES_CONTAINER psql -Ualgorand -d $DATABASE < $DUMPFILE > /dev/null 2>&1
}
# Exit hook: remove the postgres container, scratch files, and the
# indexer daemon.
function cleanup() {
  kill_container $POSTGRES_CONTAINER
  rm $CURL_TEMPFILE > /dev/null 2>&1 || true
  kill_indexer
}
#####################
## User Interaction #
#####################
# Interactive yes/no prompt; returns 0 for yes, 1 for no.
# $1 - question text
# $2 - default answer, "Y" or "N" (optional)
function ask () {
    # https://djm.me/ask
    local prompt default reply

    if [ "${2:-}" = "Y" ]; then
        prompt="Y/n"
        default=Y
    elif [ "${2:-}" = "N" ]; then
        prompt="y/N"
        default=N
    else
        prompt="y/n"
        default=
    fi

    while true; do
        # Ask the question (not using "read -p" as it uses stderr not stdout)
        echo -n "$1 [$prompt] "

        # Read the answer (use /dev/tty in case stdin is redirected from somewhere else)
        read reply </dev/tty

        # Default?
        if [ -z "$reply" ]; then
            reply=$default
        fi

        # Check if the reply is valid
        case "$reply" in
            Y*|y*) return 0 ;;
            N*|n*) return 1 ;;
        esac
    done
}
############################################################################
## Integration tests are sometimes useful to run after a migration as well #
############################################################################
# REST checks that a migration produced the expected cumulative account
# rewards. Most historical (round=N) checks are disabled because the
# rewards/rewind feature was turned off.
function cumulative_rewards_tests() {
  rest_test 'Ensure migration updated specific account rewards.' '/v2/accounts/FZPGVIFCMHCE2HC2LEDD7IZQLKZVHRV5PENSD26Y2AOS3OWCYMKTY33UXI' 200 '"rewards":80000539878'

  # Rewards / Rewind is now disabled
  #rest_test 'Ensure migration updated specific account rewards @ round = 810.' '/v2/accounts/FZPGVIFCMHCE2HC2LEDD7IZQLKZVHRV5PENSD26Y2AOS3OWCYMKTY33UXI?round=810' 200 '"rewards":80000539878'
  #rest_test 'Ensure migration updated specific account rewards @ round = 800.' '/v2/accounts/FZPGVIFCMHCE2HC2LEDD7IZQLKZVHRV5PENSD26Y2AOS3OWCYMKTY33UXI?round=800' 200 '"rewards":68000335902'
  #rest_test 'Ensure migration updated specific account rewards @ round = 500.' '/v2/accounts/FZPGVIFCMHCE2HC2LEDD7IZQLKZVHRV5PENSD26Y2AOS3OWCYMKTY33UXI?round=500' 200 '"rewards":28000055972'
  #rest_test 'Ensure migration updated specific account rewards @ round = 100.' '/v2/accounts/FZPGVIFCMHCE2HC2LEDD7IZQLKZVHRV5PENSD26Y2AOS3OWCYMKTY33UXI?round=100' 200 '"rewards":7999999996'

  # One disabled test...
  rest_test 'Ensure migration updated specific account rewards @ round = 810.' '/v2/accounts/FZPGVIFCMHCE2HC2LEDD7IZQLKZVHRV5PENSD26Y2AOS3OWCYMKTY33UXI?round=810' 200 '"rewards":0'
}
# Verify created/deleted round tracking for applications, assets,
# application-local state, asset holdings, and accounts — each checked
# both directly in SQL and through the REST API.
# NOTE(review): the "?pretty¤cy-less-than=100" query strings below look
# like mojibake of "?pretty&currency-less-than=100" (¤ == "&curren");
# they are preserved byte-for-byte here — confirm against the live API.
# $1 - the DB to query
function create_delete_tests() {
  #####################
  # Application Tests #
  #####################
  sql_test "[sql] app create (app-id=203)" $1 \
    "select deleted, created_at, closed_at, index from app WHERE index = 203" \
    "f|55||203"
  rest_test "[rest] app create (app-id=203)" \
    "/v2/applications/203?pretty" \
    200 \
    '"deleted": false' \
    '"created-at-round": 55'

  sql_test "[sql] app create & delete (app-id=82)" $1 \
    "select deleted, created_at, closed_at, index from app WHERE index = 82" \
    "t|13|37|82"
  rest_test "[rest] app create & delete (app-id=82)" \
    "/v2/applications/82?pretty" \
    200 \
    '"deleted": true' \
    '"created-at-round": 13' \
    '"deleted-at-round": 37'

  ###############
  # Asset Tests #
  ###############
  sql_test "[sql] asset create / destroy" $1 \
    "select deleted, created_at, closed_at, index from asset WHERE index=135" \
    "t|23|33|135"
  rest_test "[rest - asset] asset create / destroy" \
    "/v2/assets/135?pretty" \
    200 \
    '"deleted": true' \
    '"created-at-round": 23' \
    '"destroyed-at-round": 33' \
    '"total": 0'
  rest_test "[rest - account] asset create / destroy" \
    "/v2/accounts/D2BFTG5GO2PUCLY2O4XIVW7WAQHON4DLX5R5V4O3MZWSWDKBNYZJYKHVBQ?pretty" \
    200 \
    '"created-at-round": 23' \
    '"destroyed-at-round": 33' \
    '"total": 0'

  sql_test "[sql] asset create" $1 \
    "select deleted, created_at, closed_at, index from asset WHERE index=168" \
    "f|35||168"
  rest_test "[rest - asset] asset create" \
    "/v2/assets/168?pretty" \
    200 \
    '"deleted": false' \
    '"created-at-round": 35' \
    '"total": 1337'
  rest_test "[rest - account] asset create" \
    "/v2/accounts/D2BFTG5GO2PUCLY2O4XIVW7WAQHON4DLX5R5V4O3MZWSWDKBNYZJYKHVBQ?pretty" \
    200 \
    '"deleted": false' \
    '"created-at-round": 35' \
    '"total": 1337'

  ###########################
  # Application Local Tests #
  ###########################
  sql_test "[sql] app optin no closeout" $1 \
    "select deleted, created_at, closed_at, app from account_app WHERE addr=decode('rAMD0F85toNMRuxVEqtxTODehNMcEebqq49p/BZ9rRs=', 'base64') AND app=85" \
    "f|13||85"
  rest_test "[rest] app optin no closeout" \
    "/v2/accounts/VQBQHUC7HG3IGTCG5RKRFK3RJTQN5BGTDQI6N2VLR5U7YFT5VUNVAF57ZU?pretty" \
    200 \
    '"deleted": false' \
    '"opted-in-at-round": 13' \
    '"deleted": false' \
    '"key": "Y1g="'

  sql_test "[sql] app multiple optins first saved (it is also closed)" $1 \
    "select deleted, created_at, closed_at, app from account_app WHERE addr=decode('Eze95btTASDFD/t5BDfgA2qvkSZtICa5pq1VSOUU0Y0=', 'base64') AND app=82" \
    "t|15|35|82"
  rest_test "[rest] app multiple optins first saved (it is also closed)" \
    "/v2/accounts/CM333ZN3KMASBRIP7N4QIN7AANVK7EJGNUQCNONGVVKURZIU2GG7XJIZ4Q?pretty" \
    200 \
    '"deleted": true' \
    '"opted-in-at-round": 15' \
    '"closed-out-at-round": 35'

  sql_test "[sql] app optin/optout/optin should leave last closed_at" $1 \
    "select deleted, created_at, closed_at, app from account_app WHERE addr=decode('ZF6AVNLThS9R3lC9jO+c7DQxMGyJvOqrNSYQdZPBQ0Y=', 'base64') AND app=203" \
    "f|57|59|203"
  rest_test "[rest] app optin/optout/optin should leave last closed_at" \
    "/v2/accounts/MRPIAVGS2OCS6UO6KC6YZ3445Q2DCMDMRG6OVKZVEYIHLE6BINDCIJ6J7U?pretty" \
    200 \
    '"deleted": false' \
    '"opted-in-at-round": 57' \
    '"closed-out-at-round": 59' \
    '"num-byte-slice": 1'

  #######################
  # Asset Holding Tests #
  #######################
  sql_test "[sql] asset optin" $1 \
    "select deleted, created_at, closed_at, assetid from account_asset WHERE addr=decode('MFkWBNGTXkuqhxtNVtRZYFN6jHUWeQQxqEn5cUp1DGs=', 'base64') AND assetid=27" \
    "f|13||27"
  rest_test "[rest - balances] asset optin" \
    "/v2/assets/27/balances?pretty¤cy-less-than=100" \
    200 \
    '"deleted": false' \
    '"opted-in-at-round": 13'
  rest_test "[rest - account] asset optin" \
    "/v2/accounts/GBMRMBGRSNPEXKUHDNGVNVCZMBJXVDDVCZ4QIMNIJH4XCSTVBRVYWWVCZA?pretty" \
    200 \
    '"deleted": false' \
    '"opted-in-at-round": 13'

  sql_test "[sql] asset optin / close-out" $1 \
    "select deleted, created_at, closed_at, assetid from account_asset WHERE addr=decode('E/p3R9m9X0c7eAv9DapnDcuNGC47kU0BxIVdSgHaFbk=', 'base64') AND assetid=36" \
    "t|16|25|36"
  rest_test "[rest] asset optin" \
    "/v2/assets/36/balances?pretty¤cy-less-than=100" \
    200 \
    '"deleted": true' \
    '"opted-in-at-round": 16' \
    '"opted-out-at-round": 25'

  sql_test "[sql] asset optin / close-out / optin / close-out" $1 \
    "select deleted, created_at, closed_at, assetid from account_asset WHERE addr=decode('ZF6AVNLThS9R3lC9jO+c7DQxMGyJvOqrNSYQdZPBQ0Y=', 'base64') AND assetid=135" \
    "t|25|31|135"
  rest_test "[rest] asset optin" \
    "/v2/assets/135/balances?pretty¤cy-less-than=100" \
    200 \
    '"deleted": true' \
    '"opted-in-at-round": 25' \
    '"opted-out-at-round": 31'

  sql_test "[sql] asset optin / close-out / optin" $1 \
    "select deleted, created_at, closed_at, assetid from account_asset WHERE addr=decode('ZF6AVNLThS9R3lC9jO+c7DQxMGyJvOqrNSYQdZPBQ0Y=', 'base64') AND assetid=168" \
    "f|37|39|168"
  rest_test "[rest] asset optin" \
    "/v2/assets/168/balances?pretty¤cy-less-than=100" \
    200 \
    '"deleted": false' \
    '"opted-in-at-round": 37' \
    '"opted-out-at-round": 39'

  #################
  # Account Tests #
  #################
  sql_test "[sql] genesis account with no transactions" $1 \
    "select deleted, created_at, closed_at, microalgos from account WHERE addr = decode('4L294Wuqgwe0YXi236FDVI5RX3ayj4QL1QIloIyerC4=', 'base64')" \
    "f|0||5000000000000000"
  rest_test "[rest] genesis account with no transactions" \
    "/v2/accounts/4C633YLLVKBQPNDBPC3N7IKDKSHFCX3WWKHYIC6VAIS2BDE6VQXACGG3BQ?pretty" \
    200 \
    '"deleted": false' \
    '"created-at-round": 0'

  sql_test "[sql] account created then never closed" $1 \
    "select deleted, created_at, closed_at, microalgos from account WHERE addr = decode('HoJZm6Z2n0EvGncuitv2BA7m8Gu/Y9rx22ZtKw1BbjI=', 'base64')" \
    "f|4||999999885998"
  rest_test "[rest] account created then never closed" \
    "/v2/accounts/D2BFTG5GO2PUCLY2O4XIVW7WAQHON4DLX5R5V4O3MZWSWDKBNYZJYKHVBQ?pretty" \
    200 \
    '"deleted": false' \
    '"created-at-round": 4'

  sql_test "[sql] account create close create" $1 \
    "select deleted, created_at, closed_at, microalgos from account WHERE addr = decode('KbUa0wk9gB3BgAjQF0J9NqunWaFS+h4cdZdYgGfBes0=', 'base64')" \
    "f|17|19|100000"
  rest_test "[rest] account create close create" \
    "/v2/accounts/FG2RVUYJHWAB3QMABDIBOQT5G2V2OWNBKL5B4HDVS5MIAZ6BPLGR65YW3Y?pretty" \
    200 \
    '"deleted": false' \
    '"created-at-round": 17' \
    '"closed-at-round": 19'

  sql_test "[sql] account create close create close" $1 \
    "select deleted, created_at, closed_at, microalgos from account WHERE addr = decode('8rpfPsaRRIyMVAnrhHF+SHpq9za99C1NknhTLGm5Xkw=', 'base64')" \
    "t|9|15|0"
  rest_test "[rest] account create close create close" \
    "/v2/accounts/6K5F6PWGSFCIZDCUBHVYI4L6JB5GV5ZWXX2C2TMSPBJSY2NZLZGCF2NH5U?pretty" \
    200 \
    '"deleted": true' \
    '"created-at-round": 9' \
    '"closed-at-round": 15'
}
|
#!/bin/bash
#
#cat > input.txt << EOF
#Lorem Ipsum is simply dummy text of the printing and typesetting industry.
#Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown
#printer took a galley of type and scrambled it to make a type specimen book. It has survived not only
#five centuries, but also the leap into electronic typesetting, remaining essentially unchanged.
#It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages,
#and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.
#EOF
#
cat > input.txt << EOF
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium,
totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo.
Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos
qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur,
adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem.
Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi c
onsequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur,
vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
EOF
#
# Print the top X most used words as a JSON array.
#
# BUG FIXES vs the original pipeline:
#  - '[:lower:]'/'[:upper:]' are quoted; unquoted they are glob patterns
#    and could match single-character file names in the current directory.
#  - counts are sorted numerically (sort -rn); a plain lexical "sort -r"
#    only works by accident of uniq -c's column padding.
RESULT=$(
  echo -en "[" && \
  echo -en $(cat input.txt | \
    tr "." "\n" | tr "," "\n" | tr "?" "\n" | tr " " "\n" | tr '[:lower:]' '[:upper:]' | \
    grep -v "^$" | sort | uniq -c | sort -rn | head -n 7 | \
    awk '{ print "{ \"word\": \"" $2 "\", \"count\": " $1 " }"}' | paste -s -d, - ) && \
  echo -e "]"
)
#
# Pretty-print through jq when available; otherwise dump raw JSON.
JQ_PRESENT=$(which jq)
if [[ "${JQ_PRESENT}" == "" ]]
then
  echo "${RESULT}"
else
  echo "${RESULT}" | jq
fi
|
<filename>sql/100-reaction.sql<gh_stars>0
-- Recreate the reaction table from scratch.
DROP TABLE IF EXISTS reaction;
-- nr: numeric primary key; namn: display name ("namn" is Swedish for name).
CREATE TABLE reaction (nr INTEGER PRIMARY KEY NOT NULL, namn VARCHAR(80)) ENGINE InnoDB;
-- INSERT INTO reaction(nr,namn) VALUES(1,"Joel");
-- INSERT INTO reaction(nr,namn) VALUES(2,"Jan");
-- INSERT INTO reaction(nr,namn) VALUES(3,"Jens");
|
const rx = require('rxjs'); // NOTE(review): appears unused in this module — confirm before removing.
const fileManager = require('./file-manager');

// Placeholder color component returned when a color cannot be parsed.
const blancComponent = {
    'red': '',
    'green': '',
    'blue': '',
    'alpha': '',
    'hex': ''
}

module.exports = {
    loadAssetJson
}
/**
 * Parse a set of Xcode asset-catalog Contents.json files into color and
 * image asset lists.
 *
 * @param {string[]} paths - paths to Contents.json files
 * @returns {Promise<{colors: Array, images: Array}>} the parsed assets
 */
function loadAssetJson(paths) {
    // NOTE(review): this promise never rejects; a parse failure inside
    // the helpers would propagate synchronously — confirm intended.
    return new Promise(function (resolve, reject) {
        let colors = [];
        let images = [];
        paths.forEach(path => {
            // Each path is tried first as a color set, then as an image set.
            let color = parseColorSetObject(path);
            let image = parseImageSetObject(path);
            if (color) {
                colors.push(color);
            } else if (image) {
                images.push(image);
            }
        });
        let result = {
            'colors': colors,
            'images': images
        }
        resolve(result);
    });
}
/**
 * Parse an ImageSet's Contents.json into a {name, path, value} object,
 * where value is keyed as template[idiom][style][scale] = filename.
 * Returns null when the file is not an ImageSet.
 *
 * @param {string} path - path to a Contents.json file
 */
function parseImageSetObject(path) {
    // Derive the asset directory and its name from the file path.
    let assetPath = path.replace(/\/Contents.json/g, '');
    let matchStrings = assetPath.match(/([^/]+?)?$/);
    let assetName = matchStrings[0];
    let object = fileManager.loadJson(path);
    let images = object['images'];
    if (!images) {
        // No "images" key means this is not an ImageSet — early return.
        return null;
    }
    let template = {};
    for(const index in images) {
        let image = images[index];
        let idiom = image['idiom'];
        let idiomImages = template[idiom] || {};
        let scale = image['scale'];
        let fileName = image['filename'];
        // appearances[0].value selects a light/dark variant; entries
        // without an appearance fall into the 'any' style bucket.
        let appearances = image['appearances'] || [];
        let colorAppearance = appearances[0] || {};
        let style = colorAppearance['value'] || 'any';
        if (fileName) {
            let styleImages = idiomImages[style] || {};
            styleImages[scale] = fileName;
            idiomImages[style] = styleImages;
            template[idiom] = idiomImages;
        }
    }
    return {
        'name': assetName,
        'path': assetPath,
        'value': template
    };
}
/**
 * Parse a ColorSet's Contents.json into a {name, value} object with
 * 'any'/'light'/'dark' color components.
 * Returns null when the file is not a ColorSet.
 *
 * @param {string} path - path to a Contents.json file
 */
function parseColorSetObject(path) {
    let template = {};
    // Derive the asset directory and its name from the file path.
    let assetPath = path.replace(/\/Contents.json/g, '');
    let matchStrings = assetPath.match(/([^/]+?)?$/);
    let assetName = matchStrings[0];
    let object = fileManager.loadJson(path);
    let colors = object['colors'];
    if (!colors) {
        // No "colors" key means this is not a ColorSet — early return.
        return null;
    }
    // Extract one component per appearance mode.
    template['any'] = parseAnyColorObject(colors);
    template['light'] = parseLightColorObject(colors);
    template['dark'] = parseDarkColorObject(colors);
    return {
        'name': assetName,
        'value': template
    };
}
// Extract the universal ("Any" appearance) color component.
function parseAnyColorObject(colors) {
    let result = blancComponent;
    // An entry without an appearances array is the "any" color; if
    // several exist, the last one in iteration order wins.
    for (const entry of colors) {
        const appearanceList = entry['appearances'];
        const component = entry['color'];
        if (entry && !appearanceList && component) {
            result = parseColorComponent(component);
        }
    }
    return result;
}
// Extract the light-mode color component.
function parseLightColorObject(colors) {
    let result = blancComponent;
    for (const entry of colors) {
        const appearanceList = entry['appearances'];
        const component = entry['color'];
        // Only entries that carry an appearances array can declare a mode;
        // the first appearance's value selects light vs dark.
        if (entry && appearanceList && component && appearanceList[0]['value'] == "light") {
            result = parseColorComponent(component);
        }
    }
    return result;
}
// Extract the dark-mode color component.
function parseDarkColorObject(colors) {
    let result = blancComponent;
    for (const entry of colors) {
        const appearanceList = entry['appearances'];
        const component = entry['color'];
        // Only entries that carry an appearances array can declare a mode;
        // the first appearance's value selects light vs dark.
        if (entry && appearanceList && component && appearanceList[0]['value'] == "dark") {
            result = parseColorComponent(component);
        }
    }
    return result;
}
// Convert an asset-catalog color entry into {red, green, blue, alpha, hex}.
// Returns blancComponent when the RGB values are missing/non-numeric.
function parseColorComponent(colorComponent) {
    let red = Number(colorComponent['components']['red']);
    let green = Number(colorComponent['components']['green']);
    let blue = Number(colorComponent['components']['blue']);
    let alpha = Number(colorComponent['components']['alpha']);
    // BUG FIX: the original used `!red && !green && !blue`, which also
    // matched a valid pure black (0, 0, 0) and returned the blank
    // placeholder for it. Only genuinely non-numeric values should bail.
    if (Number.isNaN(red) && Number.isNaN(green) && Number.isNaN(blue)) {
        return blancComponent;
    } else {
        // Values given in 0.0–1.0 notation are scaled up to 0–255.
        if (red <= 1.0 && green <= 1.0 && blue <= 1.0) {
            red = parseDecimalPointRGB(red);
            green = parseDecimalPointRGB(green);
            blue = parseDecimalPointRGB(blue);
        }
        let hexColor = parseHexColor(red, green, blue);
        return {
            'red': red,
            'green': green,
            'blue': blue,
            'alpha': alpha,
            'hex': hexColor
        }
    }
}
// Scale a normalized 0.0–1.0 channel value up to the 0–255 range.
function parseDecimalPointRGB(colorValue) {
    return 255.0 * colorValue;
}
// Build a "#rrggbb" hex string from 0–255 channel values.
function parseHexColor(red, green, blue) {
    // Each channel is floored, rendered in base 16, and left-padded to
    // two digits.
    const toHex = function (value) {
        return ("0" + Math.floor(value).toString(16)).slice(-2);
    };
    return "#" + toHex(red) + toHex(green) + toHex(blue);
}
require 'test_helper'

# Exercises the Event-dimension ETL: creates one NetworkEvent, runs the
# ETL, and verifies the extracted dimension row.
class ETLDimensionsEventTest < ActiveSupport::TestCase
  def setup
    # A fresh event tied to fixture location/program records.
    @network_event = NetworkEvent.new(
      name: '<NAME>',
      location: locations(:tuggle),
      program: programs(:network_night)
    )
    @network_event.save!

    Etl::Dimensions::Event.run

    # The dimension row the ETL produced for the event above.
    @event_dimension = EventDimension.
      where(network_event_id: @network_event.id).
      first
  end

  test 'Event dimension ETL is idempotent' do
    assert_no_difference('EventDimension.count') do
      Etl::Dimensions::Event.run
    end
  end

  test 'All events are extracted' do
    assert_equal NetworkEvent.count, EventDimension.count
  end

  test 'Location is extracted' do
    assert_equal @network_event.location.name, @event_dimension.location
  end

  test 'Program is extracted' do
    assert_equal @network_event.program.name, @event_dimension.program
  end
end
#!/bin/sh
# Build an ipset restore file containing all China IPv4 routes published
# by APNIC.

# Download the latest APNIC delegation stats and convert each CN IPv4
# allocation ($4 = base address, $5 = host count) into CIDR notation.
wget -c -O- 'http://ftp.apnic.net/apnic/stats/apnic/delegated-apnic-latest' | awk -F\| '/CN\|ipv4/ { printf("%s/%d\n", $4, 32-log($5)/log(2)) }' > \
./china_ip_list.txt

# Merge adjacent/overlapping CIDR ranges.
./CIDR.sh < china_ip_list.txt > cnip-merge.txt

# Start the restore file fresh. BUG FIX: the original appended (">>") to
# any pre-existing file, so repeated runs accumulated duplicate entries.
echo create china_routes hash:net family inet hashsize 2048 maxelem 9999 > ipset-save-china-router.list
while read ip;do
    echo add china_routes "$ip" >> ipset-save-china-router.list
done < cnip-merge.txt
|
<reponame>Fryguy/manageiq-providers-scvmm<gh_stars>0
# Specs for SCVMM (Microsoft InfraManager) VM provisioning: mount-point
# parsing, clone preparation, and PowerShell CPU/network script building.
# NOTE(review): uses the legacy FactoryGirl constant; newer codebases
# call it FactoryBot — confirm which the suite provides.
describe ManageIQ::Providers::Microsoft::InfraManager::Provision do
  # Provision request under test; relies on the ivars assigned in
  # the before(:each) block below.
  let(:vm_prov) do
    FactoryGirl.create(
      :miq_provision_microsoft,
      :userid => @admin.userid,
      :miq_request => @pr,
      :source => @vm_template,
      :request_type => 'template',
      :state => 'pending',
      :status => 'Ok',
      :options => @options
    )
  end
  let(:regex) { ManageIQ::Providers::Microsoft::InfraManager::Provision::Cloning::MT_POINT_REGEX }

  context "MT_POINT_REGEX" do
    it "matches a storage name with a drive letter" do
      string = "file://foo.cfme-qe.redhat.com/J:/"
      expect(string.scan(regex).flatten.first).to eql("J:/")
    end

    it "matches a storage name with a drive letter and path" do
      string = "file://foo.cfme-qe.redhat.com/C:/ClusterStorage/netapp_crud_vol"
      expect(string.scan(regex).flatten.first).to eql("C:/ClusterStorage/netapp_crud_vol")
    end

    it "matches a storage name without a drive letter" do
      string = "file://foo123.redhat.com///clusterstore.xx-yy-redhat.com/cdrive"
      expect(string.scan(regex).flatten.first).to eql("//clusterstore.xx-yy-redhat.com/cdrive")
    end
  end

  context "A new provision request," do
    before(:each) do
      # Shared fixtures: template, VM, request and provisioning options.
      @os = OperatingSystem.new(:product_name => 'Microsoft Windows')
      @admin = FactoryGirl.create(:user_admin)
      @target_vm_name = 'clone test'
      @ems = FactoryGirl.create(:ems_microsoft_with_authentication)
      @vm_template = FactoryGirl.create(
        :template_microsoft,
        :name => "template1",
        :ext_management_system => @ems,
        :operating_system => @os,
        :cpu_limit => -1,
        :cpu_reserve => 0)
      @vm = FactoryGirl.create(:vm_microsoft, :name => "vm1", :location => "abc/def.xml")
      @pr = FactoryGirl.create(:miq_provision_request, :requester => @admin, :src_vm_id => @vm_template.id)
      @options = {
        :pass => 1,
        :vm_name => @target_vm_name,
        :vm_target_name => @target_vm_name,
        :number_of_vms => 1,
        :cpu_limit => -1,
        :cpu_reserve => 0,
        :provision_type => "microsoft",
        :src_vm_id => [@vm_template.id, @vm_template.name]
      }
    end

    context "SCVMM provisioning" do
      it "#workflow" do
        workflow_class = ManageIQ::Providers::Microsoft::InfraManager::ProvisionWorkflow
        allow_any_instance_of(workflow_class).to receive(:get_dialogs).and_return(:dialogs => {})

        expect(vm_prov.workflow.class).to eq workflow_class
        expect(vm_prov.workflow_class).to eq workflow_class
      end
    end

    context "#prepare_for_clone_task" do
      before do
        @host = FactoryGirl.create(:host_microsoft, :ems_ref => "test_ref")
        allow(vm_prov).to receive(:dest_host).and_return(@host)
      end

      it "with default options" do
        clone_options = vm_prov.prepare_for_clone_task
        expect(clone_options[:name]).to eq(@target_vm_name)
        expect(clone_options[:host]).to eq(@host)
      end
    end

    context "#parse mount point" do
      before do
        # URL-encoded datastore name; dest_mount_point must decode and
        # convert it to a Windows-style path.
        ds_name = "file://server.local/C:/ClusterStorage/CLUSP04%20Prod%20Volume%203-1"
        @datastore = FactoryGirl.create(:storage, :name => ds_name)
        allow(vm_prov).to receive(:dest_datastore).and_return(@datastore)
      end

      it "valid drive" do
        expect(vm_prov.dest_mount_point).to eq("C:\\ClusterStorage\\CLUSP04 Prod Volume 3-1")
      end
    end

    context "#no network adapter available" do
      it "set adapter" do
        expect(vm_prov.network_adapter_ps_script).to be_nil
      end
    end

    context "#network adapter available" do
      before do
        @options[:vlan] = "virtualnetwork1"
      end

      it "set adapter" do
        expect(vm_prov.network_adapter_ps_script).to_not be_nil
      end
    end

    context "#no cpu limit or reservation set" do
      before do
        @options[:number_of_sockets] = 2
        @options[:cpu_limit] = nil
        @options[:cpu_reserve] = nil
      end

      it "set vm" do
        expect(vm_prov.cpu_ps_script).to eq("-CPUCount 2 ")
      end
    end

    context "#cpu limit set" do
      before do
        @options[:cpu_limit] = 40
        @options[:cpu_reserve] = nil
        @options[:number_of_sockets] = 2
      end

      it "set vm" do
        expect(vm_prov.cpu_ps_script).to eq("-CPUCount 2 -CPUMaximumPercent 40 ")
      end
    end

    context "#cpu reservations set" do
      before do
        @options[:cpu_reserve] = 15
        @options[:cpu_limit] = nil
        @options[:number_of_sockets] = 2
      end

      it "set vm" do
        expect(vm_prov.cpu_ps_script).to eq("-CPUCount 2 -CPUReserve 15 ")
      end
    end
  end
end
|
package com.example.Tests.Example2.Service;
import com.example.Tests.Example2.ApplicationConfig.Customer;
import java.io.IOException;
import java.util.List;
/**
* @author <NAME> on 18.29.5
*/
/**
 * Service-layer contract for managing {@link Customer} records.
 *
 * <p>NOTE(review): the mutating operations take no parameters and declare
 * {@link IOException} — presumably implementations read their input
 * interactively (console/file); confirm against the implementing class.
 */
public interface CustomerService {
/**
 * Creates and stores a new customer.
 *
 * @throws IOException if reading the customer data fails
 */
void addCustomer() throws IOException;
/**
 * Updates an existing customer.
 *
 * @throws IOException if reading the updated data fails
 */
void editCustomer() throws IOException;
/**
 * Removes a customer.
 *
 * @throws IOException if reading the selection fails
 */
void deleteCustomer() throws IOException;
/** Sorts customers by date and surname (presumably printing the result). */
void sortDateSurname();
/** Reports customers with outstanding debt. */
void debtors();
/** Aggregates price information across all customers. */
void allPrice();
/**
 * Looks up a single customer by id.
 *
 * @param customerId the customer's identifier
 * @return the matching customer, or possibly {@code null} if absent —
 *         nullability not specified here; confirm with the implementation
 */
Customer find(long customerId);
/**
 * @return all known customers
 */
List<Customer> findAll();
/** Prints all customers (output target defined by the implementation). */
void printCustomers();
}
|
#!/bin/sh
# Run cassabon with the Go race detector enabled, forwarding all
# command-line arguments to the program.
#
# "$@" (quoted) expands each original argument as a separate word; the
# previous unquoted $@ would word-split and glob-expand arguments that
# contain spaces or wildcards.
go run -race cassabon.go "$@"
|
#!/bin/bash
# Launch the depth-completion toy-problems container with GPU and X11 access.

# DEFAULT VALUES
GPUIDS="0"
NAME="toyProblems_depthCompletion_GPU"
# Export NV_GPU so the nvidia-docker child process can actually read it;
# a plain (unexported) assignment is invisible to child processes.
export NV_GPU="$GPUIDS"
# Allows everybody to use your host x server
# NOTE(review): `xhost +` disables X access control for ALL hosts; consider
# the narrower `xhost +local:docker` instead.
xhost +
# Run docker
nvidia-docker run -it --rm --shm-size 12G \
  -p 5700:5700 \
  --name "${NAME}0" \
  -v /home/olorin/nicolas/bnn:/root/ \
  -v /media/olorin/Documentos/datasets/kitti/depth/depth_prediction/data:/root/data/kitti_depth \
  -v /media/olorin/Documentos/datasets/kitti/raw_data/data:/root/data/kitti_rgb/train \
  -v /tmp/.X11-unix/:/tmp/.X11-unix/ \
  --env "DISPLAY" \
  --env QT_X11_NO_MITSHM=1 \
  fregu856/evaluating_bdl:pytorch_pytorch_0.4_cuda9_cudnn7_evaluating_bdl bash
# Alternate kitti_depth mount kept for reference:
# -v /media/nicolas/nicolas_seagate/datasets/kitti/depth/depth_prediction/data:/root/data/kitti_depth \
|
/**
 * Root Vuex store.
 * State persistence across page reloads is provided by the vuex-along plugin.
 */
import Vue from 'vue';
import Vuex from 'vuex';
import shopCarModule from './modules/shopCar';
import userModule from './modules/user';
import createVuexAlong from 'vuex-along';

Vue.use(Vuex);

export default new Vuex.Store({
  // Outside production, throw when state is mutated outside a mutation handler.
  strict: process.env.NODE_ENV !== 'production',
  modules: {
    shopCarModule,
    userModule,
  },
  plugins: [
    // With no options, vuex-along persists the whole store permanently
    // (localStorage). Available options, left at their defaults:
    //   name:    key for the persisted collection (default: "vuex-along")
    //   local:   sub-state to persist permanently
    //   session: sub-state to persist per browser session, e.g.
    //            list: ["shopCarModule.shopMessage", "userModule.users"]
    createVuexAlong({}),
  ],
});
|
#!/bin/bash
#=================================================
# Description: DIY script
# Lisence: MIT
# Author: Discuzamoy
#=================================================
# Post-feed customization applied to the OpenWrt build tree before compiling.
# Modify default IP
# rm -rf package/litte/luci-app-smartdns
# git clone https://github.com/kenzok8/openwrt-packages.git package/openwrt-package
# rm -rf package/openwrt-package/luci-app-ssr-plus
# git clone https://github.com/kenzok8/openwrt-packages.git
# git clone https://github.com/kenzok8/small.git
# ./scripts/feeds update -a
# ./scripts/feeds install
# Rebrand the firmware: replace every "OpenWrt" in the generated default
# config with "Discuzamoy".
sed -i 's/OpenWrt/Discuzamoy/g' package/base-files/files/bin/config_generate
# sed -i 's/192.168.1.1/192.168.99.1/g' package/base-files/files/bin/config_generate
# Bake a root password into the image by replacing the empty root entry in
# /etc/shadow with a preset MD5-crypt hash.
# NOTE(review): committing a password hash to a public repo lets anyone
# crack it offline — the shipped image's root password should be treated
# as compromised; prefer setting it post-install.
sed -i 's/root::0:0:99999:7:::/root:$1$s0nqgFtf$90M3V1HtmSsyb.gNF53Xd1:18307:0:99999:7:::/g' package/base-files/files/etc/shadow
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.