text
stringlengths 1
1.05M
|
|---|
#!/bin/sh
# We do not use crond because it brings problems when using docker user namespace or not running as cron
# https://serverfault.com/questions/836091/crond-cant-set-groups-operation-not-permitted
# I have tried playing with docker capabilities, but it is just cleaner to run the script every 2 minutes like this
# - Disadvantages:
# - we won't know at what exact time the script will be run
# - no email sent in case of failure, as this is the case for normal cron tasks
# - Advantage: no concurrent runs
#
# We *do* allow commands with non-zero return statuses,
# as cron tasks may fail and need to be re-run later
set -u

# Poll forever: wait two minutes, then run the cron entry point,
# capturing all of its output (stdout and stderr) in the cron log.
while :; do
    sleep 120
    /bin/sh /var/www/html/cron/cron.sh >/var/www/html/cache/logs/cron.log 2>&1
done
|
import {
REPORT_ERROR,
GET_REPORTS,
ADD_REPORT
} from '../actions/types'
const initialState = {
reports: [],
loading: true,
error: {}
}
/**
 * Reducer for the reports slice of state.
 * Handles fetching the full report list, prepending a newly added report,
 * and recording request errors; every handled action clears the loading flag.
 */
function reportReducer (state = initialState, action) {
  if (action.type === GET_REPORTS) {
    // Replace the whole list with the fetched payload.
    return { ...state, reports: action.payload, loading: false }
  }
  if (action.type === ADD_REPORT) {
    // Newest report goes first.
    return { ...state, reports: [action.payload, ...state.reports], loading: false }
  }
  if (action.type === REPORT_ERROR) {
    // Keep existing reports; just record the error payload.
    return { ...state, error: action.payload, loading: false }
  }
  // Unknown action: return state untouched.
  return state
}
export default reportReducer
|
<gh_stars>1-10
import sys

# Simulation number from the command line; used to build both file names.
num = sys.argv[1]

# Header-line counter: read-pair headers arrive consecutively, and only the
# second header of each pair (odd count) carries the HIC contact record.
i = 0
# Convert simulated Hi-C fastq headers into BED-pair lines on chr21.
with open("sim{}_chr21.bed".format(num), "w") as out_file, \
        open("sim{}_chr21.fastq".format(num)) as in_file:
    for line in in_file:
        if line[0] == "@":  # header lines only
            fields = line.strip().split()
            # assumes header layout "@name <tag> HIC chrom:start chrom:start" -- TODO confirm
            if i % 2 == 1 and fields[2] == "HIC":
                chrom1, start1 = fields[3].split(":")
                chrom2, start2 = fields[4].split(":")
                # One-base-wide intervals for both contact ends, score "1".
                out_file.write("\t".join((chrom1, start1, str(int(start1) + 1),
                                          chrom2, start2, str(int(start2) + 1), "1")))
                out_file.write("\n")
            i += 1
# Fix: the original called in_file.close()/out_file.close() after the `with`
# blocks; those calls were redundant (the context managers already closed them).
|
#! /bin/bash
# CK installation script for TensorFlow models
#
# See CK LICENSE.txt for licensing details
# See CK COPYRIGHT.txt for copyright details
#
echo ""
echo "Compiling Protobuf... "

# Compile every object-detection .proto into Python modules, in place.
cd ${INSTALL_DIR}/${PACKAGE_SUB_DIR}/research
if ! ${CK_ENV_LIB_PROTOBUF_HOST_BIN}/protoc object_detection/protos/*.proto --python_out=./ ; then
  echo "Error: Compiling Protobuf failed!"
  exit 1
fi
|
/**
 * A single quiz question: its prompt, the candidate answers, optional media
 * URLs, a per-question countdown and the score awarded for a correct answer.
 */
export class QuizQuestion {
    question: string;
    answers: string[];
    imgUrl: string;
    videoUrl: string;
    countdownSeconds: number;
    score: number;

    constructor(question: string, answers: string[], imgUrl: string, videoUrl: string, countdownSeconds: number, score: number) {
        this.question = question;
        this.answers = answers;
        this.imgUrl = imgUrl;
        this.videoUrl = videoUrl;
        this.countdownSeconds = countdownSeconds;
        this.score = score;
    }

    /**
     * Serialize to the wire format (snake_case keys).
     * Fix: builds the object in one literal instead of mutating an untyped
     * `var json: any` accumulator; the `any` return type is kept so existing
     * callers are unaffected.
     */
    getJSON(): any {
        return {
            question: this.question,
            answers: this.answers,
            img_url: this.imgUrl,
            video_url: this.videoUrl,
            countdown_seconds: this.countdownSeconds,
            score: this.score,
        };
    }
}
|
<reponame>toba/goweb
package token_test
import (
"testing"
"github.com/toba/coreweb/token"
"github.com/stretchr/testify/assert"
)
// TestAuthorizationEncoding round-trips an authorization token: it encodes a
// payload built from a tenant key plus permission IDs, decodes it back, and
// verifies the tenant ID and one permission survive the round trip.
// NOTE(review): the repo marker says toba/goweb but the import path is
// toba/coreweb/token -- confirm the module path is correct.
func TestAuthorizationEncoding(t *testing.T) {
key := int64(123)
// ForAuth packs the tenant key with permission IDs 1..4.
p := token.ForAuth(key, 1, 2, 3, 4)
enc, err := p.Encode()
assert.NoError(t, err)
assert.NotNil(t, enc)
// The second argument presumably toggles signature verification -- TODO confirm.
dec, err := token.DecodeAuthorization(enc, true)
assert.NoError(t, err)
assert.NotNil(t, dec)
assert.Equal(t, key, dec.TenantID)
assert.Contains(t, dec.Permissions, uint16(3))
}
|
#!/bin/bash
# Print the largest of the integer arguments given on the command line.
# Fix: with no arguments the original silently echoed an empty line; now it
# prints a usage message and exits non-zero.
if [ $# -eq 0 ]; then
    echo "usage: $0 NUM [NUM ...]" >&2
    exit 1
fi

# set largest number to first argument
largest=$1

# Loop over given arguments
for arg in "$@"
do
    # Check if current argument is greater than current largest
    if [ "$arg" -gt "$largest" ]; then
        # Set current argument as new largest
        largest=$arg
    fi
done

# Print largest value (quoted to avoid word splitting)
echo "$largest"
|
package com.amaljoyc.patterns.structural.facade;
/**
 * Created by amaljoyc on 19.07.18.
 *
 * Facade-pattern subsystem interface: an account that can be created.
 * Concrete account types (defined elsewhere in this package) implement it.
 */
public interface Account {
// Create the account; implementations define what creation entails.
void create();
}
|
# coding=utf-8
import setuptools
def package_data_dirs(source, sub_folders):
    """Collect every file path found under the given sub folders.

    Paths are returned relative to ``source``, in the shape setuptools'
    ``package_data`` expects.
    """
    import os
    collected = []
    for folder in sub_folders:
        for dirpath, _, filenames in os.walk(os.path.join(source, folder)):
            relative = os.path.relpath(dirpath, source)
            collected.extend(os.path.join(relative, name) for name in filenames)
    return collected
def params():
    """Assemble the keyword arguments for setuptools.setup().

    Every local variable defined here becomes a setup() keyword via the
    ``locals()`` return, so no scratch variables may be left behind.
    """
    name = "OctoPrint-Pushbullet"
    version = "0.0.1"
    description = "Adds support to push OctoPrint events to a Pushbullet channel"
    long_description = "TODO"
    author = "kewljedi"
    author_email = "kewljedi@gmail.com"
    url = "https://github.com/kewljedi/octoprint-pushbullet"
    license = "GPLv3"
    packages = ["octoprint_pushbullet"]
    package_data = {"octoprint_pushbullet": package_data_dirs('octoprint_pushbullet', ['static', 'templates'])}
    include_package_data = True
    zip_safe = False
    # Fix: the original leaked an open file handle and kept blank lines in
    # install_requires (empty requirement strings confuse setuptools).
    with open("requirements.txt") as requirements_file:
        install_requires = [line.strip() for line in requirements_file if line.strip()]
    # Drop the scratch local so it does not leak into the setup() kwargs.
    del requirements_file
    entry_points = {
        "octoprint.plugin": [
            "pushbullet = octoprint_pushbullet"
        ]
    }
    return locals()

setuptools.setup(**params())
|
#!/bin/bash
# Full MapStore2 build pipeline: refresh npm dependencies, bundle, lint,
# test, generate docs, then produce the final WAR with Maven.
# set -e aborts on the first failing step; each phase is timestamped.
set -e
echo "Running NPM install to update dependencies"
echo `date`
npm install
echo "Building MS2 bundles"
echo `date`
npm run compile
echo "Cleanup Documentation"
echo `date`
npm run cleandoc
echo "Checking syntax"
echo `date`
npm run lint
echo "Run MapStore2 tests"
echo `date`
npm test
echo "Creating Documentation"
echo `date`
npm run doc
echo "Building final WAR package"
echo `date`
# Optional first argument overrides the mapstore2.version Maven property.
if [ $# -eq 0 ]
then
mvn clean install
else
mvn clean install -Dmapstore2.version=$1
fi
echo "Final Cleanup"
echo `date`
npm run cleandoc
#!/usr/bin/env sh
# Deploy the built site to the gh-pages branch of the chat repository.
# abort on errors
set -e
# build
npm run build
# navigate into the build output directory
cd dist
# Create a throwaway repo containing only the build output and force-push it
# as gh-pages (deploy history is intentionally discarded on every run).
git init
git add -A
git commit -m 'deploy'
# if you are deploying to https://<USERNAME>.github.io/<REPO>
git push -f git@github.com:lanyshi/vue-firebase-chat.git master:gh-pages
# return to the previous working directory
cd -
use netcdf::{File as NcFile, Variable};
// Assume that Array3 is a 3-dimensional array type
// NOTE(review): as written this chunk is a skeleton -- the struct has no
// fields (so the generic parameter T is unused, E0392), and the `File` trait,
// `Numeric` trait, `Path`, and `Error` types are neither defined nor imported
// here. Presumably they come from elsewhere in the crate -- confirm.
struct Array3<T> {
// Implementation details for Array3
}
// Assume that Numeric is a trait representing numeric types
impl<T: Numeric> Array3<T> {
// Other methods and implementation details for Array3
}
// Loader that reads the "data" variable of a NetCDF file into an Array3.
impl<T: Numeric> File for Array3<T> {
fn load(path: &Path) -> Result<Array3<T>, Error> {
// Open the NetCDF file, converting the netcdf error into our Error type.
let file = NcFile::open(path).map_err(|e| Error::from(e))?;
// "data" is the only variable this loader understands.
let data_var = file.variable("data").ok_or("Missing variable 'data'.")?;
let shape = data_var.shape().to_vec();
// Flat, default-filled buffer sized to the full variable extent.
let mut data = vec![T::default(); shape.iter().product()];
// Read the entire variable (None = no hyperslab selection) -- TODO confirm
// this matches the netcdf crate version in use.
data_var.read(data.as_mut_slice(), None).map_err(|e| Error::from(e))?;
// Assuming Array3 has a method to create from raw data and shape
let array3 = Array3::from_raw_data(shape, data);
Ok(array3)
}
}
|
package domaine.bizz.interfaces;
import domaine.dto.UserInfoDto;
// Business-layer view of user info: adds pre-insert validation on top of the
// DTO contract.
public interface UserInfoBizz extends UserInfoDto {
/**
 * Checks that all fields are valid before the entity is inserted.
 * (Translated from the original French: "Vérifie si tous les champs sont valides.")
 */
public void checkBeforeInsert();
}
|
<filename>m700.py
# coding: utf-8
'''
Communicate with Mitsubishi Electric CNC M700 series using EZSocket.
The object of communication is the machining center Mitsubishi CNC M700 / M700V / M70 / M70V.
'''
from enum import Enum
import threading
import pythoncom
import win32com.client
from win32com.client import VARIANT
class M700 ():
# Use the same instance for the same host connection in the same thread
# The same thread is because it is complicated to share COM objects with different threads
__connections = {}
@classmethod
def get_connection(cls, host):
key = str(threading.current_thread(). ident) + "_" + host
if key not in cls .__connections:
cls .__connections[key] = M700(host)
return cls .__connections[key]
#Unique value management for 1-255
__uno_list = [False] * 255
@classmethod
def alloc_unitno(cls):
'''Return an unused unit number in EZSocket.
Returns:
int: Unit number
'''
for i, v in enumerate(cls .__uno_list):
if v == False:
cls .__uno_list[i] = True
return i + 1
raise Exception("Unit number exceeds 255. Too many simultaneous connections")
@classmethod
def release_unitno(cls, uno):
cls .__uno_list[uno-1] = False
# --- In-class enumeration ---
class RunStatus (Enum):
'''Operation status (value corresponds to the value returned by M700)'''
NOT_AUTO_RUN = 0
AUTO_RUN = 1
class Position (Enum):
'''X, Y, Z coordinate specification (value corresponds to the returned value of M700)'''
X = 1
Y = 2
Z = 3
class ProgramType (Enum):
'''Main or subprogram (value corresponds to the value returned by M700)'''
MAIN = 0
SUB = 1
class NCProgramFileOpenMode (Enum):
'''Mode to use when opening program file in NC'''
READ = 1
WRITE = 2
OVER_WRITE = 3
__ip = None
__port = None
__isopen = False
__ezcom = None
__lock = threading.RLock()
def __init__(self, host):
'''
Args:
host: IP address: port number
'''
pythoncom.CoInitialize() # When executing with multiple threads, the COM object must be initialized
self.__ip, self.__port = host.split(':')
def __str__(self):
return self.__ip + ":" + self.__port + "" + ("Open" if self.__isopen else "Close")
def __open(self):
    '''Open a connection for the IP and unit number given as arguments.
    If it is called again after being opened, nothing is done.'''
    if not self.__isopen:
        # Create the EZSocket COM automation object and point it at the CNC.
        self.__ezcom = win32com.client.Dispatch('EZNcAut.DispEZNcCommunication')
        errcd = self.__ezcom.SetTCPIPProtocol(self.__ip, int(self.__port))
        # Reserve a process-wide unique unit number (1-255) for this link.
        self.__unitno = M700.alloc_unitno()
        self.__raise_error(errcd)
        # Argument: Machine type number (fixed), unit number, timeout, COM host name
        # Machine type 6 = EZNC_SYS_MELDAS700M (Machine Center Mitsubishi CNC M700 / M700V / M70 / M70V)
        # The unit number must be unique within 1 ~ 255.
        # NOTE(review): the original comment claimed "timeout 100 milliseconds"
        # but the value actually passed is 30 -- confirm the intended unit/value.
        errcd = self.__ezcom.Open2(6, self.__unitno, 30, 'EZNC_LOCALHOST')
        self.__raise_error(errcd)
        self.__isopen = True
def close(self):
    '''Close the connection.
    No exception is returned to the caller if an internal error occurs
    '''
    try:
        # Return this connection's unit number to the shared pool first so it
        # can be reused even if the COM Close() below fails.
        M700.release_unitno(self.__unitno) # Release unit number
        self.__isopen = False
        self.__ezcom.Close()
    except:
        # Best-effort teardown by design: swallow all errors.
        pass
    try:
        # Release the COM object reference separately from closing the link.
        self.__ezcom.Release()
    except:
        pass
def is_open(self):
'''After __open () processing, check if the connection is open.
Return:
bool: True if the connection is open
'''
with self.__lock:
try:
self.__open()
except:
pass
return self.__isopen
# --- NC information acquisition related ---
def get_drive_infomation(self):
'''Return available drive names.
Note: The drive name is originally obtained as "drive name: CRLF drive name: CRLF ... drive name: CRLF \ 0".
If there are multiple drives, you need to split.
Return:
str: Drive information
'''
with self.__lock:
self.__open()
errcd, drive_info = self.__ezcom.File_GetDriveInformation()
self.__raise_error(errcd)
return drive_info[0: 4]
def get_version(self):
'''Return NC version
Return:
str: Version information
'''
with self.__lock:
self.__open()
errcd, version = self.__ezcom.System_GetVersion(1, 0)
self.__raise_error(errcd)
return version
def get_current_position(self, axisno):
'''Get current coordinate position.
Args:
axisno (M700.Position. *): Pass X or Y or Z as an argument.
Return:
float: Current coordinate position
'''
with self.__lock:
if not isinstance(axisno, M700.Position):
raise Exception('Specify the enumeration [M700.Position. *]')
# in_1: The axis you want to get. 1 = x, 2 = y, 3 = z
# pos: Current position.
self.__open()
errcd, pos = self.__ezcom.Position_GetCurrentPosition(axisno.value)
self.__raise_error(errcd)
return pos
def get_run_status(self):
'''Obtain operating status.
Return:
M700.RunStatus: Returns the enumeration [M700.RunStatus].
'''
with self.__lock:
# in_1: Driving type. 1 = Is automatic operation in progress?
# status: 0 = Not in automatic operation. 1 = automatic driving
self.__open()
errcd, status = self.__ezcom.Status_GetRunStatus(1)
self.__raise_error(errcd)
if M700.RunStatus.AUTO_RUN.value == status:
return M700.RunStatus.AUTO_RUN
else:
return M700.RunStatus.NOT_AUTO_RUN
def get_rpm (self):
'''Obtain rotation speed (0 ~ [rpm]).
Return:
int: number of rotations
'''
with self.__lock:
# in_1: Specify the parameter number of the specified spindle. 2 = Spindle (SR, SF) rotation speed. 0 ~ [rpm]
# in_2: Specify the spindle number.
# data: Returns the spindle status.
# info: Get spindle information as UNICODE character string.
self.__open ()
errcd, data, info = self.__ezcom.Monitor_GetSpindleMonitor (2, 1)
self.__raise_error (errcd)
return data
def get_load (self):
'''Load (0 ~ [%]) acquisition.
Return:
int: load
'''
with self.__lock:
# in_1: Specify the parameter number of the specified spindle. 3 = Load. Spindle motor load. 0 ~ [%]
# in_2: Specify the spindle number.
# data: Returns the spindle status.
# info: Get spindle information as UNICODE character string.
self.__open ()
errcd, data, info = self.__ezcom.Monitor_GetSpindleMonitor (3, 1)
self.__raise_error (errcd)
return data
def get_cycle_counter (self):
'''
'''
with self.__lock:
# As per docs, IIndex = 10 returns cycle counter
self.__open ()
errcd, data, info = self.__ezcom.Monitor_GetSpindleMonitor (10, 1)
self.__raise_error (errcd)
return data
def get_var_name (self, iindex):
with self.__lock:
self.__open ()
errcd, data = self.__ezcom.CommonVarialbe_GetName (iindex)
self.__raise_error (errcd)
return data
def get_mgn_size (self):
'''Magazine size acquisition.
Return:
int: magazine size
'''
with self.__lock:
# size: Total number of magazine pots. Value: 0 to 360 (maximum).
self.__open ()
errcd, size = self.__ezcom.ATC_GetMGNSize ()
self.__raise_error (errcd)
return size
def get_mgn_ready (self):
'''Get the number of installed tool.
Return:
int: Tool number
'''
with self.__lock:
# in_1: Specify the magazine number. Value: 1 to 2 (In the M700 / M800 series, setting a value has no effect)
# in_2: Specify the standby state. 0 = Installed tool number, 1 = Standby 1 tool number. Same as 2,3,4 = 1.
# toolno: Returns the tool number. Value is from 1 to 99999999 (maximum)
self.__open ()
errcd, toolno = self.__ezcom.ATC_GetMGNReady2 (1, 0)
self.__raise_error (errcd)
return toolno
def get_toolset_size (self):
'''Get size of toolset
Tool set means correction value NO
Return:
int: Tool set size
'''
with self.__lock:
# plSize: 200 = 200 [set]
self.__open ()
errcd, size = self.__ezcom.Tool_GetToolSetSize ()
self.__raise_error (errcd)
return size
def get_tool_offset_h (self, toolset_no):
'''Tool set number long offset value
Return:
int: long
'''
with self.__lock:
# lType: Tool offset type 4 = Machining center type II
# lKind: Offset type 0 = long, 1 = long wear, 2 = diameter, 3 = diameter wear
# lToolSetNo: Tool set number
# pdOffset As DOUBLE * (O) Offset amount
# plNo As LONG * (O) Virtual cutting edge number
self.__open ()
errcd, h, plno = self.__ezcom.Tool_GetOffset2 (4, 0, toolset_no)
self.__raise_error (errcd)
return h
def get_tool_offset_d (self, toolset_no):
'''Long offset diameter of tool set number
Return:
int: Diameter
'''
with self.__lock:
self.__open ()
errcd, d, plno = self.__ezcom.Tool_GetOffset2 (4, 2, toolset_no)
self.__raise_error (errcd)
return d
def set_tool_offset_h (self, toolset_no, h):
    '''Set the tool length (H) compensation value for a tool set number.

    Args:
        toolset_no (int): Tool set number.
        h (float): Length offset amount to write.
    '''
    with self.__lock:
        # lType: Tool offset type 4 = Machining center type II
        # lKind: Offset type 0 = long, 1 = long wear, 2 = diameter, 3 = diameter wear
        # lToolSetNo: Tool set number
        # pdOffset As DOUBLE * Offset amount
        # plNo As LONG * Virtual cutting edge number
        self.__open ()
        errcd = self.__ezcom.Tool_SetOffset (4, 0, toolset_no, h, 0)
        self.__raise_error (errcd)
        # Bug fix: the original also issued a diameter write here using an
        # undefined variable `d`, which raised NameError at runtime. Diameter
        # offsets are handled by set_tool_offset_d().
def set_tool_offset_d (self, toolset_no, d):
'''Set tool set number offset diameter compensation value'''
with self.__lock:
self.__open ()
errcd = self.__ezcom.Tool_SetOffset (4, 2, toolset_no, d, 0)
self.__raise_error (errcd)
def get_program_number (self, progtype):
'''Obtains the program number during search completion or automatic operation.
Args:
progtype (M700.ProgramType. *): Pass MAIN or SUB as an argument.
Return:
str: Program number
'''
with self.__lock:
if not isinstance (progtype, M700.ProgramType):
raise Exception ('Please specify enumeration [M700.ProgramType. *]')
# in_1: 0 = Main program, 1 = Sub program
self.__open ()
errcd, msg = self.__ezcom.Program_GetProgramNumber2 (progtype.value)
self.__raise_error (errcd)
return msg
def get_alarm (self):
'''Get alerts.
Return:
str: error message
'''
with self.__lock:
# in_1: Number of message lines to retrieve. 1 to 10 (maximum)
# in_2: Alarm type to be acquired.
# msg: Error message
self.__open ()
errcd, msg = self.__ezcom.System_GetAlarm2 (3, 0)
self.__raise_error (errcd)
return msg
# --- NC program file operation related ---
def read_file (self, path):
'''Read the file.
Args:
path (str): Absolute path exp) M01: \ PRG \ USER \ 100
Return:
bytes: Returns the read byte data.
'''
with self.__lock:
self.__open ()
try:
errcd = self.__ezcom.File_OpenFile3 (path, M700.NCProgramFileOpenMode.READ.value)
self.__raise_error (errcd)
result = b''
while True:
errcd, data = self.__ezcom.File_ReadFile2 (256) #The size of data to be read at one time in bytes
self.__raise_error (errcd)
result += data #VARIANT of the read byte data array
if len (data) <256:
break
return result
finally:
try:
self.__ezcom.File_CloseFile2 ()
except:
pass
def write_file (self, path, data):
'''Write to file.
Args:
path (str): Absolute path exp) M01: \ PRG \ USER \ 100
data (bytes): Pass the data to be written as byte data
'''
with self.__lock:
self.__open ()
try:
errcd = self.__ezcom.File_OpenFile3 (path, M700.NCProgramFileOpenMode.OVER_WRITE.value)
self.__raise_error (errcd)
errcd = self.__ezcom.File_WriteFile (memoryview (data)) # Array of byte data to write
self.__raise_error (errcd)
finally:
try:
self.__ezcom.File_CloseFile2 ()
except:
pass
def delete_file (self, path):
'''Delete the file with the specified path name.
Args:
path (str): Absolute path exp) M01: \ PRG \ USER \ 100
'''
with self.__lock:
self.__open ()
errcd = self.__ezcom.File_Delete2 (path)
self.__raise_error (errcd)
# --- NC directory operation related-
def find_dir (self, path):
    '''Search for files and folders by path name.

    Args:
        path (str): Directory path exp) M01:\\PRG\\USER\\
    Return:
        list: A list of search results. The contents are managed as dictionary data.
            exp) [{'type': 'file', 'name': '100', 'size': '19', 'comment': 'BY IKEHARA'}, ...]
    '''
    with self.__lock:
        result = []
        try:
            self.__open()
            # M01 -> "M" + unit number as two hex digits.
            # Fix: the original spec "M {: 02X}" contained stray spaces and
            # produced e.g. "M  A" instead of "M0A".
            path = path.replace("M01", "M{:02X}".format(self.__unitno))
            # Get directory information in the specified path
            # (-1 yields a 'directory name\tsize' string).
            errcd, info = self.__ezcom.File_FindDir2(path, -1)
            self.__raise_error(errcd)
            while True:
                # errcd > 1 means directory information is available.
                if errcd > 1:
                    # Fix: the original split on '\ t' (backslash space t),
                    # which never matches the actual tab separator.
                    dir_info = info.split('\t')
                    data = {
                        'type': 'folder',
                        'name': dir_info[0],
                        'size': '{:,}'.format(int(dir_info[1])),
                        'comment': None
                    }
                    result.append(data)
                else:
                    break
                errcd, info = self.__ezcom.File_FindNextDir2()
                self.__raise_error(errcd)
            # Reset the directory cursor before the file pass.
            errcd = self.__ezcom.File_ResetDir()
            self.__raise_error(errcd)
            # Get the file information in the specified path
            # (5 yields a 'file name\tsize\tcomment' string).
            errcd, info = self.__ezcom.File_FindDir2(path, 5)
            self.__raise_error(errcd)
            while True:
                # errcd > 1 means file information is available.
                if errcd > 1:
                    dir_info = info.split('\t')
                    data = {
                        'type': 'file',
                        'name': dir_info[0],
                        'size': '{:,}'.format(int(dir_info[1])),
                        'comment': dir_info[2]
                    }
                    result.append(data)
                else:
                    break
                errcd, info = self.__ezcom.File_FindNextDir2()
                self.__raise_error(errcd)
        finally:
            # Always leave the directory cursor reset, best effort.
            try:
                errcd = self.__ezcom.File_ResetDir()
                self.__raise_error(errcd)
            except:
                pass
        return result
# --- NC device operation related ---
def __setting_dev (self, dev, data = 0):
    '''Set the device.

    Args:
        dev (str): Device specification. exp) M810, D10
        data (int): value. 1 to raise the bit, 0 to lower it.
            In the case of read_dev, put an appropriate character as a dummy.
    Raises:
        Exception: if the device is not an M (bit) or D (word) device.
    '''
    data_type = 0 # 1 or 4 or 8 exp) M = 1 (bit type 1bit), D = 4 (word type 16bit)
    if dev [0] == 'M':
        data_type = 1
    elif dev[0] == 'D':
        data_type = 4
    else:
        # Bug fix: the original constructed this Exception without raising
        # it, so unsupported devices silently fell through with data_type = 0.
        raise Exception('Set M device or D device.')
    # in_1: Device character string (Specify the device character string array to be set as VARIANT)
    # in_2: Data type
    # in_3: Device value array
    vDevice = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_BSTR, [dev])
    vDataType = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_I4, [data_type])
    vValue = VARIANT(pythoncom.VT_ARRAY | pythoncom.VT_I4, [data]) # currently only numeric data is written
    errcd = self.__ezcom.Device_SetDevice(vDevice, vDataType, vValue)
    self.__raise_error(errcd)
def __delall_dev(self):
'''Delete all device settings。'''
errcd = self.__ezcom.Device_DeleteAll()
self.__raise_error(errcd)
def read_dev(self, dev):
'''Device read. Read the device value set by __setting_dev.
Args:
dev (str): Device number exp) M900
Return:
int: Returns the value of the read data
'''
with self.__lock:
self.__open()
self.__setting_dev(dev)
errcd, value = self.__ezcom.Device_Read() # value:デバイス値配列が返ってくる。
self.__raise_error(errcd)
self.__delall_dev()
return value[0]
def write_dev(self, dev, data):
'''
Device write. Write the value to the device set with __setting_dev.
Args:
dev (str): Device number exp) M900
data (int): Value to write
'''
with self.__lock:
self.__open()
self.__setting_dev(dev, data)
errcd = self.__ezcom.Device_Write()
self.__delall_dev()
self.__raise_error(errcd)
# --- Error Outputs ---
def __raise_error(self, errcd):
'''Return the error contents as an Exception from the error code.
If there is no error (error code is 0), do nothing.
Error contents are registered in the dictionary in the form of {'hexadecimal error code': 'error detail message'}.
Raises:
Exception: error message
'''
__errmap = {
"0x80a00101": "Communication line not open",
"0x80a00104": "Double Open Error",
"0x80a00105": "Incorrect data type of argument",
"0x80a00106": "Invalid data range of argument",
"0x80a00107": "Not Supported",
"0x80a00109": "Can't open communication line",
"0x80a0010a": "The argument is a null pointer.",
"0x80a0010b": "Invalid data for argument",
"0x80a0010c": "COMM port handle error",
"0x80b00101": "Cannot reserve memory",
"0x80b00102": "EZSocketPc error can not be obtained",
"0x80b00201": "Incorrect mode",
"0x80b00202": "Open file not open",
"0x80b00203": "File already exists",
"0x80b00204": "already open file",
"0x80b00205": "Can't create temporary file",
"0x80b00206": "File is not open in write mode",
"0x80b00207": "Incorrect write data size",
"0x80b00208": "cannot write",
"0x80b00209": "File not opened in read mode",
"0x80b0020a": "unreadable state",
"0x80b0020b": "Can't create temporary file",
"0x80b0020c": "File does not exist (read mode)",
"0x80b0020d": "Can't open file",
"0x80b0020e": "Invalid file path",
"0x80b0020f": "The read file is invalid",
"0x80b00210": "Invalid write file",
"0x80b00301": "Incorrect host name when connecting locally due to automation call",
"0x80b00302": "TCP / IP communication is not set",
"0x80b00303": "Cannot set because you are already communicating",
"0x80b00304": "There is no lower module",
"0x80b00305": "Can not create EZSocketPc object",
"0x80b00401": "Data does not exist",
"0x80b00402": "Data duplication",
"0x80b00501": "No parameter information file",
"0x80020190": "NC card number incorrect",
"0x80020102": "The device has not been opened",
"0x80020132": "Invalid Command",
"0x80020133": "Invalid communication parameter data range",
"0x80030143": "There is a problem with the file system",
"0x80030191": "The directory does not exist",
"0x8003019b": "The drive does not exist",
"0x800301a2": "Directory does not exist",
"0x800301a8": "The drive does not exist",
"0x80050d90": "Invalid system / axis specification",
"0x80050d02": "Incorrect alarm type",
"0x80050d03": "Error in communication data between NC and PC",
"0x80041194": "Incorrect specification of life management data type",
"0x80041195": "Setting data range over",
"0x80041196": "Setting tool number mismatch",
"0x80041197": "Specified tool number out of specification",
"0x80040190": "Invalid system / axis specification",
"0x80040191": "Blank number incorrect",
"0x80040192": "Incorrect Subdivision Number",
"0x80040196": "I can not fit into the buffer prepared by the application",
"0x80040197": "Invalid data type",
"0x8004019d": "The data can not be read",
"0x8004019f": "write only data",
"0x800401a0": "axis specification invalid",
"0x800401a1": "Data number invalid",
"0x800401a3": "No read data",
"0x8004019a": "Invalid read data range",
"0x80040290": "Invalid system / axis specification",
"0x80040291": "Blank number incorrect",
"0x80040292": "Incorrect Subdivision Number",
"0x80040296": "I can not fit into the buffer prepared by the application",
"0x80040297": "Incorrect data type",
"0x8004029b": "Read only data",
"0x8004029e": "Data can not be written",
"0x800402a0": "axis specification invalid",
"0x8004024d": "Secure Password Locked",
"0x800402a2": "Format aborted due to invalid SRAM open parameter",
"0x800402a4": "Can't register edit file (already editing)",
"0x800402a5": "Can't release edit file",
"0x800402a3": "No data to write to",
"0x8004029a": "Invalid write data range",
"0x800402a6": "Security Password not set",
"0x800402a7": "Safety Data Integrity Check Error",
"0x800402a9": "No data type for safety",
"0x800402a8": "Can not write in tool data sort",
"0x80040501": "High-speed readout not registered",
"0x80040402": "priority specified incorrectly",
"0x80040401": "The number of registrations has been exceeded",
"0x80040490": "Incorrect Address",
"0x80040491": "Blank number incorrect",
"0x80040492": "Incorrect Subdivision Number",
"0x80040497": "Incorrect data type",
"0x8004049b": "Read only data",
"0x8004049d": "The data can not be read",
"0x8004049f": "write only data",
"0x800404a0": "Axis specification invalid",
"0x80040ba3": "No rethreading position set",
"0x80030101": "Another directory is already open",
"0x80030103": "Data size over",
"0x80030148": "Long file name",
"0x80030198": "Invalid file name format",
"0x80030190": "Not Opened",
"0x80030194": "File information read error",
"0x80030102": "Another directory has already been opened (PC only)",
"0x800301a0": "not open",
"0x800301a1": "File does not exist",
"0x800301a5": "File information read error",
"0x80030447": "Can not copy (during operation)",
"0x80030403": "Over registration number",
"0x80030401": "The destination file already exists",
"0x80030443": "There is a problem with the file system",
"0x80030448": "Long file name",
"0x80030498": "Invalid file name format",
"0x80030404": "Memory capacity over",
"0x80030491": "Directory does not exist",
"0x8003049b": "The drive does not exist",
"0x80030442": "File does not exist",
"0x80030446": "Can not copy (PLC in operation)",
"0x80030494": "The transfer source file can not be read",
"0x80030495": "Can not write to destination file",
"0x8003044a": "Can not copy (protect)",
"0x80030405": "Verification error",
"0x80030449": "does not support the matching feature",
"0x8003044c": "Copying files",
"0x80030490": "file not open",
"0x8003044d": "Secure Password Locked",
"0x8003049d": "Invalid file format",
"0x8003049e": "The password is different",
"0x800304a4": "File can not be created (PC only)",
"0x800304a3": "Can't open file (PC only)",
"0x80030402": "The destination file already exists",
"0x800304a7": "Invalid file name format",
"0x800304a2": "Directory does not exist",
"0x800304a8": "The drive does not exist",
"0x800304a1": "File does not exist",
"0x800304a5": "The transfer source file can not be read",
"0x800304a6": "Can not write to destination file",
"0x80030406": "Disk capacity over",
"0x800304a0": "file not open",
"0x80030201": "Can't delete files",
"0x80030242": "File does not exist",
"0x80030243": "There is a problem with the file system",
"0x80030247": "Can not delete (during operation)",
"0x80030248": "long file name",
"0x8003024a": "The file can not be deleted (protected)",
"0x80030291": "Directory does not exist",
"0x80030298": "Invalid file name format",
"0x8003029b": "The drive does not exist",
"0x80030202": "Can't delete files",
"0x800302a7": "Invalid file name format",
"0x800302a2": "Directory does not exist",
"0x800302a8": "The drive does not exist",
"0x800302a1": "File does not exist",
"0x80030301": "New file name already exists",
"0x80030342": "File does not exist",
"0x80030343": "There is a problem with the file system",
"0x80030347": "Can not rename (during operation)",
"0x80030348": "Long file name",
"0x8003034a": "Can not rename (Protect)",
"0x80030391": "The directory does not exist",
"0x80030398": "Invalid file name format",
"0x8003039b": "The drive does not exist",
"0x80030303": "Can't rename",
"0x80030305": "The new and old file names are the same",
"0x80030302": "New file name already exists",
"0x800303a7": "Invalid file name format",
"0x800303a2": "The directory does not exist",
"0x800303a8": "The drive does not exist",
"0x800303a1": "File does not exist",
"0x80030691": "The directory does not exist",
"0x8003069b": "The drive does not exist",
"0x80030643": "There is a problem with the file system",
"0x80030648": "Long file name or incorrect format",
"0x800306a2": "Directory does not exist (PC only)",
"0x800306a8": "Drive does not exist (PC only)",
"0x80030701": "I can not fit into the buffer prepared by the application",
"0x80030794": "Drive information read error",
"0x82020001": "already open",
"0x82020002": "Not Opened",
"0x82020004": "card does not exist",
"0x82020006": "Invalid Channel Number",
"0x82020007": "The file descriptor is invalid",
"0x8202000a": "Not Connected",
"0x8202000b": "not closed",
"0x82020014": "timeout",
"0x82020015": "Invalid data",
"0x82020016": "Canceled due to cancel request",
"0x82020017": "Incorrect packet size",
"0x82020018": "Ended by task end",
"0x82020032": "The command is invalid",
"0x82020033": "Incorrect setting data",
"0x80060001": "Data read cache disabled",
"0x80060090": "Incorrect Address",
"0x80060091": "Blank number incorrect",
"0x80060092": "Incorrect Subdivision Number",
"0x80060097": "Incorrect data type",
"0x8006009a": "Invalid data range",
"0x8006009d": "The data can not be read",
"0x8006009f": "Incorrect data type",
"0x800600a0": "axis specification invalid",
"0x80070140": "Can't reserve work area",
"0x80070142": "Can't open file",
"0x80070147": "The file can not be opened (during operation)",
"0x80070148": "long file path",
"0x80070149": "Not supported (CF not supported)",
"0x80070192": "already open",
"0x80070199": "The maximum number of open files has been exceeded",
"0x8007019f": "Can not open during tool data sorting",
"0x800701b0": "Security password not certified",
"0x80070290": "File not open",
"0x80070340": "Can't reserve work area",
"0x80070347": "File can not be created (during operation)",
"0x80070348": "long file path",
"0x80070349": "Not supported (CF not supported)",
"0x80070392": "Already generated",
"0x80070393": "Can't create file",
"0x80070399": "The maximum number of open files has been exceeded",
"0x8007039b": "The drive does not exist",
"0x80070490": "file not open",
"0x80070494": "File information read error",
"0x80070549": "Not writable",
"0x80070590": "File not open",
"0x80070595": "File write error",
"0x80070740": "File Delete Error",
"0x80070742": "File does not exist 3-6",
"0x80070747": "The file can not be deleted (during operation)",
"0x80070748": "long file path",
"0x80070749": "Not supported (CF not supported)",
"0x80070792": "file is open",
"0x8007079b": "The drive does not exist",
"0x80070842": "File does not exist",
"0x80070843": "File that can not be renamed",
"0x80070848": "long file path",
"0x80070849": "Not supported (CF not supported)",
"0x80070892": "The file is open",
"0x80070899": "The maximum number of open files has been exceeded",
"0x8007089b": "The drive does not exist",
"0x80070944": "Invalid command (not supported)",
"0x80070990": "Not Opened",
"0x80070994": "Read error",
"0x80070995": "Write Error",
"0x80070996": "I can not fit into the buffer prepared by the application",
"0x80070997": "Invalid data type",
"0x80070949": "Not supported (CF not supported)",
"0x80070a40": "Can't reserve work area",
"0x80070a47": "The directory can not be opened (during operation)",
"0x80070a48": "long file path",
"0x80070a49": "Not supported (CF not supported)",
"0x80070a91": "Directory does not exist",
"0x80070a92": "already open",
"0x80070a99": "The maximum number of open directories has been exceeded",
"0x80070a9b": "The drive does not exist",
"0x80070b90": "The directory has not been opened",
"0x80070b91": "Directory does not exist",
"0x80070b96": "I can not fit into the buffer prepared by the application",
"0x80070d90": "The directory has not been opened",
"0x80070e48": "long file path",
"0x80070e49": "Supported (CF not supported)",
"0x80070e94": "Error reading file information",
"0x80070e99": "The maximum number of open files has been exceeded",
"0x80070e9b": "The drive does not exist",
"0x80070f48": "long file path",
"0x80070f49": "Not supported (CF not supported)",
"0x80070f94": "Error reading file information",
"0x80070f90": "The file has not been opened",
"0x80070f9b": "The drive does not exist",
"0x8007099c": "Sorry, open format invalid and abort format",
"0xf00000ff": "Invalid argument",
"0xffffffff": "data can not be read / written"
}
# 0: エラーなし, 1以上: File_FindDir2時にファイル情報ありの時
if errcd == 0 or errcd >= 1:
return
hex_str = '0x' + format(errcd & 0xffffffff, 'x')
msg = __errmap.get(hex_str, 'Unkown error') # 辞書に無ければUnkown error
# '通信回線がオープンされてない'or'コネクトされていない'ならclose扱い
if '0x80a00101' == hex_str or '0x8202000a' == hex_str:
self.close()
raise Exception('Error=(IP:' + self.__ip + ') ' + hex_str + ': ' + msg)
|
import {Controller, Get, HttpStatus, Param, Query, Res, UseGuards} from '@nestjs/common';
import {AuthOptGuard} from "../../../auth/auth-opt.gurad";
import {Usr} from "../../../user/user.decorator";
import {User} from "../../../user/user.model";
import {Response} from "express";
import {LinkService} from "../../link/link.service";
@Controller('global/links')
export class GlobalLinkController {
    constructor(private linkService: LinkService) {
    }

    /**
     * GET /global/links/statistics
     * Returns aggregated click statistics across all links the (optionally
     * authenticated) user may see, bucketed by `interval` between `start`
     * and `end`. Responds 200 with the statistics payload.
     */
    @Get('statistics')
    @UseGuards(AuthOptGuard)
    getHistoryStats(@Usr() user: User,
                    // NOTE(review): the route 'statistics' declares no ':short'
                    // segment, so this param is always undefined — confirm intent.
                    @Param('short') short: string,
                    @Query('start') start: string,
                    @Query('end') end: string,
                    @Query('interval') interval: "minutes" | "hours" | "days" | "months",
                    @Res() res: Response,) {
        // Rejected promises propagate to Nest's exception layer unchanged;
        // the previous `.catch(err => { throw err; })` was a no-op and is removed.
        return this.linkService
            .getStatsAll(user, interval, start, end)
            .then(tLink => {
                res.status(HttpStatus.OK).json(tLink);
            });
    }
}
|
def ui():
    """Interactive console menu managing an in-memory list of items.

    Loops forever printing a 4-option menu (add / remove / display / exit)
    and reads choices from stdin. Returns None when the user picks "4".
    """
    items = []
    while True:
        print("1. Add item")
        print("2. Remove item")
        print("3. Display items")
        print("4. Exit")
        choice = input("Enter your choice: ")
        if choice == "1":
            item = input("Enter the item to add: ")
            items.append(item)
        elif choice == "2":
            if not items:
                print("List is empty. No items to remove.")
            else:
                # Bug fix: a non-numeric index used to raise ValueError and
                # crash the whole menu loop; now it is reported like an
                # out-of-range index.
                try:
                    index = int(input("Enter the index of the item to remove: "))
                except ValueError:
                    print("Invalid index. Please try again.")
                    continue
                if 0 <= index < len(items):
                    del items[index]
                else:
                    print("Invalid index. Please try again.")
        elif choice == "3":
            if not items:
                print("List is empty.")
            else:
                print("Items in the list:")
                for i, item in enumerate(items):
                    print(f"{i}: {item}")
        elif choice == "4":
            print("Exiting the program. Goodbye!")
            break
        else:
            print("Invalid choice. Please try again.")
|
<reponame>STShenZhaoliang/java-day-by-day<gh_stars>0
package cn.st.test;
import cn.st.domain.User;
import cn.st.mapper.UserMapper;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
public class MyBatisTest4 {
    /** Mapper proxy created once per test run in {@link #before()}. */
    private UserMapper mapper;

    /**
     * Boots MyBatis from sqlMapConfig.xml, opens an auto-commit session
     * and obtains the UserMapper proxy used by the test methods.
     */
    @Before
    public void before() throws IOException {
        InputStream configStream = Resources.getResourceAsStream("sqlMapConfig.xml");
        SqlSessionFactory sessionFactory = new SqlSessionFactoryBuilder().build(configStream);
        SqlSession session = sessionFactory.openSession(true);
        mapper = session.getMapper(UserMapper.class);
    }

    /** Loads every user together with its roles and dumps each one to stdout. */
    @Test
    public void testSave(){
        List<User> users = mapper.findUserAndRoleAll();
        for (User current : users) {
            System.out.println(current);
        }
    }
}
|
import { Component } from '@angular/core';
@Component({
  selector: 'app-select-input',
  template: `
    <input type="text"
      [(ngModel)]="searchText"
      (ngModelChange)="onSearchInputChange($event)"
    />
    <ul *ngIf="showList" class="list">
      <li *ngFor="let item of matchingItems"
        (click)="onItemSelected(item)">
        {{ item }}
      </li>
    </ul>
  `
})
export class SelectInputComponent {
  searchText: string;
  showList: boolean;
  matchingItems = [];
  inputItems = ["apple", "orange", "banana", "grapes", "watermelon", "strawberry"];

  /**
   * Rebuilds the suggestion list for the current input value.
   * @param value the new model value emitted by (ngModelChange) — a string,
   *              not a DOM event.
   */
  onSearchInputChange(value) {
    this.showList = true;
    // Bug fix: ngModelChange emits the VALUE itself, so the previous
    // `event.target.value` was always undefined and no item could ever
    // match. Lower-case the term too, so matching is case-insensitive
    // like the list items it is compared against.
    const searchTerm = (value || '').toLowerCase();
    this.matchingItems = this.inputItems.filter(
      (item) => item.toLowerCase().startsWith(searchTerm));
  }

  /** Copies the chosen suggestion into the input and hides the list. */
  onItemSelected(item) {
    this.searchText = item;
    this.showList = false;
  }
}
|
<!DOCTYPE html>
<html>
<head>
    <title>Purchase Order Form</title>
</head>
<body>
    <!-- Purchase order form; posts back to the current URL.
         Labels are now associated with their inputs (for/id) for
         accessibility and click-to-focus behaviour. -->
    <form action="" method="post">
        <div>
            <label for="name">Name: </label>
            <input type="text" name="name" id="name" />
        </div>
        <div>
            <label for="address">Address: </label>
            <input type="text" name="address" id="address" />
        </div>
        <div>
            <label for="item_description">Item Description: </label>
            <input type="text" name="item_description" id="item_description" />
        </div>
        <div>
            <label for="quantity">Quantity: </label>
            <input type="number" name="quantity" id="quantity" min="1" />
        </div>
        <div>
            <input type="submit" value="Submit Order" />
        </div>
    </form>
</body>
</html>
|
// Format the current date as MM/DD/YYYY (zero-padded) and log it.
var today = new Date();
var dd = String(today.getDate()).padStart(2, '0');
var mm = String(today.getMonth() + 1).padStart(2, '0');
var yyyy = today.getFullYear();
// Reuse `today` for the formatted string, as the original script did.
today = [mm, dd, yyyy].join('/');
console.log(today);
|
<filename>acmicpc/3040/3040.py
# acmicpc 3040: of nine given numbers, exactly seven sum to 100.
# Print those seven, i.e. drop the unique pair whose sum equals
# (total - 100).
numbers = [int(input()) for _ in range(9)]
excess = sum(numbers) - 100
found = False
for i in range(len(numbers)):
    for j in range(i + 1, len(numbers)):
        # The excluded pair (i, j) must absorb exactly the excess over 100.
        if numbers[i] + numbers[j] == excess:
            for k, value in enumerate(numbers):
                if k != i and k != j:
                    print(value)
            found = True
            # Bug fix: the original kept scanning after a hit, so a second
            # qualifying pair would print a second answer block; it also
            # visited each unordered pair twice (j started at 1, not i+1).
            break
    if found:
        break
|
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import (QAction, QApplication, QCheckBox, QComboBox,
QDialog, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit,
QMessageBox, QMenu, QPushButton, QSpinBox, QStyle, QSystemTrayIcon,
QTextEdit, QVBoxLayout)
import systray_rc
class Window(QDialog):
    """Main dialog of the Qt "Systray" example: configures a
    QSystemTrayIcon, its context menu, and a form for sending balloon
    messages to the tray icon."""
    def __init__(self):
        super(Window, self).__init__()
        # Build the two group boxes first: they create the widgets the
        # signal wiring below refers to.
        self.createIconGroupBox()
        self.createMessageGroupBox()
        self.iconLabel.setMinimumWidth(self.durationLabel.sizeHint().width())
        self.createActions()
        self.createTrayIcon()
        self.showMessageButton.clicked.connect(self.showMessage)
        self.showIconCheckBox.toggled.connect(self.trayIcon.setVisible)
        self.iconComboBox.currentIndexChanged.connect(self.setIcon)
        self.trayIcon.messageClicked.connect(self.messageClicked)
        self.trayIcon.activated.connect(self.iconActivated)
        mainLayout = QVBoxLayout()
        mainLayout.addWidget(self.iconGroupBox)
        mainLayout.addWidget(self.messageGroupBox)
        self.setLayout(mainLayout)
        # Index 1 is the "Heart" icon; this triggers setIcon via the
        # currentIndexChanged connection made above.
        self.iconComboBox.setCurrentIndex(1)
        self.trayIcon.show()
        self.setWindowTitle("Systray")
        self.resize(400, 300)
    def setVisible(self, visible):
        """Keep the tray context-menu actions in sync with window state."""
        self.minimizeAction.setEnabled(visible)
        self.maximizeAction.setEnabled(not self.isMaximized())
        self.restoreAction.setEnabled(self.isMaximized() or not visible)
        super(Window, self).setVisible(visible)
    def closeEvent(self, event):
        """Hide to the tray instead of quitting while the tray icon shows."""
        if self.trayIcon.isVisible():
            QMessageBox.information(self, "Systray",
                    "The program will keep running in the system tray. To "
                    "terminate the program, choose <b>Quit</b> in the "
                    "context menu of the system tray entry.")
            self.hide()
            event.ignore()
    def setIcon(self, index):
        """Apply the combo-box icon at `index` to tray, window and tooltip."""
        icon = self.iconComboBox.itemIcon(index)
        self.trayIcon.setIcon(icon)
        self.setWindowIcon(icon)
        self.trayIcon.setToolTip(self.iconComboBox.itemText(index))
    def iconActivated(self, reason):
        """Tray-icon activation handler: click/double-click cycles the
        icon, middle-click shows the balloon message."""
        if reason in (QSystemTrayIcon.Trigger, QSystemTrayIcon.DoubleClick):
            self.iconComboBox.setCurrentIndex(
                    (self.iconComboBox.currentIndex() + 1)
                    % self.iconComboBox.count())
        elif reason == QSystemTrayIcon.MiddleClick:
            self.showMessage()
    def showMessage(self):
        """Show a balloon message built from the form fields (duration is
        entered in seconds, showMessage expects milliseconds)."""
        icon = QSystemTrayIcon.MessageIcon(
                self.typeComboBox.itemData(self.typeComboBox.currentIndex()))
        self.trayIcon.showMessage(self.titleEdit.text(),
                self.bodyEdit.toPlainText(), icon,
                self.durationSpinBox.value() * 1000)
    def messageClicked(self):
        """Reacts to the user clicking the balloon message."""
        QMessageBox.information(None, "Systray",
                "Sorry, I already gave what help I could.\nMaybe you should "
                "try asking a human?")
    def createIconGroupBox(self):
        """Build the "Tray Icon" group: icon chooser + visibility checkbox."""
        self.iconGroupBox = QGroupBox("Tray Icon")
        self.iconLabel = QLabel("Icon:")
        self.iconComboBox = QComboBox()
        self.iconComboBox.addItem(QIcon(':/images/bad.png'), "Bad")
        self.iconComboBox.addItem(QIcon(':/images/heart.png'), "Heart")
        self.iconComboBox.addItem(QIcon(':/images/trash.png'), "Trash")
        self.showIconCheckBox = QCheckBox("Show icon")
        self.showIconCheckBox.setChecked(True)
        iconLayout = QHBoxLayout()
        iconLayout.addWidget(self.iconLabel)
        iconLayout.addWidget(self.iconComboBox)
        iconLayout.addStretch()
        iconLayout.addWidget(self.showIconCheckBox)
        self.iconGroupBox.setLayout(iconLayout)
    def createMessageGroupBox(self):
        """Build the "Balloon Message" group: type, duration, title, body
        and the Show Message button."""
        self.messageGroupBox = QGroupBox("Balloon Message")
        typeLabel = QLabel("Type:")
        self.typeComboBox = QComboBox()
        # Each item stores the QSystemTrayIcon message-icon enum as item data.
        self.typeComboBox.addItem("None", QSystemTrayIcon.NoIcon)
        self.typeComboBox.addItem(self.style().standardIcon(
                QStyle.SP_MessageBoxInformation), "Information",
                QSystemTrayIcon.Information)
        self.typeComboBox.addItem(self.style().standardIcon(
                QStyle.SP_MessageBoxWarning), "Warning",
                QSystemTrayIcon.Warning)
        self.typeComboBox.addItem(self.style().standardIcon(
                QStyle.SP_MessageBoxCritical), "Critical",
                QSystemTrayIcon.Critical)
        self.typeComboBox.setCurrentIndex(1)
        self.durationLabel = QLabel("Duration:")
        self.durationSpinBox = QSpinBox()
        self.durationSpinBox.setRange(5, 60)
        self.durationSpinBox.setSuffix(" s")
        self.durationSpinBox.setValue(15)
        durationWarningLabel = QLabel("(some systems might ignore this hint)")
        durationWarningLabel.setIndent(10)
        titleLabel = QLabel("Title:")
        self.titleEdit = QLineEdit("Cannot connect to network")
        bodyLabel = QLabel("Body:")
        self.bodyEdit = QTextEdit()
        self.bodyEdit.setPlainText("Don't believe me. Honestly, I don't have "
                "a clue.\nClick this balloon for details.")
        self.showMessageButton = QPushButton("Show Message")
        self.showMessageButton.setDefault(True)
        messageLayout = QGridLayout()
        messageLayout.addWidget(typeLabel, 0, 0)
        messageLayout.addWidget(self.typeComboBox, 0, 1, 1, 2)
        messageLayout.addWidget(self.durationLabel, 1, 0)
        messageLayout.addWidget(self.durationSpinBox, 1, 1)
        messageLayout.addWidget(durationWarningLabel, 1, 2, 1, 3)
        messageLayout.addWidget(titleLabel, 2, 0)
        messageLayout.addWidget(self.titleEdit, 2, 1, 1, 4)
        messageLayout.addWidget(bodyLabel, 3, 0)
        messageLayout.addWidget(self.bodyEdit, 3, 1, 2, 4)
        messageLayout.addWidget(self.showMessageButton, 5, 4)
        messageLayout.setColumnStretch(3, 1)
        messageLayout.setRowStretch(4, 1)
        self.messageGroupBox.setLayout(messageLayout)
    def createActions(self):
        """Create the actions used by the tray icon's context menu."""
        self.minimizeAction = QAction("Mi&nimize", self, triggered=self.hide)
        self.maximizeAction = QAction("Ma&ximize", self,
                triggered=self.showMaximized)
        self.restoreAction = QAction("&Restore", self,
                triggered=self.showNormal)
        self.quitAction = QAction("&Quit", self,
                triggered=QApplication.instance().quit)
    def createTrayIcon(self):
        """Assemble the tray icon and attach its context menu."""
        self.trayIconMenu = QMenu(self)
        self.trayIconMenu.addAction(self.minimizeAction)
        self.trayIconMenu.addAction(self.maximizeAction)
        self.trayIconMenu.addAction(self.restoreAction)
        self.trayIconMenu.addSeparator()
        self.trayIconMenu.addAction(self.quitAction)
        self.trayIcon = QSystemTrayIcon(self)
        self.trayIcon.setContextMenu(self.trayIconMenu)
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    # Bail out early when the desktop environment offers no system tray.
    if not QSystemTrayIcon.isSystemTrayAvailable():
        QMessageBox.critical(None, "Systray",
                "I couldn't detect any system tray on this system.")
        sys.exit(1)
    # Closing the dialog must not quit the app: it keeps living in the tray.
    QApplication.setQuitOnLastWindowClosed(False)
    window = Window()
    window.show()
    sys.exit(app.exec_())
|
<reponame>Artcs1/RotationDetection
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import numpy as np
from libs.configs._base_.models.retinanet_r50_fpn import *
from libs.configs._base_.datasets.dota_detection import *
from libs.configs._base_.schedules.schedule_1x import *
from dataloader.pretrained_weights.pretrain_zoo import PretrainModelZoo
# schedule
BATCH_SIZE = 1
GPU_GROUP = "0"
NUM_GPU = len(GPU_GROUP.strip().split(','))
LR = 1e-3
SAVE_WEIGHTS_INTE = 10000
DECAY_STEP = np.array(DECAY_EPOCH, np.int32) * SAVE_WEIGHTS_INTE
MAX_ITERATION = SAVE_WEIGHTS_INTE * MAX_EPOCH
WARM_SETP = int(WARM_EPOCH * SAVE_WEIGHTS_INTE)
# dataset
DATASET_NAME = 'MLT'
IMG_SHORT_SIDE_LEN = 800
IMG_MAX_LENGTH = 1000
CLASS_NUM = 1
# model
# backbone
pretrain_zoo = PretrainModelZoo()
PRETRAINED_CKPT = pretrain_zoo.pretrain_weight_path(NET_NAME, ROOT_PATH)
TRAINED_CKPT = os.path.join(ROOT_PATH, 'output/trained_weights')
# loss
REG_LOSS_MODE = None
# post-processing
VIS_SCORE = 0.2
VERSION = 'RetinaNet_MLT_1x_20201214'
"""
FLOPs: 472715443; Trainable params: 32325246
train/test
2020-12-15 retinanet_0.35 48.42% 67.07% 37.88% 32.80%
2020-12-15 retinanet_0.3 48.32% 62.18% 39.51% 33.85%
2020-12-15 retinanet_0.4 47.88% 71.14% 36.08% 31.55%
2020-12-15 retinanet_0.45 46.77% 74.59% 34.07% 30.08%
"""
|
<reponame>PepsRyuu/jscollab<gh_stars>1-10
// Active WebSocket connection for the current room (set in SocketManager.joinRoom).
let ws;
// Thin wrapper around window.fetch: stamps CSRF/cookie metadata on every
// request, JSON-encodes object bodies, throws on HTTP >= 400, and parses
// JSON responses (non-JSON responses resolve to undefined).
async function fetch (url, options = {}) {
    // Common metadata
    options.headers = options.headers || {};
    options.headers['X-Requested-With'] = 'XMLHttpRequest'; // Allows server to check for CSRF.
    options.credentials = 'same-origin'; // append cookies

    // Transform body into something consumable by server.
    if (options.body) {
        options.headers['Content-Type'] = 'application/json';
        options.body = JSON.stringify(options.body);
    }

    const res = await window.fetch(url, options);
    if (res.status >= 400) {
        const msg = await res.text();
        throw new Error(`Server Error: ` + msg);
    }
    const contentType = res.headers.get('Content-Type');
    if (contentType && contentType.indexOf('application/json') > -1) {
        return res.json();
    }
}
// Subscribers registered via SocketManager.onMessage (fan-out per message).
let messageCallbacks = [];
// Single handler registered via SocketManager.onClose.
let closeCallback;
/**
 * Static facade over the room REST endpoints and the room WebSocket.
 * Holds a single module-level socket (`ws`) — one room at a time.
 */
export default class SocketManager {
    /** Resolves if the room exists / may be joined (HTTP errors reject). */
    static canJoinRoom (roomId) {
        return fetch('/api/rooms/' + roomId);
    }

    /** Creates a new room and resolves with its id. */
    static createRoom () {
        return fetch('/api/create-room').then(res => {
            return res.roomId;
        });
    }

    /**
     * Opens the room WebSocket; resolves once the connection is open.
     * Incoming messages are JSON-parsed and fanned out to onMessage
     * subscribers.
     */
    static joinRoom (name, roomId) {
        return new Promise(resolve => {
            let protocol = window.location.protocol === 'https:'? 'wss:' : 'ws:';
            ws = new WebSocket(protocol + '//' + window.location.host + '/api/ws/' + roomId + '/' + name);

            ws.addEventListener('open', resolve);

            ws.addEventListener('message', e => {
                e = JSON.parse(e.data);
                messageCallbacks.forEach(cb => cb(e));
            });

            ws.addEventListener('close', e => {
                // Bug fix: the socket can close before any onClose handler
                // is registered; calling an undefined closeCallback threw
                // "closeCallback is not a function".
                if (closeCallback) {
                    closeCallback();
                }
            });
        });
    }

    /** Serializes and sends a message over the current room socket. */
    static sendMessage (data) {
        ws.send(JSON.stringify(data));
    }

    /** Subscribes to incoming messages; returns a handle with remove(). */
    static onMessage (cb) {
        messageCallbacks.push(cb);
        return {
            remove: () => {
                let index = messageCallbacks.indexOf(cb);
                messageCallbacks.splice(index, 1);
            }
        }
    }

    /** Registers the (single) close handler. */
    static onClose (cb) {
        closeCallback = cb;
    }
}
|
/// Styles the thumbnail view (rounded, clipped corners) and loads the
/// result's thumb image asynchronously via Kingfisher when the URL parses.
func configureCell(result: SearchResult) {
    searchImageView.layer.cornerRadius = 5.0
    searchImageView.layer.masksToBounds = true
    guard let thumbURL = URL(string: result.urls.thumb) else { return }
    searchImageView.kf.setImage(with: thumbURL)
}
|
import java.sql.*;
import javax.swing.*;
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
/*
* addproduct.java
*
* Created on Oct 30, 2017, 12:15:01 PM
*/
/**
*
* @author DRALL
*/
/**
 * Swing frame for adding a product to the `shop` MySQL database.
 * Built with the NetBeans GUI designer — the initComponents() fold is
 * generated code and must not be edited by hand.
 *
 * NOTE(review): every query in this class is built by string
 * concatenation from user-entered text — SQL injection risk; should use
 * PreparedStatement. Also `Class.forName("java.sql.Driver")` loads an
 * interface, not a JDBC driver class — presumably works only because the
 * MySQL driver self-registers; verify.
 */
public class addproduct extends javax.swing.JFrame {
    // JDBC handles are opened per button click and never closed (leak —
    // flagged for review, not fixed here).
    Connection con=null;
    Statement stmt=null;
    ResultSet rs=null;
    String query;
    int price=0;
    String bcode="", pid="", pname="", category="";
    // Logged-in employee's name and id, passed in from employeehome.
    String name,id;
    /** Creates new form addproduct */
    public addproduct() {
        initComponents();
    }
    /** Creates the form pre-filled with the employee id shown read-only. */
    public addproduct(String n, String i) {
        initComponents();
        id=i;
        name=n;
        jTextField1.setText(id);
    }
    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jButton1 = new javax.swing.JButton();
        jLabel1 = new javax.swing.JLabel();
        jButton2 = new javax.swing.JButton();
        jLabel2 = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        jTextField1 = new javax.swing.JTextField();
        jButton3 = new javax.swing.JButton();
        jButton4 = new javax.swing.JButton();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        jLabel7 = new javax.swing.JLabel();
        jComboBox1 = new javax.swing.JComboBox();
        jTextField2 = new javax.swing.JTextField();
        jTextField3 = new javax.swing.JTextField();
        jTextField4 = new javax.swing.JTextField();
        jTextField5 = new javax.swing.JTextField();
        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        jButton1.setForeground(new java.awt.Color(255, 0, 0));
        jButton1.setText("<");
        jButton1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton1ActionPerformed(evt);
            }
        });
        jLabel1.setFont(new java.awt.Font("Tahoma", 0, 36));
        jLabel1.setText("ADD PRODUCT");
        jButton2.setForeground(new java.awt.Color(255, 0, 0));
        jButton2.setText("X");
        jButton2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton2ActionPerformed(evt);
            }
        });
        jLabel2.setText("EMPLOYEE ID");
        jLabel3.setText("BARCODE");
        jTextField1.setEditable(false);
        jTextField1.setHorizontalAlignment(javax.swing.JTextField.CENTER);
        jTextField1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jTextField1ActionPerformed(evt);
            }
        });
        jButton3.setText("ADD");
        jButton3.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton3ActionPerformed(evt);
            }
        });
        jButton4.setText("CLEAR");
        jButton4.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton4ActionPerformed(evt);
            }
        });
        jLabel4.setText("PRODUCT ID");
        jLabel5.setText("NAME");
        jLabel6.setText("PRICE");
        jLabel7.setText("CATEGORY");
        jComboBox1.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "FOOD", "ELECTRONICS", "FURNITURE", "ACCESSORIES", "STATIONARY" }));
        jComboBox1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jComboBox1ActionPerformed(evt);
            }
        });
        jTextField2.setHorizontalAlignment(javax.swing.JTextField.RIGHT);
        jTextField3.setHorizontalAlignment(javax.swing.JTextField.RIGHT);
        jTextField4.setHorizontalAlignment(javax.swing.JTextField.RIGHT);
        jTextField5.setHorizontalAlignment(javax.swing.JTextField.RIGHT);
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addComponent(jButton1)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 217, Short.MAX_VALUE)
                .addComponent(jLabel1)
                .addGap(231, 231, 231)
                .addComponent(jButton2))
            .addGroup(layout.createSequentialGroup()
                .addGap(156, 156, 156)
                .addComponent(jButton3, javax.swing.GroupLayout.PREFERRED_SIZE, 169, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addGap(107, 107, 107)
                .addComponent(jButton4, javax.swing.GroupLayout.PREFERRED_SIZE, 186, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(149, Short.MAX_VALUE))
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addGap(110, 110, 110)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                    .addComponent(jLabel7, javax.swing.GroupLayout.PREFERRED_SIZE, 201, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel6, javax.swing.GroupLayout.PREFERRED_SIZE, 201, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 201, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel4, javax.swing.GroupLayout.PREFERRED_SIZE, 201, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel2, javax.swing.GroupLayout.PREFERRED_SIZE, 201, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jLabel3, javax.swing.GroupLayout.PREFERRED_SIZE, 201, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jTextField5, javax.swing.GroupLayout.DEFAULT_SIZE, 347, Short.MAX_VALUE)
                    .addComponent(jTextField4, javax.swing.GroupLayout.DEFAULT_SIZE, 347, Short.MAX_VALUE)
                    .addComponent(jTextField3, javax.swing.GroupLayout.DEFAULT_SIZE, 347, Short.MAX_VALUE)
                    .addComponent(jTextField2, javax.swing.GroupLayout.DEFAULT_SIZE, 347, Short.MAX_VALUE)
                    .addComponent(jComboBox1, 0, 347, Short.MAX_VALUE)
                    .addComponent(jTextField1, javax.swing.GroupLayout.DEFAULT_SIZE, 347, Short.MAX_VALUE))
                .addGap(105, 105, 105))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 492, Short.MAX_VALUE)
            .addGap(0, 492, Short.MAX_VALUE)
            .addGroup(layout.createSequentialGroup()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addGap(6, 6, 6)
                        .addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 68, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(37, 37, 37)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jTextField1, javax.swing.GroupLayout.DEFAULT_SIZE, 35, Short.MAX_VALUE)
                            .addComponent(jLabel2, javax.swing.GroupLayout.DEFAULT_SIZE, 35, Short.MAX_VALUE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                            .addComponent(jTextField2, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE)
                            .addComponent(jLabel3, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jLabel4, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE)
                            .addComponent(jTextField3, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jLabel5, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE)
                            .addComponent(jTextField4, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jLabel6, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE)
                            .addComponent(jTextField5, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE))
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jLabel7, javax.swing.GroupLayout.DEFAULT_SIZE, 36, Short.MAX_VALUE)
                            .addComponent(jComboBox1, javax.swing.GroupLayout.PREFERRED_SIZE, 28, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addGap(57, 57, 57)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(jButton3, javax.swing.GroupLayout.DEFAULT_SIZE, 43, Short.MAX_VALUE)
                            .addComponent(jButton4, javax.swing.GroupLayout.DEFAULT_SIZE, 43, Short.MAX_VALUE))
                        .addGap(36, 36, 36))
                    .addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 39, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(jButton2, javax.swing.GroupLayout.PREFERRED_SIZE, 38, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addContainerGap())
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents
    // "<" button: go back to the employee home screen.
    private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
        // TODO add your handling code here:
        this.setVisible(false);
        new employeehome(name,id).setVisible(true);
    }//GEN-LAST:event_jButton1ActionPerformed
    // "X" button: terminate the application.
    private void jButton2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton2ActionPerformed
        // TODO add your handling code here:
        System.exit(0);
    }//GEN-LAST:event_jButton2ActionPerformed
    private void jTextField1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jTextField1ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jTextField1ActionPerformed
    // "CLEAR" button: blank all editable fields.
    private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
        // TODO add your handling code here:
        jTextField2.setText("");
        jTextField3.setText("");
        jTextField4.setText("");
        jTextField5.setText("");
    }//GEN-LAST:event_jButton4ActionPerformed
    // "ADD" button: insert the entered product into the `product` table,
    // incrementing the quantity when the product id already exists and the
    // entered name/price/category match the stored row.
    private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed
        // TODO add your handling code here:
        try {
            bcode = jTextField2.getText();
            pid = jTextField3.getText().toUpperCase();
            pname = jTextField4.getText().toUpperCase();
            // NOTE(review): throws NumberFormatException on a non-numeric
            // price; surfaced to the user via the catch below.
            price = Integer.parseInt(jTextField5.getText());
            category = jComboBox1.getSelectedItem().toString();
            // NOTE(review): java.sql.Driver is an interface, not a driver
            // implementation class — confirm the MySQL driver is actually on
            // the classpath and self-registering.
            Class.forName("java.sql.Driver");
            con=DriverManager.getConnection("jdbc:mysql://localhost/shop","root","123");
            stmt=con.createStatement();
            // NOTE(review): all queries below concatenate raw user input —
            // SQL injection; should be PreparedStatement with parameters.
            query="select * from product where pid='"+pid+"';";
            rs = stmt.executeQuery(query);
            if(rs.next()) {
                String n = rs.getString("pname");
                String c = rs.getString("category");
                String pd = rs.getString("pid");
                int p = Integer.parseInt(rs.getString("pprice"));
                if(pname.equals(n)==true && price==p && category.equals(c)==true){
                    int x =0;
                    query="select * from product where pid='"+pid+"';";
                    rs=stmt.executeQuery(query);
                    if(rs.next()) x=Integer.parseInt(rs.getString("qty"));
                    x=x+1;
                    // NOTE(review): this branch both INSERTs a new row and
                    // UPDATEs qty for the same pid — looks like it creates
                    // duplicate rows; verify the intended schema/behaviour.
                    query="insert into product values('"+bcode+"','"+pid+"','"+pname+"',"+price+",'"+category+"',"+x+",0,0);";
                    stmt.executeUpdate(query);
                    query="update product set qty="+x+" where pid='"+pid+"';";
                    stmt.executeUpdate(query);
                    JOptionPane.showMessageDialog(null,"YOU HAVE ADDED A PRODUCT!");
                }
                else{
                    JOptionPane.showMessageDialog(null,"Name, price & cateory enterered are wrong!");
                }
            } else {
                int x =0;
                query="select * from product where pid='"+pid+"';";
                rs=stmt.executeQuery(query);
                if(rs.next()) x=Integer.parseInt(rs.getString("qty"));
                x=x+1;
                query="insert into product values('"+bcode+"','"+pid+"','"+pname+"',"+price+",'"+category+"',"+x+",0,0);";
                stmt.executeUpdate(query);
                query="update product set qty="+x+" where pid='"+pid+"';";
                stmt.executeUpdate(query);
                JOptionPane.showMessageDialog(null,"YOU HAVE ADDED A PRODUCT!");
            }
        } catch(Exception e) {
            JOptionPane.showMessageDialog(null,e.getMessage());
        }
    }//GEN-LAST:event_jButton3ActionPerformed
    private void jComboBox1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jComboBox1ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jComboBox1ActionPerformed
    /**
    * @param args the command line arguments
    */
    public static void main(String args[]) {
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new addproduct().setVisible(true);
            }
        });
    }
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton2;
    private javax.swing.JButton jButton3;
    private javax.swing.JButton jButton4;
    private javax.swing.JComboBox jComboBox1;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JTextField jTextField1;
    private javax.swing.JTextField jTextField2;
    private javax.swing.JTextField jTextField3;
    private javax.swing.JTextField jTextField4;
    private javax.swing.JTextField jTextField5;
    // End of variables declaration//GEN-END:variables
}
|
#!/usr/bin/env bash
# build.sh <action> ...
# ci_action: dependencies|generate|build|install
# ci_platform: windows|linux|macos|android|ios|web
# ci_arch: x86|x64|arm|arm64
# ci_compiler: msvc|gcc|gcc-*|clang|clang-*|mingw
# ci_build_type: dbg|rel
# ci_lib_type: lib|dll
# ci_source_dir: source code directory
# ci_build_dir: cmake cache directory
# ci_sdk_dir: sdk installation directory
# CI driver setup: parse the action, normalize environment parameters and
# declare the per-platform cmake generator/toolchain/quirk tables used by
# the action-* functions below.
ci_action=$1; shift;
ci_cmake_params_user="$@"

# default values
ci_compiler=${ci_compiler:-"default"}
ci_build_type=${ci_build_type:-"rel"}
ci_lib_type=${ci_lib_type:-"dll"}

# fix paths on windows by replacing \ with /.
# (tr warns about the lone backslash on some platforms; stderr is
# discarded on purpose.)
ci_source_dir=$(echo $ci_source_dir | tr "\\" "/" 2>/dev/null)
ci_build_dir=$(echo $ci_build_dir | tr "\\" "/" 2>/dev/null)
ci_sdk_dir=$(echo $ci_sdk_dir | tr "\\" "/" 2>/dev/null)
ci_source_dir=${ci_source_dir%/}; # remove trailing slash if any

echo "ci_action=$ci_action"
echo "ci_platform=$ci_platform"
echo "ci_arch=$ci_arch"
echo "ci_compiler=$ci_compiler"
echo "ci_build_type=$ci_build_type"
echo "ci_lib_type=$ci_lib_type"
echo "ci_source_dir=$ci_source_dir"
echo "ci_build_dir=$ci_build_dir"
echo "ci_sdk_dir=$ci_sdk_dir"

# Map short build-type codes to CMake / Gradle names.
declare -A types=(
    [dbg]='Debug'
    [rel]='Release'
)
declare -A android_types=(
    [dbg]='assembleDebug'
    [rel]='assembleRelease'
)

# Per-platform cmake generators; looked up later by indirect expansion,
# e.g. "generators_${ci_platform}[@]".
generators_windows_mingw=('-G' 'MinGW Makefiles')
generators_windows=('-G' 'Visual Studio 16 2019')
generators_uwp=('-G' 'Visual Studio 16 2019' '-DCMAKE_SYSTEM_NAME=WindowsStore' '-DCMAKE_SYSTEM_VERSION=10.0')
generators_linux=('-G' 'Ninja')
generators_web=('-G' 'Ninja')
generators_macos=('-G' 'Xcode' '-T' 'buildsystem=1')
generators_ios=('-G' 'Xcode' '-T' 'buildsystem=1')
toolchains_ios=(
    '-DCMAKE_TOOLCHAIN_FILE=cmake/Toolchains/IOS.cmake'
    '-DPLATFORM=SIMULATOR64'
    '-DDEPLOYMENT_TARGET=11'
)
toolchains_web=(
    '-DCMAKE_TOOLCHAIN_FILE=cmake/Toolchains/Emscripten.cmake'
    '-DURHO3D_PROFILING=OFF'
)
lib_types_lib=('-DBUILD_SHARED_LIBS=OFF')
lib_types_dll=('-DBUILD_SHARED_LIBS=ON')
quirks_mingw=(
    '-DURHO3D_PROFILING=OFF'
    '-DURHO3D_CSHARP=OFF'
    '-DURHO3D_TESTING=OFF'
)
quirks_ios=(
    '-DURHO3D_CSHARP=OFF'
)
quirks_android=(
    '-DURHO3D_CSHARP=OFF'
)
quirks_web=(
    '-DURHO3D_PROFILING=OFF'
    '-DURHO3D_CSHARP=OFF'
)
quirks_dll=('-DURHO3D_CSHARP=ON')
quirks_windows_msvc_x64=('-A' 'x64')
quirks_windows_msvc_x86=('-A' 'Win32')
# Bug fix: these were misspelled "quirks_uwo_..." (never matched the
# "uwp" platform lookup) and assigned as flattened scalars, which breaks
# the later "${!v}" array expansion — copy them as real arrays.
quirks_uwp_msvc_x64=("${quirks_windows_msvc_x64[@]}")
quirks_uwp_msvc_x86=("${quirks_windows_msvc_x86[@]}")
quirks_uwp_msvc_arm=('-A' 'ARM')
quirks_uwp_msvc_arm64=('-A' 'ARM64')
quirks_clang=('-DTRACY_NO_PARALLEL_ALGORITHMS=ON')  # Includes macos and ios
quirks_macos_x86=('-DCMAKE_OSX_ARCHITECTURES=i386')
quirks_macos_x64=('-DCMAKE_OSX_ARCHITECTURES=x86_64')
quirks_linux_x86=(
    '-DCMAKE_C_FLAGS=-m32'
    '-DCMAKE_CXX_FLAGS=-m32'
)
quirks_linux_x64=(
    '-DCMAKE_C_FLAGS=-m64'
    '-DCMAKE_CXX_FLAGS=-m64'
)

# Find msbuild.exe (Windows/UWP only; vswhere reports the VS install path).
MSBUILD=msbuild
if [[ "$ci_platform" == "windows" || "$ci_platform" == "uwp" ]];
then
    MSBUILD=$(vswhere -products '*' -requires Microsoft.Component.MSBuild -property installationPath -latest)
    MSBUILD=$(echo $MSBUILD | tr "\\" "/" 2>/dev/null)   # Fix slashes
    MSBUILD=$(echo $MSBUILD | sed "s/://" 2>/dev/null)   # Remove :
    MSBUILD="/$MSBUILD/MSBuild/Current/Bin/MSBuild.exe"
fi
# Install build-time dependencies for the current $ci_platform / $ci_arch.
# Linux cross-builds enable the foreign dpkg architecture and rewrite every
# dev package name to its target-arch variant (e.g. "libxi-dev:armhf").
function action-dependencies() {
    if [[ "$ci_platform" == "linux" ]];
    then
        # Linux dependencies
        dev_packages=(
            libgl1-mesa-dev libxcursor-dev libxi-dev libxinerama-dev libxrandr-dev libxrender-dev libxss-dev
            libasound2-dev libpulse-dev libibus-1.0-dev libdbus-1-dev libreadline6-dev libudev-dev uuid-dev libtbb-dev
        )
        # Cross builds additionally need multiarch binutils and the base toolchain.
        if [[ "$ci_arch" != "x64" ]];
        then
            dev_packages[${#dev_packages[@]}]="binutils-multiarch"
            dev_packages[${#dev_packages[@]}]="binutils-multiarch-dev"
            dev_packages[${#dev_packages[@]}]="build-essential"
        fi
        # Per-arch compilers
        # "${dev_packages[@]/%/:armhf}" appends the arch suffix to every entry.
        case "$ci_arch" in
            arm)
                sudo dpkg --add-architecture armhf
                dev_packages=("${dev_packages[@]/%/:armhf}")
                dev_packages[${#dev_packages[@]}]="crossbuild-essential-armhf"
                ;;
            arm64)
                sudo dpkg --add-architecture arm64
                sudo apt-get update
                dev_packages=("${dev_packages[@]/%/:arm64}")
                dev_packages[${#dev_packages[@]}]="crossbuild-essential-arm64"
                ;;
            x86)
                sudo dpkg --add-architecture i386
                dev_packages=("${dev_packages[@]/%/:i386}")
                dev_packages[${#dev_packages[@]}]="crossbuild-essential-i386"
                ;;
        esac
        # Common dependencies (apt-get update also picks up any architecture
        # added above; xvfb provides a headless X server for tests).
        sudo apt-get update
        sudo apt-get install -y ninja-build ccache xvfb "${dev_packages[@]}"
    elif [[ "$ci_platform" == "web" || "$ci_platform" == "android" ]];
    then
        # Web / android dependencies
        sudo apt-get install -y --no-install-recommends uuid-dev ninja-build ccache
    elif [[ "$ci_platform" == "macos" || "$ci_platform" == "ios" ]];
    then
        # iOS/MacOS dependencies
        brew install pkg-config ccache
    elif [[ "$ci_platform" == "windows" || "$ci_platform" == "uwp" ]];
    then
        # Windows dependencies (clcache is the MSVC equivalent of ccache)
        choco install -y ccache
        pip install clcache
    fi
}
# Configure the CMake build tree in $ci_build_dir, assembling the argument
# list from the generators_* / toolchains_* / lib_types_* / quirks_* tables
# via bash indirect expansion ("${!v}"). Tables that do not exist for the
# current combination simply expand to nothing.
function action-generate() {
    # Change a default compiler.
    case "$ci_platform-$ci_compiler" in
        linux-clang*)
            export CC=${ci_compiler} # clang or clang-XX
            export CXX=${ci_compiler}++ # clang++ or clang-XX++
            ;;
        linux-gcc*)
            export CC=${ci_compiler} # gcc or gcc-XX
            export CXX=${ci_compiler/gcc/g++} # g++ or g++-XX
            ;;
    esac
    # Generate.
    mkdir $ci_build_dir
    cd $ci_build_dir
    ci_cmake_params=()
    # Most specific generator (platform+compiler) wins; otherwise fall back
    # to the per-platform generator.
    v="generators_${ci_platform}_${ci_compiler}[@]"; ci_cmake_params+=("${!v}")
    if [[ -z "${!v}" ]];
    then
        v="generators_${ci_platform}[@]"; ci_cmake_params+=("${!v}")
    fi
    v="toolchains_${ci_platform}[@]"; ci_cmake_params+=("${!v}")
    v="lib_types_${ci_lib_type}[@]"; ci_cmake_params+=("${!v}")
    v="quirks_${ci_compiler}[@]"; ci_cmake_params+=("${!v}")
    v="quirks_${ci_lib_type}[@]"; ci_cmake_params+=("${!v}")
    v="quirks_${ci_platform}_${ci_compiler}_${ci_arch}[@]"; ci_cmake_params+=("${!v}")
    v="quirks_${ci_platform}_${ci_arch}[@]"; ci_cmake_params+=("${!v}")
    v="quirks_${ci_platform}[@]"; ci_cmake_params+=("${!v}")
    ci_cmake_params+=(
        "-DCMAKE_BUILD_TYPE=${types[$ci_build_type]}"
        "-DCMAKE_INSTALL_PREFIX=$ci_sdk_dir"
    )
    # clcache is wired in at build time for msvc instead (see action-build-msvc).
    if [[ "$ci_compiler" != "msvc" ]];
    then
        ci_cmake_params+=(
            "-DCMAKE_C_COMPILER_LAUNCHER=ccache"
            "-DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
        )
    fi
    # NOTE(review): user params are appended unquoted and therefore undergo
    # word splitting here -- confirm whether that is intentional.
    ci_cmake_params+=(${ci_cmake_params_user[@]})
    ci_cmake_params+=("$ci_source_dir")
    echo "${ci_cmake_params[@]}"
    cmake "${ci_cmake_params[@]}"
}
# Default build path using plain CMake.
function action-build() {
    cd $ci_build_dir
    # Drive the platform's native build tool through CMake, then report
    # compiler-cache statistics on success.
    local build_config="${types[$ci_build_type]}"
    cmake --build . --config "$build_config" && ccache -s
}
# Custom compiler build paths used only on windows.
function action-build-msvc() {
    cd $ci_build_dir
    # Invoke msbuild directly when using msvc. Invoking msbuild through cmake
    # causes some custom target dependencies to not be respected.
    local python_path
    python_path=$(python -c "import os, sys; print(os.path.dirname(sys.executable))")
    "$MSBUILD" "-r" "-p:Configuration=${types[$ci_build_type]}" "-p:TrackFileAccess=false" "-p:CLToolExe=clcache.exe" "-p:CLToolPath=$python_path/Scripts/" *.sln \
        && clcache -s
}
# MinGW needs no special handling beyond the generator chosen at configure time.
function action-build-mingw() {
    action-build # Default build using CMake.
}
# Custom platform build paths used only on android.
# Android is built through Gradle from the source tree (not $ci_build_dir);
# the Gradle task name comes from the android_types table.
function action-build-android() {
    cd $ci_source_dir/android
    gradle wrapper && \
    ./gradlew "${android_types[$ci_build_type]}" && \
    ccache --show-stats
}
function action-install() {
    cd $ci_build_dir
    # Install into $ci_sdk_dir (set as CMAKE_INSTALL_PREFIX at generate time).
    local install_config="${types[$ci_build_type]}"
    cmake --install . --config "$install_config"
}
# Invoke requested action.
# Dispatches to one of the action-* functions above based on $ci_action
# (e.g. ci_action=generate runs action-generate). An unknown action fails
# with "command not found".
action-$ci_action
|
<gh_stars>0
const asyncWrapper = require('../middleware/asyncWrapper');
const ErrorResponse = require('../utils/ErrorResponse');
const Weather = require('../models/Weather');
const getExternalWeather = require('../utils/getExternalWeather');
// @desc Get latest weather status
// @route GET /api/weather
// @access Public
exports.getWeather = asyncWrapper(async (req, res, next) => {
const weather = await Weather.findAll({
order: [['createdAt', 'DESC']],
limit: 1
});
res.status(200).json({
success: true,
data: weather
})
})
// @desc Update weather
// @route GET /api/weather/update
// @access Public
exports.updateWeather = asyncWrapper(async (req, res, next) => {
const weather = await getExternalWeather();
res.status(200).json({
success: true,
data: weather
})
})
|
#!/bin/bash
# Publishes @percy/agent, then its `percy` shadow package, to npm --
# skipping either one if its current package.json version already exists
# on the registry.

# Set the npm registry auth token
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" > ~/.npmrc

# Grab the local version and check to see if that version exists on npm
# (grep output is empty when the version is not yet published).
agentAlreadyReleased=$(npm view @percy/agent versions | grep $(node -p "require('./package.json').version"))

# If the package with that version has not yet been released, go ahead and release it.
# FIX: `[ !$agentAlreadyReleased ]` was always true -- it tested whether the
# literal string "!<value>" (or "!") was non-empty -- so publishing was never
# actually skipped. Test for an empty grep result with -z instead.
if [ -z "$agentAlreadyReleased" ]; then
  npm publish
else
  echo "Skipping @percy/agent publishing because the desired version has already been published."
fi

# Ship the `percy` shadow package
# https://www.npmjs.com/package/percy
# First we jam `percy` into the package name
sed -i 's/@percy\/agent/percy/g' package.json

# Next we grab the local version and check to see if that version exists on npm
percyAlreadyReleased=$(npm view percy versions | grep $(node -p "require('./package.json').version"))

# If the package with that version has not yet been released, go ahead and release it.
if [ -z "$percyAlreadyReleased" ]; then
  npm publish
else
  echo "Skipping percy publishing because the desired version has already been published."
fi
|
def unique_values(example_list):
    """
    Return the unique values from a given list.

    The first occurrence of each value is kept, so the relative order of the
    input is preserved (same result as the original linear-scan version).

    Args:
        example_list (list): list containing duplicate values; elements must
            be hashable.

    Returns:
        list: list containing only unique values, in first-seen order
    """
    # dict preserves insertion order (Python 3.7+), so this deduplicates in
    # O(n) instead of the original O(n^2) `not in` scan over a growing list.
    return list(dict.fromkeys(example_list))
# Demo: duplicates collapse to [1, 2, 3] in first-seen order.
example_list = [1,2,3,3,3,2,2,1]
# NOTE(review): the return value is discarded here; wrap in print(...) to see it.
unique_values(example_list)
|
<gh_stars>1-10
package com.touch.air.mall.member.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.touch.air.common.utils.PageUtils;
import com.touch.air.mall.member.entity.MemberReceiveAddressEntity;
import java.util.List;
import java.util.Map;
/**
 * Member receiving (shipping) address service.
 * (Original comment: 会员收货地址 -- "member shipping address".)
 *
 * @author bin.wang
 * @email <EMAIL>
 * @date 2020-12-04 14:18:41
 */
public interface MemberReceiveAddressService extends IService<MemberReceiveAddressEntity> {

    /**
     * Paged query over receiving addresses.
     *
     * @param params MyBatis-Plus paging/filter parameters (page, limit, ...)
     * @return page wrapper containing the matching rows
     */
    PageUtils queryPage(Map<String, Object> params);

    /**
     * All receiving addresses belonging to the given member.
     *
     * @param memberId id of the member whose addresses are requested
     * @return the member's addresses (possibly empty)
     */
    List<MemberReceiveAddressEntity> getAddresses(Long memberId);
}
|
def extract_emails(string):
    """Extract all email addresses found in a given string.

    Args:
        string (str): arbitrary text to scan.

    Returns:
        list[str]: every substring matching a simple ``local@domain.tld``
        pattern, in order of appearance (possibly empty).
    """
    # FIX: the module never imported `re`, so the first call raised NameError.
    import re

    # \b[\w.-]+@[\w.-]+\.\w+\b -- word chars / dots / dashes around an '@',
    # requiring at least one dot in the domain part.
    email_pattern = re.compile(r"\b[\w.-]+@[\w.-]+\.\w+\b")
    return email_pattern.findall(string)
|
#!/bin/bash
# Copyright (c) 2013 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Detects which datadir layout generation is present and deletes the files
# and directories that later client versions no longer use.

if [ -d "$1" ]; then
  cd "$1" || exit 1
else
  echo "Usage: $0 <datadir>" >&2
  echo "Removes obsolete Thehashcoin database files" >&2
  exit 1
fi

# Probe for marker files of each datadir generation; later probes overwrite
# LEVEL, so the newest matching layout wins.
LEVEL=0
if [ -f wallet.dat -a -f addr.dat -a -f blkindex.dat -a -f blk0001.dat ]; then LEVEL=1; fi
if [ -f wallet.dat -a -f peers.dat -a -f blkindex.dat -a -f blk0001.dat ]; then LEVEL=2; fi
if [ -f wallet.dat -a -f peers.dat -a -f coins/CURRENT -a -f blktree/CURRENT -a -f blocks/blk00000.dat ]; then LEVEL=3; fi
if [ -f wallet.dat -a -f peers.dat -a -f chainstate/CURRENT -a -f blocks/index/CURRENT -a -f blocks/blk00000.dat ]; then LEVEL=4; fi

case $LEVEL in
  0)
    echo "Error: no Thehashcoin datadir detected."
    exit 1
    ;;
  1)
    echo "Detected old Thehashcoin datadir (before 0.7)."
    echo "Nothing to do."
    exit 0
    ;;
  2)
    echo "Detected Thehashcoin 0.7 datadir."
    ;;
  3)
    echo "Detected Thehashcoin pre-0.8 datadir."
    ;;
  4)
    echo "Detected Thehashcoin 0.8 datadir."
    ;;
esac

# Collect obsolete files/dirs for the detected level. The unquoted $(echo ...)
# deliberately lets the shell expand the blk????.dat glob against the datadir.
FILES=""
DIRS=""
if [ $LEVEL -ge 3 ]; then FILES=$(echo $FILES blk????.dat blkindex.dat); fi
if [ $LEVEL -ge 2 ]; then FILES=$(echo $FILES addr.dat); fi
if [ $LEVEL -ge 4 ]; then DIRS=$(echo $DIRS coins blktree); fi

for FILE in $FILES; do
  if [ -f $FILE ]; then
    echo "Deleting: $FILE"
    rm -f $FILE
  fi
done

for DIR in $DIRS; do
  if [ -d $DIR ]; then
    echo "Deleting: $DIR/"
    rm -rf $DIR
  fi
done

echo "Done."
|
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import server.controlleurs.ServeurControlleur;
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author David
*/
public class Starter {

    public static void main(String[] args) {
        // Create the controller, announce readiness, then enter the server loop.
        final ServeurControlleur controleur = new ServeurControlleur();
        System.out.println(">> Serveur a démarré");
        controleur.lancerServeur();
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.listUnordered = void 0;
var listUnordered = {
"viewBox": "0 0 12 16",
"children": [{
"name": "path",
"attribs": {
"fill-rule": "evenodd",
"d": "M2 13c0 .59 0 1-.59 1H.59C0 14 0 13.59 0 13c0-.59 0-1 .59-1h.81c.59 0 .59.41.59 1H2zm2.59-9h6.81c.59 0 .59-.41.59-1 0-.59 0-1-.59-1H4.59C4 2 4 2.41 4 3c0 .59 0 1 .59 1zM1.41 7H.59C0 7 0 7.41 0 8c0 .59 0 1 .59 1h.81c.59 0 .59-.41.59-1 0-.59 0-1-.59-1h.01zm0-5H.59C0 2 0 2.41 0 3c0 .59 0 1 .59 1h.81c.59 0 .59-.41.59-1 0-.59 0-1-.59-1h.01zm10 5H4.59C4 7 4 7.41 4 8c0 .59 0 1 .59 1h6.81c.59 0 .59-.41.59-1 0-.59 0-1-.59-1h.01zm0 5H4.59C4 12 4 12.41 4 13c0 .59 0 1 .59 1h6.81c.59 0 .59-.41.59-1 0-.59 0-1-.59-1h.01z"
},
"children": []
}],
"attribs": {}
};
exports.listUnordered = listUnordered;
|
<reponame>yzh1234567/vy-element
import vyTree from "./src/tree.vue"
vyTree.install=function(Vue){
Vue.component(vyTree.name,vyTree)
}
export default vyTree
|
<gh_stars>0
# Generated by Django 2.2.4 on 2019-08-19 14:12
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds AntiqueSale.stripeId (Stripe payment reference) and updates the
    AntiqueType many-to-many field definition on Antique."""

    dependencies = [
        ('antiqueProjectApp', '0016_auto_20190816_1200'),
    ]

    operations = [
        # Nullable so existing sale rows migrate without a default.
        migrations.AddField(
            model_name='antiquesale',
            name='stripeId',
            field=models.CharField(max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='antique',
            name='AntiqueType',
            field=models.ManyToManyField(help_text='Select a type for this antique', to='antiqueProjectApp.AntiqueType'),
        ),
    ]
|
# zshide: the Zsh IDE
#
# Creates a repository on GitHub, clones it and updates .gitignore. It is
# called from the np (new project) command.
#
# Requirements:
# GITHUB_TOKEN (zshiderc)
# PROJECT_NAME
#
# Author: Lorenzo Cabrini <lorenzo.cabrini@gmail.com>
. $ZI_HOME/util.zsh

# Defaults
PROJECT_DESCRIPTION="Created by zshide"

# Parse key=value arguments into PROJECT_<KEY> variables
# (e.g. `name=foo` sets PROJECT_NAME=foo).
while (( $# )); do
    if [[ $1 =~ .+=.+ ]]; then
        key=${1%=*}
        val=${1#*=}
        eval "PROJECT_${(U)key}='$val'"
        shift
    else
        # FIX: was "$arg" -- a leftover from a removed `for arg in $@` loop,
        # so the message always printed an empty name. $1 is the argument
        # actually being inspected.
        err "cannot handle $1: is not a key-value pair."
        # TODO: for now we just continue. Should we exit on bad args?
        shift
        continue
    fi
done

if [[ -z $PROJECT_NAME ]]; then
    err "no repository name"
    exit 1
fi

. $ZI_HOME/github.zsh
url="$URL/user/repos"

# Build the JSON request body from the template file, then substitute the
# @PROJECT_NAME@ / @PROJECT_DESCRIPTION@ placeholders.
# FIX: initialize repodata so a value inherited from the calling shell
# cannot leak into the request body.
repodata=""
while read line; do
    repodata=$repodata$line
done < $ZI_HOME/github/create-repo.json
for sub in NAME DESCRIPTION; do
    s=PROJECT_$sub
    repodata=${repodata//@${s}@/${(P)s}}
done

# Create the repository on GitHub; the response contains headers followed by
# a blank line and the JSON body.
response=$(eval "$CURL $HEADERS -d '$repodata' $url")
json=$(print $response | sed '1,/^\s*$/d')
state=$(print $response | grep ^Status: | awk '{ print $2 }')
case $state in
    (201)
        info "GitHub repo $PROJECT_NAME created."
        ;;
    (*)
        err "GitHub repo creation failed"
        exit 1
        ;;
esac
REPO_URL=$(print $json | jq '.ssh_url' | tr -d '"')
info "created GitHub repo $PROJECT_NAME"

# Clone the new repository into the projects directory.
(cd $PROJECTS_DIR && git clone $REPO_URL > /dev/null 2>&1)
if [[ -d $PROJECTS_DIR/$PROJECT_NAME ]]; then
    info "cloned $PROJECT_NAME into $PROJECTS_DIR/$PROJECT_NAME"
else
    err "failed to clone $PROJECT_NAME"
    exit 1
fi

# TODO: how can I get a project types .gitignore from GitHub?
cat >> $PROJECTS_DIR/$PROJECT_NAME/.gitignore <<EOF
# Editor
.*.swp
EOF
info "applied general .gitignore"

# Invalidate the cached repository list so the new repo shows up.
# FIX: -f so a missing cache file does not abort the script.
rm -f $ZI_HOME/github-repos.json
. $ZI_HOME/github-get-repos.zsh
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 9 10:34:05 2020
@author: RMS671214
"""
from faspy.basel.credit.credit import calc_prob_default, \
prob_default_interpolation as pdi, expected_loss, \
mean_rr_with_betadist
import pandas as pd
# %%
# Build a CDS spread term structure: tenor (years), spread (basis points),
# and the assumed recovery rate for each point.
spreads = []
spreads.append({"tenor": 1, "spread": 50, "rec_rate": 0.4})
spreads.append({"tenor": 2, "spread": 70, "rec_rate": 0.4})
spreads.append({"tenor": 3, "spread": 100, "rec_rate": 0.4})
spreads.append({"tenor": 5, "spread": 150, "rec_rate": 0.4})
spreads.append({"tenor": 10, "spread": 200, "rec_rate": 0.4})
spreads.append({"tenor": 20, "spread": 350, "rec_rate": 0.4})
print(spreads)

# Bootstrap default probabilities from the spread curve, then interpolate
# them onto yearly tenors 0..19 (pdi = prob_default_interpolation).
defaults = calc_prob_default(spreads)
pd_def = pd.DataFrame(defaults)

tenors = list(range(20))
print(tenors)
interp = pdi(defaults, tenors)
pd_interp = pd.DataFrame(interp)

# %%
# Test expected Loss
prob_default = 0.5
exposure_at_default = 1_000_000
rec_rate = 0.40
# LGD passed as None -- presumably derived from recovery_rate inside
# expected_loss; confirm against faspy's signature.
loss_given_default = None
eloss = expected_loss(prob_default, exposure_at_default, recovery_rate=rec_rate,
                      loss_given_default=loss_given_default)
print(eloss)

# %%
# Mean recovery rate assuming a beta distribution.
# NOTE(review): `rr` is computed but never used or printed.
rr = mean_rr_with_betadist(0.5, 1)
|
#!/bin/bash
# Usage:
#   util-job-executions.sh <URL> <jobid> [status]
#
# Lists the executions of a job, optionally filtered by execution status.

DIR=$(cd `dirname $0` && pwd)
source $DIR/include.sh

jobid=$1
shift
state=$1
shift

# now submit req
runurl="${APIURL}/job/${jobid}/executions"

echo "# Listing Executions for job ${jobid}..."

params="status=${state}"
echo "url: ${runurl}?${params}"

# get listing
# FIX: quote the URL -- unquoted, the literal `?` is a shell glob character
# and pathname expansion could rewrite the argument before curl sees it.
docurl "${runurl}?${params}" > $DIR/curl.out || fail "failed request: ${runurl}"
sh $DIR/api-test-success.sh $DIR/curl.out || exit 2

# Check executions count from the XML response
itemcount=$(xmlsel "/result/executions/@count" $DIR/curl.out)
echo "$itemcount Executions"
if [ "0" != "$itemcount" ] ; then
    # echo all on one line: [id](status) description: job name
    $XMLSTARLET sel -T -t -m "/result/executions/execution" -o "[" -v "@id" -o "](" -v "@status" -o ") " -v "description" -o ": " -v "job/name" -n $DIR/curl.out
fi
#rm $DIR/curl.out
|
var crypto = require("crypto");

/**
 * Installs token support on a telehash `self` instance:
 *   self.token([token], [callback])  -- mint/register a routable token
 *   self.raws["token"]               -- channel handler answering token requests
 *   self.dispense(token, callback)   -- resolve a token back to its hashname
 */
exports.install = function(self)
{
  var tokens = {};

  // A token encodes: first 16 bytes of our hashname + 8 random bytes +
  // 8 bytes of a sha256 integrity hash, hex-encoded (64 chars).
  self.token = function(token, callback)
  {
    if(typeof token == "function")
    {
      callback = token;
      token = false;
    }
    if(!token)
    {
      var bytes = new Buffer(self.hashname,"hex");
      var rand = new Buffer(self.randomHEX(8),"hex");
      var hash = crypto.createHash("sha256").update(Buffer.concat([bytes,rand])).digest();
      token = Buffer.concat([bytes.slice(0,16),rand,hash.slice(0,8)]).toString("hex");
    }
    if(callback) tokens[token] = callback;
    return token;
  }

  self.raws["token"] = function(err, packet, chan)
  {
    if(err) return;
    // Intentionally shadows the outer `self`: the handler acts for the
    // endpoint that received the packet.
    var self = packet.from.self;
    // ensure valid request
    if(!self.isHashname(packet.js.token)) return chan.err("invalid");
    if(tokens[packet.js.token])
    {
      tokens[packet.js.token](packet.from);
      return chan.send({js:{end:true}});
    }
    if(!self.tokens) return chan.err("unknown");
    self.tokens(packet.js.token, packet.from, function(err){
      if(err) chan.err(err);
      // FIX: was `can.send(...)` -- a ReferenceError that crashed the handler
      // whenever a delegated token lookup succeeded.
      chan.send({js:{end:true}});
    });
  }

  // Resolve a token to the hashname that minted it via a DHT seek, verifying
  // each candidate by recomputing the token from its hashname.
  self.dispense = function(token, callback)
  {
    self.seek(token, function(err, see){
      if(!Array.isArray(see)) return callback(err||"not found");
      var match;
      see.forEach(function(hn){
        if(hn.substr(0,32) != token.substr(0,32)) return;
        var bytes = new Buffer(hn,"hex");
        var rand = new Buffer(token.substr(32,16),"hex");
        var hash = crypto.createHash("sha256").update(Buffer.concat([bytes,rand])).digest();
        // FIX: token2 was an implicit global (no `var`); leaks state and
        // throws in strict mode.
        var token2 = Buffer.concat([bytes.slice(0,16),rand,hash.slice(0,8)]).toString("hex");
        if(token == token2) match = hn;
      });
      if(!match || !(match = self.whois(match))) return callback("not found");
      match.raw("token",{js:{token:token},retries:3}, function(err){
        callback((err !== true)?err:false, match);
      });
    });
  }
}
|
#!/bin/bash
# Builds the example site. Earlier steps (docs build, CNAME, commit) are
# currently disabled.
set -e
# read -p "Enter commit message (original prompt was in Chinese): " MESSAGE;
# npm run build:docs
npm run build:example
# echo 'xform.imdo.me' > ./docs/CNAME
# git add .
# git commit -m "docs: $MESSAGE"
|
<gh_stars>1-10
/*
* #%L
* none: runtime code modification by annotation idioms.
* %%
* Copyright (C) 2014 <NAME>.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package none.agent;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import none.annotation.ByTypeOf;
import none.annotation.Instantiate;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.GeneratorAdapter;
import org.objectweb.asm.commons.Method;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.AnnotationNode;
import org.objectweb.asm.tree.ClassNode;
import org.objectweb.asm.tree.FieldInsnNode;
import org.objectweb.asm.tree.LabelNode;
import org.objectweb.asm.tree.LocalVariableNode;
import org.objectweb.asm.tree.MethodNode;
import org.objectweb.asm.tree.VarInsnNode;
/**
 * Implements the {@code @Instantiate} rewrite: the body of an annotated method
 * is moved into a generated inner class implementing a generated single-method
 * interface, and the original method is rewritten to obtain an implementation
 * instance at runtime (via {@link RuntimeBuilder}) and delegate to it.
 * Parameters annotated with {@code @ByTypeOf} select the concrete generated
 * implementation by their runtime type.
 */
public class InstantiateTransform implements Opcodes
{
    /**
     * Entry point of the transform: generates the interface and the inner-class
     * template for {@code mn}, registers them with {@link RuntimeBuilder}, and
     * rewrites {@code mn} into a delegating instantiator method.
     */
    public static MethodNode transform( final MethodNode mn, final Type type, final int version, final String source, final String debug, final int interfaceIndex )
        throws IllegalAccessException, IllegalArgumentException, InvocationTargetException
    {
        // Strip the @Instantiate annotation so the rewritten method is not processed again.
        Util.removeMethodAnnotation( mn, Instantiate.class, false );
        final Class< ? > interfaceClass = createInstatiateInterfaceBytes( mn, type, version, interfaceIndex ).load();
        final InnerClassTemplate template = InstantiateTransform.createInstantiatePrototypeTemplate( mn, type, version, interfaceIndex, source, debug );
        RuntimeBuilder.add( interfaceClass, template );
        return InstantiateTransform.transformToInstantiatorMethod( mn, template );
    }

    // Naming schemes for the generated interface and its concrete
    // implementation classes (the trailing %%d is filled per instantiation).
    static final String interfaceNameFormat = "%s$__none__I%d";
    static final String impClassNameFormatFormat = "%s$__none__I%dC%%d";

    /**
     * Create an interface containing one method of the same name, signature,
     * and return type as m.
     *
     * The interface is made an inner class of <em>className</em>, named "
     * <em>className</em>$__none__I<em>interfaceIndex</em>".
     *
     * @param m
     *            the method to be rewritten.
     * @param outerClassType
     *            internal name of the outer class
     * @param classVersion
     *            bytecode version of the outer class. We will use the same
     *            version for generated code.
     * @param interfaceIndex
     *            The outer class might have several methods that need to be
     *            rewritten. This index is used to make different names for the
     *            generated interface.
     */
    public static ClassBytes createInstatiateInterfaceBytes( final MethodNode m, final Type outerClassType, final int classVersion, final int interfaceIndex )
    {
        final String interfaceName = String.format( interfaceNameFormat, outerClassType.getInternalName(), interfaceIndex );
        final String name = m.name;
        final String desc = m.desc;
        final String signature = m.signature;
        final String[] exceptions = ( String[] ) m.exceptions.toArray( new String[ 0 ] );
        final ClassWriter cw = new ClassWriter( 0 );
        cw.visit( classVersion, ACC_PUBLIC + ACC_ABSTRACT + ACC_INTERFACE, interfaceName, null, "java/lang/Object", null );
        cw.visitMethod( ACC_PUBLIC + ACC_ABSTRACT, name, desc, signature, exceptions ).visitEnd();
        cw.visitEnd();
        // System.out.println( "createInstatiateInterfaceBytes: interfaceName = " + interfaceName );
        // System.out.println( "createInstatiateInterfaceBytes: Type.getObjectType( interfaceName ).getClassName() = " + Type.getObjectType( interfaceName ).getClassName() );
        return new ClassBytes( Type.getObjectType( interfaceName ).getClassName(), cw.toByteArray() );
    }

    /**
     * Create a {@link InnerClassTemplate} containing a default constructor and
     * implementing the interface generated by
     * {@link #createInstatiateInterfaceBytes(MethodNode, Type, int, int)}. The
     * interface is implemented using the code from the method {@code m}. If
     * {@code m} is a non-static method, field and method accesses are rewritten
     * to account for the fact that the code has moved to an inner class.
     *
     * @param m
     *            the code, name, and description of the method to implement.
     * @param outerClassType
     *            the type of the outer class.
     * @param classVersion
     *            the class version of the outer class. This will be used for
     *            the inner class as well.
     * @param interfaceIndex
     *            the index of the generated interface (see
     *            {@link #createInstatiateInterfaceBytes(MethodNode, Type, int, int)}
     *            ).
     * @param sourceFile
     *            Source file of the outer class. This is set as the source file
     *            for the inner class as well, such that debugging into the
     *            rewritten method works.
     * @param sourceDebug
     * @return a template that can be
     *         {@link InnerClassTemplate#instantiate(int) instantiated} to
     *         generate new implementations of the
     *         {@link #createInstatiateInterfaceBytes(MethodNode, Type, int, int)
     *         generated interface.}
     */
    public static InnerClassTemplate createInstantiatePrototypeTemplate(
            MethodNode m,
            final Type outerClassType,
            final int classVersion,
            final int interfaceIndex,
            final String sourceFile,
            final String sourceDebug )
    {
        // Work on a copy so the caller's MethodNode is left intact for
        // transformToInstantiatorMethod.
        m = Util.copy( m );
        final boolean isStatic = ( ( m.access & ACC_STATIC ) != 0 );
        final String interfaceName = String.format( interfaceNameFormat, outerClassType.getInternalName(), interfaceIndex );
        // Nodes whose owner/descriptor must be patched when the template is
        // instantiated as a concrete class (the class name is unknown here,
        // hence the "" placeholders below).
        final ArrayList<FieldInsnNode> templateFieldInsnNodes;
        final ArrayList<LocalVariableNode> templateLocalVariableNodes = new ArrayList< LocalVariableNode >();
        if ( isStatic )
        {
            templateFieldInsnNodes = null;
            transformStaticToInnerClassMethod( m, outerClassType, templateLocalVariableNodes );
        }
        else
        {
            templateFieldInsnNodes = new ArrayList< FieldInsnNode >();
            transformToInnerClassMethod( m, outerClassType, templateFieldInsnNodes );
        }
        final ClassNode cn = new ClassNode( Agent.ASM_API );
        cn.visit( classVersion, ACC_PUBLIC + ACC_SUPER, "", null, "java/lang/Object", new String[] { interfaceName } );
        cn.visitSource( sourceFile, sourceDebug );
        cn.visitField( ACC_FINAL + ACC_SYNTHETIC, "this$0", outerClassType.getDescriptor(), null, null ).visitEnd();
        final MethodNode mConstructor;
        if ( isStatic )
        {
            // Static origin: plain no-arg constructor, no outer-instance field use.
            mConstructor = new MethodNode( ACC_PUBLIC, "<init>", Type.getMethodDescriptor( Type.VOID_TYPE ), null, null );
            final LabelNode l0 = new LabelNode();
            mConstructor.instructions.add( l0 );
            mConstructor.visitVarInsn( ALOAD, 0 );
            mConstructor.visitMethodInsn( INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false );
            mConstructor.visitInsn( RETURN );
            final LabelNode l1 = new LabelNode();
            mConstructor.instructions.add( l1 );
            mConstructor.visitMaxs( 1, 1 );
        }
        else
        {
            // Instance origin: constructor takes the outer instance and stores
            // it into the synthetic this$0 field.
            mConstructor = new MethodNode( ACC_PUBLIC, "<init>", Type.getMethodDescriptor( Type.VOID_TYPE, outerClassType ), null, null );
            final LabelNode l0 = new LabelNode();
            mConstructor.instructions.add( l0 );
            mConstructor.visitVarInsn( ALOAD, 0 );
            mConstructor.visitVarInsn( ALOAD, 1 );
            final FieldInsnNode n = new FieldInsnNode( PUTFIELD, "", "this$0", outerClassType.getDescriptor() );
            templateFieldInsnNodes.add( n );
            mConstructor.instructions.add( n );
            mConstructor.visitVarInsn( ALOAD, 0 );
            mConstructor.visitMethodInsn( INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false );
            mConstructor.visitInsn( RETURN );
            final LabelNode l1 = new LabelNode();
            mConstructor.instructions.add( l1 );
            final LocalVariableNode lv = new LocalVariableNode( "this", "", null, l0, l1, 0 );
            templateLocalVariableNodes.add( lv );
            mConstructor.localVariables.add( lv );
            mConstructor.visitMaxs( 2, 2 );
        }
        cn.methods.add( mConstructor );
        cn.methods.add( m );
        final String impClassNameFormat = String.format( impClassNameFormatFormat, outerClassType.getInternalName(), interfaceIndex );
        return new InnerClassTemplate( cn, templateFieldInsnNodes, templateLocalVariableNodes, impClassNameFormat );
    }

    /**
     * Transform a non-static {@link MethodNode} to make the method a method of an inner
     * class. All references to {@code this} in the method are rewritten to @{code this$0}.
     * The method access is set to public.
     *
     * @param m
     *            method to transform.
     * @param outerClassType
     *            the type of the outer class.
     * @param templateFieldInsnNodes
     *            a list of {@link FieldInsnNode} in the code. When putting the
     *            method into a concrete inner class, the
     *            {@link FieldInsnNode#owner} field of these nodes must be set
     *            to the concrete inner class.
     */
    private static void transformToInnerClassMethod(
            final MethodNode m,
            final Type outerClassType,
            final ArrayList<FieldInsnNode> templateFieldInsnNodes )
    {
        m.access |= ACC_PUBLIC;
        m.access &= ~( ACC_PROTECTED | ACC_PRIVATE );
        // after each "ALOAD 0", append a "GETFIELD this$0".
        // This replaces the "this" on the stack by "this$0" which is required because we are in a inner class.
        final ListIterator< AbstractInsnNode > iter = m.instructions.iterator();
        while( iter.hasNext() )
        {
            final AbstractInsnNode insn = iter.next();
            if ( insn instanceof VarInsnNode )
            {
                final VarInsnNode varinsn = ( VarInsnNode ) insn;
                if( varinsn.getOpcode() == ALOAD && varinsn.var == 0 )
                {
                    final FieldInsnNode n = new FieldInsnNode( GETFIELD, "", "this$0", outerClassType.getDescriptor() );
                    templateFieldInsnNodes.add( n );
                    iter.add( n );
                }
            }
        }
    }

    /**
     * Transform a static {@link MethodNode} to make the method a method of an
     * inner class. The static method is made into a non-static method. A
     * {@code this} field is inserted as local variable 0. The the local
     * variable index of all variables and all variable access instructions is
     * incemented by 1 to account for the {@code this} field. The method access
     * is set to public.
     *
     * @param m
     *            method to transform.
     * @param outerClassType
     *            the type of the outer class.
     * @param templateLocalVariableNodes
     *            a list of {@link LocalVariableNode} in the code. When putting
     *            the method into a concrete inner class, the
     *            {@link LocalVariableNode#desc} field of these nodes must be
     *            set to the descriptor of the concrete inner class.
     */
    private static void transformStaticToInnerClassMethod(
            final MethodNode m,
            final Type outerClassType,
            final ArrayList<LocalVariableNode> templateLocalVariableNodes )
    {
        m.access |= ACC_PUBLIC;
        m.access &= ~( ACC_PROTECTED | ACC_PRIVATE | ACC_STATIC );
        // shift the local variable index of all variable access instructions by +1.
        // this accounts for the "this" field we need to insert.
        final ListIterator< AbstractInsnNode > iter = m.instructions.iterator();
        while( iter.hasNext() )
        {
            final AbstractInsnNode insn = iter.next();
            if ( insn instanceof VarInsnNode )
            {
                final VarInsnNode varinsn = ( VarInsnNode ) insn;
                varinsn.var += 1;
            }
        }
        // shift the local variable index of all local variables by +1.
        // this accounts for the "this" field we need to insert.
        final Iterator< LocalVariableNode > viter = m.localVariables.iterator();
        while( viter.hasNext() )
            viter.next().index++;
        // insert "this" local variable at index 0
        // NOTE(review): assumes the method has at least one local variable
        // entry; a static method compiled without debug info may have none.
        final LocalVariableNode arg0 = ( LocalVariableNode ) m.localVariables.get( 0 );
        final LocalVariableNode vn = new LocalVariableNode( "this", "", null, arg0.start, arg0.end, 0 );
        templateLocalVariableNodes.add( vn );
        m.localVariables.add( 0, vn );
        m.maxLocals++;
    }

    /**
     * Replace the code in {@code m} by a call to
     * {@link RuntimeBuilder#getInnerClassInstance(Object, Object[])}, casting
     * the result to the
     * {@link #createInstatiateInterfaceBytes(MethodNode, Type, int, int)
     * generated interface}, and calling the generated method.
     *
     * @param m
     *            node to transform.
     * @param template
     *            use the {@link InnerClassTemplate#getInterfaceName()
     *            interface} implemented by this {@link InnerClassTemplate}.
     * @return the transformed node {@code m}.
     */
    public static MethodNode transformToInstantiatorMethod( final MethodNode m, final InnerClassTemplate template )
    {
        // NOTE(review): debug print left in library code; consider removing it
        // or routing it through a logger.
        System.out.println( InstantiateTransform.class.getSimpleName() + ".transformToInstantiatorMethod( " + m.name + " " + m.desc + " --> " + template.getInterfaceName() + " )" );
        final String interfaceName = template.getInterfaceName();
        final Type interfaceType = Type.getType( interfaceName );
        final Method interfaceMethod = new Method( m.name, m.desc );
        final boolean isStatic = ( ( m.access & ACC_STATIC ) != 0 );
        final Type[] argumentTypes = interfaceMethod.getArgumentTypes();
        final int numArguments = argumentTypes.length;
        final int argSize = ( Type.getArgumentsAndReturnSizes( interfaceMethod.getDescriptor() ) >> 2 ) - ( isStatic ? 1 : 0 );
        // find parameters annotated with @ByTypeOf
        final int[] byTypeOfArguments = new int[ numArguments ];
        int numByTypeOfArguments = 0;
        final List< AnnotationNode >[] invisibleParameterAnnotations = m.invisibleParameterAnnotations;
        if ( invisibleParameterAnnotations != null )
        {
            for ( int pi = 0; pi < invisibleParameterAnnotations.length; ++pi )
            {
                final List< AnnotationNode > alist = invisibleParameterAnnotations[ pi ];
                if ( alist != null )
                {
                    final Iterator< AnnotationNode > iter = alist.iterator();
                    while ( iter.hasNext() )
                        if ( iter.next().desc.equals( Type.getDescriptor( ByTypeOf.class ) ) )
                        {
                            if ( argumentTypes[ pi ].getSort() == Type.OBJECT )
                                byTypeOfArguments[ numByTypeOfArguments++ ] = pi;
                            else
                                System.err.println( "Ignored @ByTypeOf annotation. (Only supported on Object parameters)." );
                        }
                }
            }
        }
        // Discard the original body; the GeneratorAdapter below emits the
        // replacement delegating code directly into m.
        m.instructions.clear();
        final GeneratorAdapter g = new GeneratorAdapter( m, m.access, m.name, m.desc );
        final int __none__P__localVariableIndex = g.newLocal( Type.getType( Object[].class ) );
        final int __none__O__localVariableIndex = g.newLocal( interfaceType );
        final Label l0 = g.mark();
        // this creates Object[] __none__P which will contain parameters for switching implementation
        g.push( numByTypeOfArguments );
        g.newArray( Type.getType( Object.class ) );
        g.storeLocal( __none__P__localVariableIndex );
        final Label l1 = g.mark();
        // put the @ByTypeOf annotated parameters into Object[] __none__P array
        for ( int i = 0; i < numByTypeOfArguments; ++i )
        {
            g.loadLocal( __none__P__localVariableIndex );
            g.push( i );
            g.loadArg( byTypeOfArguments[ i ] );
            g.visitInsn( AASTORE );
        }
        // get a generated class instance appropriate for the concrete types in __none__P
        if ( isStatic )
        {
            // if m is a static method, invoke RuntimeBuilder.getInnerClassInstance(__none__P);
            g.push( Type.getObjectType( interfaceName ) );
            g.loadLocal( __none__P__localVariableIndex );
            g.invokeStatic( Type.getType( RuntimeBuilder.class ), new Method( "getInnerClassInstance", "(Ljava/lang/Class;[Ljava/lang/Object;)Ljava/lang/Object;" ) );
        }
        else
        {
            // if m is a non-static method, invoke RuntimeBuilder.getInnerClassInstance(this, __none__P);
            g.loadThis();
            g.push( Type.getObjectType( interfaceName ) );
            g.loadLocal( __none__P__localVariableIndex );
            g.invokeStatic( Type.getType( RuntimeBuilder.class ), new Method( "getInnerClassInstance", "(Ljava/lang/Object;Ljava/lang/Class;[Ljava/lang/Object;)Ljava/lang/Object;" ) );
        }
        // cast to generated interface type and store in local variable __none__O
        g.checkCast( interfaceType );
        g.storeLocal( __none__O__localVariableIndex );
        final Label l2 = g.mark();
        // invoke __none__O.<methodName>(...), which is basically a copy of the
        // code that was originally contained in m.
        g.loadLocal( __none__O__localVariableIndex );
        g.loadArgs();
        g.invokeInterface( interfaceType, interfaceMethod );
        g.returnValue();
        final Label l3 = g.mark();
        g.endMethod();
        g.visitMaxs( Math.max( 3, argSize + ( isStatic ? 1 : 0 ) ), 0 );
        g.visitEnd();
        // for existing local variables, set begin and end to l0 and l3
        final Iterator< LocalVariableNode > vi = m.localVariables.iterator();
        m.localVariables = new ArrayList< LocalVariableNode >();
        for ( int i = 0; i < __none__P__localVariableIndex; ++i )
        {
            final LocalVariableNode v = vi.next();
            m.visitLocalVariable( v.name, v.desc, v.signature, l0, l3, i );
        }
        // create new local variables __none__P and __none__O
        m.visitLocalVariable( "__none__P", "[Ljava/lang/Object;", null, l1, l3, __none__P__localVariableIndex );
        m.visitLocalVariable( "__none__O", Type.getObjectType( interfaceName ).getDescriptor(), null, l2, l3, __none__O__localVariableIndex );
        return m;
    }
}
|
The user interface should consist of a form containing the recipient's email address and the subject and body of the email. When the user submits the form, the application should send the email and display a success message.
The application should include a back-end with a function to validate email addresses and a server for sending the emails. The server should be configured with the proper authentication credentials for the email server and should be able to handle multiple concurrent requests.
|
<filename>packages/api/src/entities/IUser.ts
// Shape of a user record exchanged through the API layer.
interface IUser {
  id: string        // unique user identifier
  name: string      // display name
  email: string     // contact e-mail address
  socket: string    // presumably the id of the user's active socket connection — TODO confirm against the socket server
  createdAt: Date   // timestamp of account creation
}

export { IUser }
|
angular.module('cms.shared').factory('shared.vimeoService', [
    '$http',
    '$q',
    'shared.errorService',
function (
    $http,
    $q,
    errorService
) {
    var service = {},
        serviceUrl = 'https://vimeo.com/api/oembed.json?url=https%3A%2F%2Fvimeo.com%2F';

    /* QUERIES */

    /**
     * Fetches oEmbed metadata for a Vimeo video.
     * @param id numeric Vimeo video id, appended to the oEmbed endpoint url.
     * @returns a promise resolving with the parsed oEmbed JSON payload, or
     *          rejected with an error object on any API or network failure.
     */
    service.getVideoInfo = function (id) {
        return wrapGetResponse(serviceUrl + id)
            .then(function (response) {
                return JSON.parse(response.responseText);
            });
    };

    /**
     * Issues a GET with a raw XMLHttpRequest and wraps the outcome in a $q
     * promise, mapping Vimeo's documented status codes to friendly messages.
     */
    function wrapGetResponse(url) {
        var def = $q.defer();

        var xhr = new XMLHttpRequest();
        xhr.addEventListener("load", onComplete);
        // FIX: previously only "load" was handled, so on a network error,
        // abort or timeout the deferred was never settled and callers hung
        // forever. These handlers guarantee the promise always resolves or
        // rejects.
        xhr.addEventListener("error", onFailure);
        xhr.addEventListener("abort", onFailure);
        xhr.addEventListener("timeout", onFailure);
        xhr.open("GET", url);
        xhr.send();

        function onFailure() {
            var error = {
                title: 'Vimeo API Error',
                message: "A network error occurred whilst connecting to the Vimeo API.",
                response: this
            };
            errorService.raise(error);
            def.reject(error);
        }

        function onComplete() {
            var response = this;
            var isUnexpectedError = false;
            var errorMsg = "";

            switch (response.status) {
                case 200:
                    break;
                case 404:
                    errorMsg = "You aren't able to access the video because of privacy or permissions issues, or because the video is still transcoding.";
                    break;
                case 403:
                    errorMsg = "Embed permissions are disabled for this video, so you can't embed it.";
                    break;
                default:
                    isUnexpectedError = true;
                    errorMsg = "Something unexpected happened whilst connecting to the Vimeo API.";
            }

            if (!errorMsg.length) {
                def.resolve(response);
            } else {
                var error = {
                    title: 'Vimeo API Error',
                    message: errorMsg,
                    response: response
                };

                // Only surface unexpected failures globally; the documented
                // 403/404 cases are left to the caller's reject handler.
                if (isUnexpectedError) {
                    errorService.raise(error);
                }
                def.reject(error);
            }
        }

        return def.promise;
    }

    return service;
}]);
|
def create_tempo_dictionary():
    """Build the 40-entry tempo table (``tempo_1``..``tempo_40`` mapped to
    10, 15, ..., 205 in steps of 5) and return only its first two entries,
    preserving insertion order.
    """
    full_table = {f"tempo_{index + 1}": 10 + index * 5 for index in range(40)}
    leading_keys = list(full_table)[:2]
    return {name: full_table[name] for name in leading_keys}
|
<filename>okhelper/src/main/java/com/release/okhelper/builder/PostBuilder.java
package com.release.okhelper.builder;
import com.release.okhelper.callBack.ICallback;
import com.release.okhelper.request.PostRequest;
import java.io.File;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Fluent builder for HTTP POST requests carrying form parameters and
 * multipart file uploads; terminal {@link #execute} hands off to
 * {@code PostRequest}.
 *
 * @author Mr.release
 * @date 2019/3/31/031
 */
public class PostBuilder extends BaseBuilder<PostBuilder> {

    private String url;
    private List<FileInput> files;
    private Map<String, String> params;

    public PostBuilder(String url) {
        this.url = url;
    }

    /** Replaces the whole form-parameter map. */
    public PostBuilder params(Map<String, String> params) {
        this.params = params;
        return this;
    }

    /** Adds a single form parameter, lazily creating the backing map. */
    public PostBuilder param(String key, String val) {
        if (this.params == null)
            params = new LinkedHashMap<>();
        params.put(key, val);
        return this;
    }

    /**
     * Adds several upload files under the same form field name.
     * <p>
     * FIX: previously dereferenced {@code this.files} without initializing
     * it, so calling this method before {@link #file(String, String, File)}
     * threw a {@link NullPointerException}.
     *
     * @param key   form field name shared by all entries
     * @param files map from reported filename to local file
     */
    public PostBuilder files(String key, Map<String, File> files) {
        if (this.files == null)
            this.files = new ArrayList<>();
        // Iterate entries to avoid a second map lookup per key.
        for (Map.Entry<String, File> entry : files.entrySet()) {
            this.files.add(new FileInput(key, entry.getKey(), entry.getValue()));
        }
        return this;
    }

    /** Adds one upload file, lazily creating the backing list. */
    public PostBuilder file(String name, String filename, File file) {
        if (this.files == null)
            this.files = new ArrayList<>();
        files.add(new FileInput(name, filename, file));
        return this;
    }

    /** Builds the request and executes it with the given callback. */
    public void execute(ICallback callback) {
        new PostRequest(url, params, headers, files).execute(callback);
    }

    /** Value object describing one multipart file part. */
    public static class FileInput {
        public String key;       // form field name
        public String filename;  // filename reported to the server
        public File file;        // local file to upload

        public FileInput(String name, String filename, File file) {
            this.key = name;
            this.filename = filename;
            this.file = file;
        }

        @Override
        public String toString() {
            return "FileInput{" +
                    "key='" + key + '\'' +
                    ", filename='" + filename + '\'' +
                    ", file=" + file +
                    '}';
        }
    }
}
|
#!/bin/bash
# Package the TUDa-CI LaTeX templates into a TDS-style archive
# (tuda_templates.zip) containing doc/ and tex/ trees.
#
# FIX: the script previously kept going after any failed step (e.g. a
# broken latexmk build), silently producing and shipping an incomplete
# zip. Abort on the first error and on unset variables instead.
set -eu

./addlicense.sh
rm -f tuda_templates.zip

# Start from a clean TDS tree.
mkdir -p texmf
rm -rf texmf/*
mkdir -p texmf/doc/latex/tuda-ci
mkdir -p texmf/tex/latex/tuda-ci

# Build the demo documents and ship them as documentation.
cd example
latexmk --lualatex
mv DEMO-*.pdf ../texmf/doc/latex/tuda-ci
cd ..

# Class/package sources plus the logo required at build time.
cp -r tex/. texmf/tex/latex/tuda-ci/.
cp ~/tuda_logo.pdf texmf/tex/latex/tuda-ci/.

# Example sources accompany the docs.
mkdir -p texmf/doc/latex/tuda-ci/example
cp example/*.tex texmf/doc/latex/tuda-ci/example/.
cp example/*.lco texmf/doc/latex/tuda-ci/example/.
cp example/*.bib texmf/doc/latex/tuda-ci/example/.
cp README.md texmf/doc/latex/tuda-ci/.

cd texmf
zip -ll -y -r tuda_templates.zip doc tex
mv tuda_templates.zip ../.
|
#!/usr/bin/env bash
# Regenerate the UnrealGAMS IDE/build project files (Makefile + VS Code)
# via Unreal Engine's GenerateProjectFiles.sh.
# Required environment variables:
#   UE4_ROOT - path to the Unreal Engine checkout (contains GenerateProjectFiles.sh)
#   UE4_GAMS - path to the directory containing UnrealGAMS.uproject
echo "Generating UnrealGAMS project files"
echo "----------------------"
"$UE4_ROOT"/GenerateProjectFiles.sh -project="$UE4_GAMS"/UnrealGAMS.uproject -game -engine -Makefile -vscode
|
<reponame>eengineergz/Lambda
// Mock blog-post fixture data (generated lorem-ipsum content, placeholder
// avatars). Shape per record: tags[], post, title, author{last, first},
// picture, id. NOTE(review): the first record uses unquoted keys while the
// others quote them — harmless in JS, but worth normalising eventually.
const data = [
  {
    tags: [
      "enim",
      "dolore",
      "irure",
      "consectetur"
    ],
    post: "Ea occaecat aute minim esse in. Amet et reprehenderit eiusmod sit aute cupidatat dolor incididunt minim nisi. Quis veniam et amet esse sunt. Laborum et anim occaecat est velit commodo. Ipsum occaecat minim aliqua duis ea magna culpa id ea est proident sit ad. Veniam est amet aliquip anim labore. Elit esse est cupidatat sunt anim labore nostrud elit.\n\nExercitation esse irure mollit eiusmod amet deserunt amet tempor fugiat incididunt aliquip. Amet fugiat in et ex minim minim nulla adipisicing velit proident deserunt nulla. Tempor laboris amet ad minim Lorem cillum consectetur occaecat quis laboris. Exercitation elit duis adipisicing labore velit elit sit ipsum.",
    title: "Nostrud consequat in eu mollit mollit culpa exercitation exercitation adipisicing dolore ad Lorem proident.",
    "author": {
      "last": "Blanchard",
      "first": "Peck"
    },
    "picture": "http://placehold.it/32x32",
    "id": 0
  },
  {
    "tags": [
      "est",
      "velit",
      "nisi",
      "in",
      "elit",
      "pariatur"
    ],
    "post": "Pariatur nulla exercitation aliqua consectetur ad ex exercitation eiusmod non consectetur. Reprehenderit et reprehenderit incididunt adipisicing aliqua reprehenderit quis ut. Adipisicing officia est id sunt mollit voluptate quis laboris consequat qui laboris duis ut. Veniam Lorem eiusmod labore reprehenderit minim incididunt esse reprehenderit nostrud occaecat.\n\nDeserunt irure eu pariatur ut. Proident ad dolore ad reprehenderit laboris tempor aute minim aute dolor ipsum. Lorem minim pariatur ullamco dolore consequat ut id officia laboris eiusmod duis laborum proident. Ea ullamco id aliquip consequat elit velit sint pariatur labore aliqua duis. Nostrud eiusmod labore nulla culpa ad sint minim ullamco. Lorem commodo consequat non ea ut pariatur quis laboris. Veniam aute aute reprehenderit est in amet est in ipsum ut.",
    "title": "Culpa laboris Lorem id incididunt dolor et deserunt sunt culpa est.",
    "author": {
      "last": "Brown",
      "first": "Eddie"
    },
    "picture": "http://placehold.it/32x32",
    "id": 1
  },
  {
    "tags": [
      "cupidatat",
      "Lorem",
      "adipisicing",
      "consectetur",
      "sit",
      "magna"
    ],
    "post": "Ipsum deserunt do incididunt veniam nisi et tempor nulla. Irure in officia ea nulla eu. Cupidatat laboris cupidatat anim enim incididunt. Commodo sunt culpa deserunt nostrud dolore id sunt Lorem excepteur et ad commodo. Veniam sit dolore minim ex.\n\nId non qui laborum irure reprehenderit velit. Sunt esse eiusmod duis est. Exercitation labore voluptate et ad ea cupidatat.",
    "title": "Veniam dolor culpa esse nulla.",
    "author": {
      "last": "Salinas",
      "first": "Mabel"
    },
    "picture": "http://placehold.it/32x32",
    "id": 2
  },
  {
    "tags": [
      "ad",
      "aliquip",
      "ullamco",
      "proident",
      "eu",
      "velit"
    ],
    "post": "Velit exercitation et do occaecat sunt minim exercitation. Ex magna esse ad ad commodo. Quis in incididunt minim nisi ut. Magna eiusmod aliqua esse nostrud irure sit consequat fugiat aute minim ipsum do esse labore. Lorem aute in consectetur cupidatat et nulla esse occaecat ea consequat in.\n\nEu ad occaecat nulla adipisicing sunt do nisi do sit esse nisi. Do minim velit ex pariatur Lorem exercitation nostrud ea. Dolore minim adipisicing irure et eiusmod eiusmod in nulla. Anim minim magna duis nulla ex reprehenderit veniam esse ad ipsum qui laboris ex exercitation. Adipisicing dolore ut voluptate commodo sunt laboris. Deserunt aute cupidatat deserunt velit duis pariatur ullamco et sunt ipsum adipisicing nostrud cupidatat quis.",
    "title": "Est cupidatat fugiat culpa commodo eiusmod minim consectetur eu qui officia quis occaecat.",
    "author": {
      "last": "Manning",
      "first": "Kelli"
    },
    "picture": "http://placehold.it/32x32",
    "id": 3
  },
  {
    "tags": [
      "Lorem",
      "aliquip",
      "commodo"
    ],
    "post": "Magna et tempor deserunt Lorem velit labore. Non nostrud minim officia deserunt irure dolor proident ullamco proident. Adipisicing occaecat id sint esse minim ea adipisicing exercitation et tempor id Lorem. Minim occaecat exercitation ad esse aliquip ullamco sunt eu aute. Proident Lorem excepteur ea anim laboris sint non dolore adipisicing nostrud nisi ut minim aliquip. Consequat est aliqua cillum non magna amet nisi sunt do laboris sint Lorem.\n\nCommodo irure deserunt elit aliqua. Culpa ullamco amet anim mollit id enim nisi dolore Lorem Lorem. Veniam do magna aliquip voluptate pariatur ad deserunt esse veniam Lorem consequat sunt sunt. Ea sunt magna et eu sint ad minim.",
    "title": "Voluptate reprehenderit sunt aliqua ipsum reprehenderit.",
    "author": {
      "last": "Ochoa",
      "first": "Serena"
    },
    "picture": "http://placehold.it/32x32",
    "id": 4
  }
];
|
package org.glamey.training.codes.sort;
import java.util.Arrays;
import java.util.Random;
/**
 * Demo of four hand-written quicksort variants over {@code int[]}.
 *
 * <p>v1 uses a middle pivot with two-sided swapping; v2–v4 use the first
 * element as pivot with the "fill the hole" partition scheme (v3 extracts
 * the partition step into its own method).
 */
public class QuickSortDemo {

    public static void main(String[] args) {
        int[] nums = new int[10];
        for (int i = 0; i < 10; i++) {
            nums[i] = i;
        }
        shuffle(nums);
        System.out.println("before v1 -> " + Arrays.toString(nums));
        quickSort_v1(nums, 0, nums.length - 1);
        System.out.println("after v1 -> " + Arrays.toString(nums) + "\r\n");

        nums = new int[] {47, 29, 71, 99, 78, 19, 24, 47};
        System.out.println("before v2 -> " + Arrays.toString(nums));
        quickSort_v2(nums, 0, nums.length - 1);
        System.out.println("after v2 -> " + Arrays.toString(nums) + "\r\n");

        nums = new int[] {9, 8, 7, 6, 5, 4, 3, 2, 1};
        System.out.println("before v3 -> " + Arrays.toString(nums));
        quickSort_v3(nums, 0, nums.length - 1);
        System.out.println("after v3 -> " + Arrays.toString(nums) + "\r\n");

        nums = new int[] {9, 8, 7, 6, 5, 4, 3, 2, 1};
        System.out.println("before v4 -> " + Arrays.toString(nums));
        quickSort_v4(nums, 0, nums.length - 1);
        System.out.println("after v4 -> " + Arrays.toString(nums) + "\r\n");
    }

    /** First-element pivot, hole-filling partition written inline. */
    private static void quickSort_v4(int[] nums, int left, int right) {
        if (nums == null || nums.length <= 1 || left > right) {
            return;
        }
        int i = left, j = right, p = left, pVal = nums[p];
        while (i < j) {
            // Scan from the right for an element smaller than the pivot,
            // move it into the current "hole" on the left, then mirror.
            while (i < j && nums[j] >= pVal) {
                j--;
            }
            nums[i] = nums[j];
            while (i < j && nums[i] <= pVal) {
                i++;
            }
            nums[j] = nums[i];
        }
        if (i == j) {
            nums[i] = pVal; // drop the pivot into its final slot
        }
        quickSort_v4(nums, left, i - 1);
        quickSort_v4(nums, i + 1, right);
    }

    /** Same scheme as v4 but with the partition step factored out. */
    private static void quickSort_v3(int[] nums, int left, int right) {
        if (nums == null || nums.length <= 1 || left > right) {
            return;
        }
        int partitionIndex = partition(nums, left, right);
        quickSort_v3(nums, left, partitionIndex - 1);
        quickSort_v3(nums, partitionIndex + 1, right);
    }

    /**
     * Hole-filling partition around {@code nums[left]}; returns the final
     * index of the pivot.
     */
    private static int partition(int[] nums, int left, int right) {
        int p = left, pVal = nums[p];
        while (left < right) {
            while (left < right && nums[right] >= pVal) {
                right--;
            }
            nums[left] = nums[right];
            while (left < right && nums[left] <= pVal) {
                left++;
            }
            nums[right] = nums[left];
        }
        if (left == right) {
            nums[left] = pVal;
        }
        return left;
    }

    /** First-element pivot with explicit bound checks after each scan. */
    private static void quickSort_v2(int[] nums, int left, int right) {
        if (nums == null || nums.length <= 1 || left > right) {
            return;
        }
        int i = left, j = right, pVal = nums[left];
        while (i <= j) {
            while (i <= j && nums[j] >= pVal) {
                j--;
            }
            if (i <= j) {
                nums[i] = nums[j];
                i++;
            }
            while (i <= j && nums[i] <= pVal) {
                i++;
            }
            if (i <= j) {
                nums[j] = nums[i];
                j--;
            }
        }
        nums[i] = pVal;
        quickSort_v2(nums, left, i - 1);
        quickSort_v2(nums, i + 1, right);
    }

    /** Middle pivot with symmetric two-sided swapping (Hoare-style). */
    private static void quickSort_v1(int[] nums, int left, int right) {
        if (left >= right || nums == null || nums.length <= 1) {
            return;
        }
        // >>> 1 avoids overflow of (left + right) for large indices.
        int i = left, j = right, p = (left + right) >>> 1, pVal = nums[p];
        while (i <= j) {
            while (pVal > nums[i]) {
                ++i;
            }
            while (pVal < nums[j]) {
                --j;
            }
            if (i < j) {
                int tmp = nums[i];
                nums[i] = nums[j];
                nums[j] = tmp;
                ++i;
                --j;
            } else if (i == j) {
                ++i;
            }
        }
        quickSort_v1(nums, left, j);
        quickSort_v1(nums, i, right);
    }

    /**
     * Unbiased Fisher–Yates shuffle.
     *
     * <p>FIX: the original drew {@code random.nextInt(i)} (range [0, i-1]),
     * which is Sattolo's algorithm — it generates only cyclic permutations
     * and never leaves an element in place, so the shuffle was biased. A
     * uniform shuffle must draw from [0, i] inclusive.
     */
    private static void shuffle(int[] nums) {
        Random random = new Random();
        for (int i = nums.length - 1; i > 0; i--) {
            int r = random.nextInt(i + 1);
            int tmp = nums[r];
            nums[r] = nums[i];
            nums[i] = tmp;
        }
    }
}
|
<gh_stars>0
// tslint:disable-next-line:import-blacklist
import * as Rx from 'rxjs/Rx';
// Thin wrapper around the third-party MarkerClusterer so application code
// can import a locally named class (and extend it later) instead of
// depending on the library symbol directly.
export class MarkerCluster extends MarkerClusterer {
  // NOTE(review): this constructor only forwards its arguments to the
  // superclass; it adds no behaviour and could be omitted entirely.
  constructor(map: google.maps.Map, markers?: google.maps.Marker[], options?: MarkerClustererOptions) {
    super(map, markers, options);
  }
}
|
<filename>oarepo_s3/ext.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CESNET
#
# oarepo-s3 is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""S3 file storage support for Invenio.
To use this module together with Invenio-Files-Rest there are a few things you
need to keep in mind.
The storage factory configuration variable, ``FILES_REST_STORAGE_FACTORY``
needs to be set to ``'oarepo_s3.s3fs_storage_factory'`` importable string.
We think the best way to use this module is to have one `Localtion
<https://invenio-files-rest.readthedocs.io/en/latest/api.html#module-invenio_files_rest.models>`_
for each S3 bucket. This is just for simplicity, it can used however needed.
When creating a new location which will use the S3 API, the URI needs to start
with ``s3://``, for example
``invenio files location s3_default s3://my-bucket --default`` will
create a new location, set it as default location for your instance and use the
bucket ``my-bucket``. For more information about this command check
`Invenio-Files-Rest <https://invenio-files-rest.readthedocs.io/en/latest/>`_
documentation.
Then, there are a few configuration variables that need to be set on your
instance, like the endpoint, the access key and the secret access key, see a
more detailed description in :any:`configuration`.
.. note::
This module doesn't create S3 buckets automatically, so before starting they
need to be created.
You might also want to set the correct `CORS configuration
<https://docs.aws.amazon.com/AmazonS3/latest/dev/cors.html>`_ so files can
be used by your interface for things like previewing a PDF with some
Javascript library.
"""
from flask import current_app
from invenio_base.utils import obj_or_import_string
from . import config
class OARepoS3State(object):
    """OARepo-S3 extension state.

    Holds a reference to the Flask ``app`` and lazily builds the S3 client
    from application configuration on each access.
    """

    def __init__(self, app):
        """Initialize the state.

        :param app: the Flask application this state is bound to.
        """
        self.app = app

    @property
    def tenant(self):
        # Optional S3 tenant name from app config; None when unset.
        return self.app.config.get('S3_TENANT', None)

    @property
    def client(self):
        """Construct an S3 client instance.

        The client class is resolved from the ``S3_CLIENT`` config entry
        (dotted import path or class object); credentials and connection
        settings come from the ``invenio-s3`` extension state of the
        current application. A new client is built on every access.
        """
        client = obj_or_import_string(self.app.config['S3_CLIENT'])
        s3_info = current_app.extensions['invenio-s3'].init_s3fs_info
        return client(access_key=s3_info['key'],
                      secret_key=s3_info['secret'],
                      client_kwargs=s3_info['client_kwargs'],
                      tenant=self.tenant,
                      config_kwargs=s3_info['config_kwargs'])
class OARepoS3(object):
    """OARepo-S3 extension: registers its state object and default
    configuration values on a Flask application."""

    def __init__(self, app=None):
        """Create the extension, optionally binding it to ``app`` now."""
        if app:
            self.init_app(app)

    def init_app(self, app):
        """Flask application initialization: apply config defaults and
        expose the extension state under ``app.extensions['oarepo-s3']``."""
        self.init_config(app)
        app.extensions['oarepo-s3'] = OARepoS3State(app)

    def init_config(self, app):
        """Copy every ``S3_``-prefixed default from :mod:`.config` into the
        application config, without overriding values already set."""
        s3_defaults = [name for name in dir(config) if name.startswith('S3_')]
        for name in s3_defaults:
            app.config.setdefault(name, getattr(config, name))
|
from __future__ import unicode_literals
from django.test import override_settings
from rest_framework import status
from rest_api.tests import BaseAPITestCase
from ..models import Key
from ..permissions import (
permission_key_delete, permission_key_upload, permission_key_view
)
from .literals import TEST_KEY_DATA, TEST_KEY_FINGERPRINT
@override_settings(OCR_AUTO_OCR=False)
class KeyAPITestCase(BaseAPITestCase):
    """REST API tests for the Key endpoints: create (upload), delete, detail.

    Each operation is exercised twice: without the required permission or
    object access (expecting 403/404 and no state change) and with it
    granted (expecting success). ``OCR_AUTO_OCR`` is disabled so no OCR
    side effects run during the tests.
    """

    def setUp(self):
        super(KeyAPITestCase, self).setUp()
        self.login_user()

    def _create_key(self):
        # Helper: persist a key fixture directly through the model layer,
        # bypassing the API and its permission checks.
        return Key.objects.create(key_data=TEST_KEY_DATA)

    # Key creation by upload

    def _request_key_create_view(self):
        # POST the test key material to the key list endpoint.
        return self.post(
            viewname='rest_api:key-list', data={
                'key_data': TEST_KEY_DATA
            }
        )

    def test_key_create_view_no_permission(self):
        """Upload without the upload permission is forbidden and stores nothing."""
        response = self._request_key_create_view()
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.assertEqual(Key.objects.all().count(), 0)

    def test_key_create_view_with_permission(self):
        """Upload with permission creates the key and reports its fingerprint."""
        self.grant_permission(permission=permission_key_upload)
        response = self._request_key_create_view()
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data['fingerprint'], TEST_KEY_FINGERPRINT)

        key = Key.objects.first()
        self.assertEqual(Key.objects.count(), 1)
        self.assertEqual(key.fingerprint, TEST_KEY_FINGERPRINT)

    # Key deletion

    def _request_key_delete_view(self):
        return self.delete(
            viewname='rest_api:key-detail', args=(self.key.pk,)
        )

    def test_key_delete_view_no_access(self):
        """Delete without object access returns 404 and keeps the key."""
        self.key = self._create_key()
        response = self._request_key_delete_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(Key.objects.count(), 1)

    def test_key_delete_view_with_access(self):
        """Delete with object-level access removes the key."""
        self.key = self._create_key()
        self.grant_access(
            permission=permission_key_delete, obj=self.key
        )
        response = self._request_key_delete_view()
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(Key.objects.count(), 0)

    # Key detail

    def _request_key_detail_view(self):
        return self.get(
            viewname='rest_api:key-detail', args=(self.key.pk,)
        )

    def test_key_detail_view_no_access(self):
        """Detail without object access is hidden behind a 404."""
        self.key = self._create_key()
        response = self._request_key_detail_view()
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_key_detail_view_with_access(self):
        """Detail with object-level access exposes the key's fingerprint."""
        self.key = self._create_key()
        self.grant_access(
            permission=permission_key_view, obj=self.key
        )
        response = self._request_key_detail_view()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            response.data['fingerprint'], self.key.fingerprint
        )
|
# Root of the pyenv installation; pyenv and its shims read this variable.
export PYENV_ROOT=~/.pyenv

# init according to man page
# Only hook pyenv into the shell when the `pyenv` binary is actually
# available; `$+commands[pyenv]` is zsh's O(1) lookup in the command hash
# table, so this avoids spawning a subshell just to probe for the command.
if (( $+commands[pyenv] ))
then
    eval "$(pyenv init -)"
    eval "$(pyenv virtualenv-init -)"
fi
|
<gh_stars>1-10
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module Talent
module V4
# Message representing a period of time between two timestamps.
# @!attribute [rw] start_time
# @return [::Google::Protobuf::Timestamp]
# Begin of the period (inclusive).
# @!attribute [rw] end_time
# @return [::Google::Protobuf::Timestamp]
# End of the period (exclusive).
class TimestampRange
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# A resource that represents a location with full geographic information.
# @!attribute [rw] location_type
# @return [::Google::Cloud::Talent::V4::Location::LocationType]
# The type of a location, which corresponds to the address lines field of
# {::Google::Type::PostalAddress google.type.PostalAddress}. For example, "Downtown, Atlanta, GA, USA"
# has a type of {::Google::Cloud::Talent::V4::Location::LocationType::NEIGHBORHOOD LocationType.NEIGHBORHOOD}, and "Kansas City, KS, USA"
# has a type of {::Google::Cloud::Talent::V4::Location::LocationType::LOCALITY LocationType.LOCALITY}.
# @!attribute [rw] postal_address
# @return [::Google::Type::PostalAddress]
# Postal address of the location that includes human readable information,
# such as postal delivery and payments addresses. Given a postal address,
# a postal service can deliver items to a premises, P.O. Box, or other
# delivery location.
# @!attribute [rw] lat_lng
# @return [::Google::Type::LatLng]
# An object representing a latitude/longitude pair.
# @!attribute [rw] radius_miles
# @return [::Float]
# Radius in miles of the job location. This value is derived from the
# location bounding box in which a circle with the specified radius
# centered from {::Google::Type::LatLng google.type.LatLng} covers the area associated with the
# job location.
# For example, currently, "Mountain View, CA, USA" has a radius of
# 6.17 miles.
class Location
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# An enum which represents the type of a location.
module LocationType
# Default value if the type isn't specified.
LOCATION_TYPE_UNSPECIFIED = 0
# A country level location.
COUNTRY = 1
# A state or equivalent level location.
ADMINISTRATIVE_AREA = 2
# A county or equivalent level location.
SUB_ADMINISTRATIVE_AREA = 3
# A city or equivalent level location.
LOCALITY = 4
# A postal code level location.
POSTAL_CODE = 5
# A sublocality is a subdivision of a locality, for example a city borough,
# ward, or arrondissement. Sublocalities are usually recognized by a local
# political authority. For example, Manhattan and Brooklyn are recognized
# as boroughs by the City of New York, and are therefore modeled as
# sublocalities.
SUB_LOCALITY = 6
# A district or equivalent level location.
SUB_LOCALITY_1 = 7
# A smaller district or equivalent level display.
SUB_LOCALITY_2 = 8
# A neighborhood level location.
NEIGHBORHOOD = 9
# A street address level location.
STREET_ADDRESS = 10
end
end
# Meta information related to the job searcher or entity
# conducting the job search. This information is used to improve the
# performance of the service.
# @!attribute [rw] domain
# @return [::String]
# Required if {::Google::Cloud::Talent::V4::RequestMetadata#allow_missing_ids allow_missing_ids} is unset or `false`.
#
# The client-defined scope or source of the service call, which typically
# is the domain on
# which the service has been implemented and is currently being run.
#
# For example, if the service is being run by client <em>Foo, Inc.</em>, on
# job board www.foo.com and career site www.bar.com, then this field is
# set to "foo.com" for use on the job board, and "bar.com" for use on the
# career site.
#
# Note that any improvements to the model for a particular tenant site rely
# on this field being set correctly to a unique domain.
#
# The maximum number of allowed characters is 255.
# @!attribute [rw] session_id
# @return [::String]
# Required if {::Google::Cloud::Talent::V4::RequestMetadata#allow_missing_ids allow_missing_ids} is unset or `false`.
#
# A unique session identification string. A session is defined as the
# duration of an end user's interaction with the service over a certain
# period.
# Obfuscate this field for privacy concerns before
# providing it to the service.
#
# Note that any improvements to the model for a particular tenant site rely
# on this field being set correctly to a unique session ID.
#
# The maximum number of allowed characters is 255.
# @!attribute [rw] user_id
# @return [::String]
# Required if {::Google::Cloud::Talent::V4::RequestMetadata#allow_missing_ids allow_missing_ids} is unset or `false`.
#
# A unique user identification string, as determined by the client.
# To have the strongest positive impact on search quality
# make sure the client-level is unique.
# Obfuscate this field for privacy concerns before
# providing it to the service.
#
# Note that any improvements to the model for a particular tenant site rely
# on this field being set correctly to a unique user ID.
#
# The maximum number of allowed characters is 255.
# @!attribute [rw] allow_missing_ids
# @return [::Boolean]
# Only set when any of {::Google::Cloud::Talent::V4::RequestMetadata#domain domain}, {::Google::Cloud::Talent::V4::RequestMetadata#session_id session_id} and {::Google::Cloud::Talent::V4::RequestMetadata#user_id user_id} isn't
# available for some reason. It is highly recommended not to set this field
# and provide accurate {::Google::Cloud::Talent::V4::RequestMetadata#domain domain}, {::Google::Cloud::Talent::V4::RequestMetadata#session_id session_id} and {::Google::Cloud::Talent::V4::RequestMetadata#user_id user_id} for the
# best service experience.
# @!attribute [rw] device_info
# @return [::Google::Cloud::Talent::V4::DeviceInfo]
# The type of device used by the job seeker at the time of the call to the
# service.
class RequestMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Additional information returned to client, such as debugging information.
# @!attribute [rw] request_id
# @return [::String]
# A unique id associated with this call.
# This id is logged for tracking purposes.
class ResponseMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Device information collected from the job seeker, candidate, or
# other entity conducting the job search. Providing this information improves
# the quality of the search results across devices.
# @!attribute [rw] device_type
# @return [::Google::Cloud::Talent::V4::DeviceInfo::DeviceType]
# Type of the device.
# @!attribute [rw] id
# @return [::String]
# A device-specific ID. The ID must be a unique identifier that
# distinguishes the device from other devices.
class DeviceInfo
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# An enumeration describing an API access portal and exposure mechanism.
module DeviceType
# The device type isn't specified.
DEVICE_TYPE_UNSPECIFIED = 0
# A desktop web browser, such as, Chrome, Firefox, Safari, or Internet
# Explorer)
WEB = 1
# A mobile device web browser, such as a phone or tablet with a Chrome
# browser.
MOBILE_WEB = 2
# An Android device native application.
ANDROID = 3
# An iOS device native application.
IOS = 4
# A bot, as opposed to a device operated by human beings, such as a web
# crawler.
BOT = 5
# Other devices types.
OTHER = 6
end
end
# Custom attribute values that are either filterable or non-filterable.
# @!attribute [rw] string_values
# @return [::Array<::String>]
# Exactly one of {::Google::Cloud::Talent::V4::CustomAttribute#string_values string_values} or {::Google::Cloud::Talent::V4::CustomAttribute#long_values long_values} must be specified.
#
# This field is used to perform a string match (`CASE_SENSITIVE_MATCH` or
# `CASE_INSENSITIVE_MATCH`) search.
# For filterable `string_value`s, a maximum total number of 200 values
# is allowed, with each `string_value` has a byte size of no more than
# 500B. For unfilterable `string_values`, the maximum total byte size of
# unfilterable `string_values` is 50KB.
#
# Empty string isn't allowed.
# @!attribute [rw] long_values
# @return [::Array<::Integer>]
# Exactly one of {::Google::Cloud::Talent::V4::CustomAttribute#string_values string_values} or {::Google::Cloud::Talent::V4::CustomAttribute#long_values long_values} must be specified.
#
# This field is used to perform number range search.
# (`EQ`, `GT`, `GE`, `LE`, `LT`) over filterable `long_value`.
#
# Currently at most 1 {::Google::Cloud::Talent::V4::CustomAttribute#long_values long_values} is supported.
# @!attribute [rw] filterable
# @return [::Boolean]
# If the `filterable` flag is true, the custom field values may be used for
# custom attribute filters {::Google::Cloud::Talent::V4::JobQuery#custom_attribute_filter JobQuery.custom_attribute_filter}.
# If false, these values may not be used for custom attribute filters.
#
# Default is false.
# @!attribute [rw] keyword_searchable
# @return [::Boolean]
# If the `keyword_searchable` flag is true, the keywords in custom fields are
# searchable by keyword match.
# If false, the values are not searchable by keyword match.
#
# Default is false.
class CustomAttribute
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Spell check result.
# @!attribute [rw] corrected
# @return [::Boolean]
# Indicates if the query was corrected by the spell checker.
# @!attribute [rw] corrected_text
# @return [::String]
# Correction output consisting of the corrected keyword string.
# @!attribute [rw] corrected_html
# @return [::String]
# Corrected output with html tags to highlight the corrected words.
# Corrected words are called out with the "<b><i>...</i></b>" html tags.
#
# For example, the user input query is "software enginear", where the second
# word, "enginear," is incorrect. It should be "engineer". When spelling
# correction is enabled, this value is
# "software <b><i>engineer</i></b>".
class SpellingCorrection
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Job compensation details.
# @!attribute [rw] entries
# @return [::Array<::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry>]
# Job compensation information.
#
# At most one entry can be of type
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationType::BASE CompensationInfo.CompensationType.BASE}, which is
# referred as **base compensation entry** for the job.
# @!attribute [r] annualized_base_compensation_range
# @return [::Google::Cloud::Talent::V4::CompensationInfo::CompensationRange]
# Output only. Annualized base compensation range. Computed as base compensation entry's
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#amount CompensationEntry.amount} times
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#expected_units_per_year CompensationEntry.expected_units_per_year}.
#
# See {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry CompensationEntry} for explanation on compensation annualization.
# @!attribute [r] annualized_total_compensation_range
# @return [::Google::Cloud::Talent::V4::CompensationInfo::CompensationRange]
# Output only. Annualized total compensation range. Computed as all compensation entries'
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#amount CompensationEntry.amount} times
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#expected_units_per_year CompensationEntry.expected_units_per_year}.
#
# See {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry CompensationEntry} for explanation on compensation annualization.
class CompensationInfo
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# A compensation entry that represents one component of compensation, such
# as base pay, bonus, or other compensation type.
#
# Annualization: One compensation entry can be annualized if
# - it contains valid {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#amount amount} or {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#range range}.
# - and its {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#expected_units_per_year expected_units_per_year} is set or can be derived.
# Its annualized range is determined as ({::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#amount amount} or {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#range range}) times
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#expected_units_per_year expected_units_per_year}.
# @!attribute [rw] type
# @return [::Google::Cloud::Talent::V4::CompensationInfo::CompensationType]
# Compensation type.
#
# Default is {::Google::Cloud::Talent::V4::CompensationInfo::CompensationType::COMPENSATION_TYPE_UNSPECIFIED CompensationType.COMPENSATION_TYPE_UNSPECIFIED}.
# @!attribute [rw] unit
# @return [::Google::Cloud::Talent::V4::CompensationInfo::CompensationUnit]
# Frequency of the specified amount.
#
# Default is {::Google::Cloud::Talent::V4::CompensationInfo::CompensationUnit::COMPENSATION_UNIT_UNSPECIFIED CompensationUnit.COMPENSATION_UNIT_UNSPECIFIED}.
# @!attribute [rw] amount
# @return [::Google::Type::Money]
# Compensation amount.
# @!attribute [rw] range
# @return [::Google::Cloud::Talent::V4::CompensationInfo::CompensationRange]
# Compensation range.
# @!attribute [rw] description
# @return [::String]
# Compensation description. For example, could
# indicate equity terms or provide additional context to an estimated
# bonus.
# @!attribute [rw] expected_units_per_year
# @return [::Google::Protobuf::DoubleValue]
# Expected number of units paid each year. If not specified, when
# {::Google::Cloud::Talent::V4::Job#employment_types Job.employment_types} is FULLTIME, a default value is inferred
# based on {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#unit unit}. Default values:
# - HOURLY: 2080
# - DAILY: 260
# - WEEKLY: 52
# - MONTHLY: 12
# - ANNUAL: 1
class CompensationEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Compensation range.
# @!attribute [rw] max_compensation
# @return [::Google::Type::Money]
# The maximum amount of compensation. If left empty, the value is set
# to a maximal compensation value and the currency code is set to
# match the {::Google::Type::Money#currency_code currency code} of
# min_compensation.
# @!attribute [rw] min_compensation
# @return [::Google::Type::Money]
# The minimum amount of compensation. If left empty, the value is set
# to zero and the currency code is set to match the
# {::Google::Type::Money#currency_code currency code} of max_compensation.
class CompensationRange
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The type of compensation.
#
# For compensation amounts specified in non-monetary amounts,
# describe the compensation scheme in the {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#description CompensationEntry.description}.
#
# For example, tipping format is described in
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#description CompensationEntry.description} (for example, "expect 15-20% tips based
# on customer bill.") and an estimate of the tips provided in
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#amount CompensationEntry.amount} or {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#range CompensationEntry.range} ($10 per hour).
#
# For example, equity is described in {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#description CompensationEntry.description}
# (for example, "1% - 2% equity vesting over 4 years, 1 year cliff") and
# value estimated in {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#amount CompensationEntry.amount} or
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#range CompensationEntry.range}. If no value estimate is possible, units are
# {::Google::Cloud::Talent::V4::CompensationInfo::CompensationUnit::COMPENSATION_UNIT_UNSPECIFIED CompensationUnit.COMPENSATION_UNIT_UNSPECIFIED} and then further
# clarified in {::Google::Cloud::Talent::V4::CompensationInfo::CompensationEntry#description CompensationEntry.description} field.
module CompensationType
# Default value.
COMPENSATION_TYPE_UNSPECIFIED = 0
# Base compensation: Refers to the fixed amount of money paid to an
# employee by an employer in return for work performed. Base compensation
# does not include benefits, bonuses or any other potential compensation
# from an employer.
BASE = 1
# Bonus.
BONUS = 2
# Signing bonus.
SIGNING_BONUS = 3
# Equity.
EQUITY = 4
# Profit sharing.
PROFIT_SHARING = 5
# Commission.
COMMISSIONS = 6
# Tips.
TIPS = 7
# Other compensation type.
OTHER_COMPENSATION_TYPE = 8
end
# Pay frequency.
module CompensationUnit
# Default value.
COMPENSATION_UNIT_UNSPECIFIED = 0
# Hourly.
HOURLY = 1
# Daily.
DAILY = 2
# Weekly
WEEKLY = 3
# Monthly.
MONTHLY = 4
# Yearly.
YEARLY = 5
# One time.
ONE_TIME = 6
# Other compensation units.
OTHER_COMPENSATION_UNIT = 7
end
end
# Metadata used for long running operations returned by CTS batch APIs.
# It's used to replace {::Google::Longrunning::Operation#metadata google.longrunning.Operation.metadata}.
# @!attribute [rw] state
# @return [::Google::Cloud::Talent::V4::BatchOperationMetadata::State]
# The state of a long running operation.
# @!attribute [rw] state_description
# @return [::String]
# More detailed information about operation state.
# @!attribute [rw] success_count
# @return [::Integer]
# Count of successful item(s) inside an operation.
# @!attribute [rw] failure_count
# @return [::Integer]
# Count of failed item(s) inside an operation.
# @!attribute [rw] total_count
# @return [::Integer]
# Count of total item(s) inside an operation.
# @!attribute [rw] create_time
# @return [::Google::Protobuf::Timestamp]
# The time when the batch operation is created.
# @!attribute [rw] update_time
# @return [::Google::Protobuf::Timestamp]
# The time when the batch operation status is updated. The metadata and the
# {::Google::Cloud::Talent::V4::BatchOperationMetadata#update_time update_time} is refreshed every minute otherwise cached data is
# returned.
# @!attribute [rw] end_time
# @return [::Google::Protobuf::Timestamp]
# The time when the batch operation is finished and
# {::Google::Longrunning::Operation#done google.longrunning.Operation.done} is set to `true`.
class BatchOperationMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
module State
# Default value.
STATE_UNSPECIFIED = 0
# The batch operation is being prepared for processing.
INITIALIZING = 1
# The batch operation is actively being processed.
PROCESSING = 2
# The batch operation is processed, and at least one item has been
# successfully processed.
SUCCEEDED = 3
# The batch operation is done and no item has been successfully processed.
FAILED = 4
# The batch operation is in the process of cancelling after
# google.longrunning.Operations.CancelOperation is called.
CANCELLING = 5
# The batch operation is done after
# google.longrunning.Operations.CancelOperation is called. Any items
# processed before cancelling are returned in the response.
CANCELLED = 6
end
end
# An enum that represents the size of the company.
module CompanySize
# Default value if the size isn't specified.
COMPANY_SIZE_UNSPECIFIED = 0
# The company has less than 50 employees.
MINI = 1
# The company has between 50 and 99 employees.
SMALL = 2
# The company has between 100 and 499 employees.
SMEDIUM = 3
# The company has between 500 and 999 employees.
MEDIUM = 4
# The company has between 1,000 and 4,999 employees.
BIG = 5
# The company has between 5,000 and 9,999 employees.
BIGGER = 6
# The company has 10,000 or more employees.
GIANT = 7
end
# An enum that represents employee benefits included with the job.
module JobBenefit
# Default value if the type isn't specified.
JOB_BENEFIT_UNSPECIFIED = 0
# The job includes access to programs that support child care, such
# as daycare.
CHILD_CARE = 1
# The job includes dental services covered by a dental
# insurance plan.
DENTAL = 2
# The job offers specific benefits to domestic partners.
DOMESTIC_PARTNER = 3
# The job allows for a flexible work schedule.
FLEXIBLE_HOURS = 4
# The job includes health services covered by a medical insurance plan.
MEDICAL = 5
# The job includes a life insurance plan provided by the employer or
# available for purchase by the employee.
LIFE_INSURANCE = 6
# The job allows for a leave of absence to a parent to care for a newborn
# child.
PARENTAL_LEAVE = 7
# The job includes a workplace retirement plan provided by the
# employer or available for purchase by the employee.
RETIREMENT_PLAN = 8
# The job allows for paid time off due to illness.
SICK_DAYS = 9
# The job includes paid time off for vacation.
VACATION = 10
# The job includes vision services covered by a vision
# insurance plan.
VISION = 11
end
# Educational degree level defined in International Standard Classification
# of Education (ISCED).
module DegreeType
# Default value. Represents no degree, or early childhood education.
# Maps to ISCED code 0.
# Ex) Kindergarten
DEGREE_TYPE_UNSPECIFIED = 0
# Primary education which is typically the first stage of compulsory
# education. ISCED code 1.
# Ex) Elementary school
PRIMARY_EDUCATION = 1
# Lower secondary education; First stage of secondary education building on
# primary education, typically with a more subject-oriented curriculum.
# ISCED code 2.
# Ex) Middle school
LOWER_SECONDARY_EDUCATION = 2
# Middle education; Second/final stage of secondary education preparing for
# tertiary education and/or providing skills relevant to employment.
# Usually with an increased range of subject options and streams. ISCED
# code 3.
# Ex) High school
UPPER_SECONDARY_EDUCATION = 3
# Adult Remedial Education; Programmes providing learning experiences that
# build on secondary education and prepare for labour market entry and/or
# tertiary education. The content is broader than secondary but not as
# complex as tertiary education. ISCED code 4.
ADULT_REMEDIAL_EDUCATION = 4
# Associate's or equivalent; Short first tertiary programmes that are
# typically practically-based, occupationally-specific and prepare for
# labour market entry. These programmes may also provide a pathway to other
# tertiary programmes. ISCED code 5.
ASSOCIATES_OR_EQUIVALENT = 5
# Bachelor's or equivalent; Programmes designed to provide intermediate
# academic and/or professional knowledge, skills and competencies leading
# to a first tertiary degree or equivalent qualification. ISCED code 6.
BACHELORS_OR_EQUIVALENT = 6
# Master's or equivalent; Programmes designed to provide advanced academic
# and/or professional knowledge, skills and competencies leading to a
# second tertiary degree or equivalent qualification. ISCED code 7.
MASTERS_OR_EQUIVALENT = 7
# Doctoral or equivalent; Programmes designed primarily to lead to an
# advanced research qualification, usually concluding with the submission
# and defense of a substantive dissertation of publishable quality based on
# original research. ISCED code 8.
DOCTORAL_OR_EQUIVALENT = 8
end
# An enum that represents the employment type of a job.
module EmploymentType
  # The default value if the employment type isn't specified.
  EMPLOYMENT_TYPE_UNSPECIFIED = 0
  # The job requires working a number of hours that constitute full
  # time employment, typically 40 or more hours per week.
  FULL_TIME = 1
  # The job entails working fewer hours than a full time job,
  # typically less than 40 hours a week.
  PART_TIME = 2
  # The job is offered as a contracted, as opposed to a salaried employee,
  # position.
  CONTRACTOR = 3
  # The job is offered as a contracted position with the understanding
  # that it's converted into a full-time position at the end of the
  # contract. Jobs of this type are also returned by a search for
  # {::Google::Cloud::Talent::V4::EmploymentType::CONTRACTOR EmploymentType.CONTRACTOR} jobs.
  CONTRACT_TO_HIRE = 4
  # The job is offered as a temporary employment opportunity, usually
  # a short-term engagement.
  TEMPORARY = 5
  # The job is a fixed-term opportunity for students or entry-level job
  # seekers to obtain on-the-job training, typically offered as a summer
  # position.
  INTERN = 6
  # This is an opportunity for an individual to volunteer, where there's no
  # expectation of compensation for the provided services.
  VOLUNTEER = 7
  # The job requires an employee to work on an as-needed basis with a
  # flexible schedule.
  PER_DIEM = 8
  # The job involves employing people in remote areas and flying them
  # temporarily to the work site instead of relocating employees and their
  # families permanently.
  FLY_IN_FLY_OUT = 9
  # The job does not fit any of the other listed types.
  OTHER_EMPLOYMENT_TYPE = 10
end
# An enum that represents the experience level required for the job.
module JobLevel
# The default value if the level isn't specified.
JOB_LEVEL_UNSPECIFIED = 0
# Entry-level individual contributors, typically with less than 2 years of
# experience in a similar role. Includes interns.
ENTRY_LEVEL = 1
# Experienced individual contributors, typically with 2+ years of
# experience in a similar role.
EXPERIENCED = 2
# Entry- to mid-level managers responsible for managing a team of people.
MANAGER = 3
# Senior-level managers responsible for managing teams of managers.
DIRECTOR = 4
# Executive-level managers and above, including C-level positions.
EXECUTIVE = 5
end
# An enum that represents the categorization or primary focus of specific
# role. This value is different than the "industry" associated with a role,
# which is related to the categorization of the company listing the job.
module JobCategory
  # The default value if the category isn't specified.
  JOB_CATEGORY_UNSPECIFIED = 0
  # An accounting and finance job, such as an Accountant.
  ACCOUNTING_AND_FINANCE = 1
  # An administrative and office job, such as an Administrative Assistant.
  ADMINISTRATIVE_AND_OFFICE = 2
  # An advertising and marketing job, such as Marketing Manager.
  ADVERTISING_AND_MARKETING = 3
  # An animal care job, such as Veterinarian.
  ANIMAL_CARE = 4
  # An art, fashion, or design job, such as Designer.
  ART_FASHION_AND_DESIGN = 5
  # A business operations job, such as Business Operations Manager.
  BUSINESS_OPERATIONS = 6
  # A cleaning and facilities job, such as Custodial Staff.
  CLEANING_AND_FACILITIES = 7
  # A computer and IT job, such as Systems Administrator.
  COMPUTER_AND_IT = 8
  # A construction job, such as General Laborer.
  CONSTRUCTION = 9
  # A customer service job, such as Cashier.
  CUSTOMER_SERVICE = 10
  # An education job, such as School Teacher.
  EDUCATION = 11
  # An entertainment and travel job, such as Flight Attendant.
  ENTERTAINMENT_AND_TRAVEL = 12
  # A farming or outdoor job, such as Park Ranger.
  FARMING_AND_OUTDOORS = 13
  # A healthcare job, such as Registered Nurse.
  HEALTHCARE = 14
  # A human resources job, such as Human Resources Director.
  HUMAN_RESOURCES = 15
  # An installation, maintenance, or repair job, such as Electrician.
  INSTALLATION_MAINTENANCE_AND_REPAIR = 16
  # A legal job, such as Law Clerk.
  LEGAL = 17
  # A management job, often used in conjunction with another category,
  # such as Store Manager.
  MANAGEMENT = 18
  # A manufacturing or warehouse job, such as Assembly Technician.
  MANUFACTURING_AND_WAREHOUSE = 19
  # A media, communications, or writing job, such as Media Relations.
  MEDIA_COMMUNICATIONS_AND_WRITING = 20
  # An oil, gas or mining job, such as Offshore Driller.
  OIL_GAS_AND_MINING = 21
  # A personal care and services job, such as Hair Stylist.
  PERSONAL_CARE_AND_SERVICES = 22
  # A protective services job, such as Security Guard.
  PROTECTIVE_SERVICES = 23
  # A real estate job, such as Buyer's Agent.
  REAL_ESTATE = 24
  # A restaurant and hospitality job, such as Restaurant Server.
  RESTAURANT_AND_HOSPITALITY = 25
  # A sales and/or retail job, such as Sales Associate.
  SALES_AND_RETAIL = 26
  # A science and engineering job, such as Lab Technician.
  SCIENCE_AND_ENGINEERING = 27
  # A social services or non-profit job, such as Case Worker.
  SOCIAL_SERVICES_AND_NON_PROFIT = 28
  # A sports, fitness, or recreation job, such as Personal Trainer.
  SPORTS_FITNESS_AND_RECREATION = 29
  # A transportation or logistics job, such as Truck Driver.
  TRANSPORTATION_AND_LOGISTICS = 30
end
# An enum that represents the job posting region. In most cases, job postings
# don't need to specify a region. If a region is given, jobs are
# eligible for searches in the specified region.
module PostingRegion
# If the region is unspecified, the job is only returned if it
# matches the {::Google::Cloud::Talent::V4::LocationFilter LocationFilter}.
POSTING_REGION_UNSPECIFIED = 0
# In addition to exact location matching, job posting is returned when the
# {::Google::Cloud::Talent::V4::LocationFilter LocationFilter} in the search query is in the same administrative area
# as the returned job posting. For example, if a `ADMINISTRATIVE_AREA` job
# is posted in "CA, USA", it's returned if {::Google::Cloud::Talent::V4::LocationFilter LocationFilter} has
# "Mountain View".
#
# Administrative area refers to top-level administrative subdivision of this
# country. For example, US state, IT region, UK constituent nation and
# JP prefecture.
ADMINISTRATIVE_AREA = 1
# In addition to exact location matching, job is returned when
# {::Google::Cloud::Talent::V4::LocationFilter LocationFilter} in search query is in the same country as this job.
# For example, if a `NATION_WIDE` job is posted in "USA", it's
# returned if {::Google::Cloud::Talent::V4::LocationFilter LocationFilter} has 'Mountain View'.
NATION = 2
# Job allows employees to work remotely (telecommute).
# If locations are provided with this value, the job is
# considered as having a location, but telecommuting is allowed.
TELECOMMUTE = 3
end
# Deprecated. All resources are only visible to the owner.
#
# An enum that represents who has view access to the resource.
module Visibility
# Default value.
VISIBILITY_UNSPECIFIED = 0
# The resource is only visible to the GCP account who owns it.
ACCOUNT_ONLY = 1
# The resource is visible to the owner and may be visible to other
# applications and processes at Google.
SHARED_WITH_GOOGLE = 2
# The resource is visible to the owner and may be visible to all other API
# clients.
SHARED_WITH_PUBLIC = 3
end
# Option for HTML content sanitization on user input fields, for example, job
# description. By setting this option, user can determine whether and how
# sanitization is performed on these fields.
module HtmlSanitization
# Default value.
HTML_SANITIZATION_UNSPECIFIED = 0
# Disables sanitization on HTML input.
HTML_SANITIZATION_DISABLED = 1
# Sanitizes HTML input, only accepts bold, italic, ordered list, and
# unordered list markup tags.
SIMPLE_FORMATTING_ONLY = 2
end
# Method for commute. Walking, biking and wheelchair accessible transit is
# still in the Preview stage.
module CommuteMethod
# Commute method isn't specified.
COMMUTE_METHOD_UNSPECIFIED = 0
# Commute time is calculated based on driving time.
DRIVING = 1
# Commute time is calculated based on public transit including bus, metro,
# subway, and so on.
TRANSIT = 2
# Commute time is calculated based on walking time.
WALKING = 3
# Commute time is calculated based on biking time.
CYCLING = 4
# Commute time is calculated based on public transit that is wheelchair
# accessible.
TRANSIT_ACCESSIBLE = 5
end
end
end
end
end
|
#!/bin/bash
prog=${0##*/}
progdir=${0%/*}
# Report a fatal error (prefixed with this program's name) on stderr and exit.
fail () {
    echo "$prog: $*" >&2
    exit 1
}
# Append KEY=VALUE lines to a condor config file.
# Usage: add_values_to <config-file-name> KEY VALUE [KEY VALUE ...]
add_values_to () {
    config=$1
    shift
    # printf re-applies its format string until all arguments are consumed,
    # so each KEY/VALUE pair in "$@" becomes one "KEY=VALUE" line.
    printf "%s=%s\n" >> "/etc/condor/config.d/$config" "$@"
}
# Create a config file from the environment.
# The config file needs to be on disk instead of referencing the env
# at run time so condor_config_val can work.
echo "# This file was created by $prog" > /etc/condor/config.d/01-env.conf
add_values_to 01-env.conf \
    CONDOR_HOST "${CONDOR_HOST:-\$(FULL_HOSTNAME)}" \
    USE_POOL_PASSWORD "${USE_POOL_PASSWORD:-no}"
# Run the site customization hooks; abort the container on failure.
bash -x "$progdir/update-config" || fail "Failed to update config"
bash -x "$progdir/update-secrets" || fail "Failed to update secrets"
# Bug workaround: daemons will die if they can't raise the number of FD's;
# cap the request if we can't raise it.
hard_max=$(ulimit -Hn)
# Start from a clean cap file; it is only recreated if capping is needed.
rm -f /etc/condor/config.d/01-fdfix.conf
# Try to raise the hard limit ourselves. If we can't raise it, lower
# the limits in the condor config to the maximum allowable.
for attr in COLLECTOR_MAX_FILE_DESCRIPTORS \
            SHARED_PORT_MAX_FILE_DESCRIPTORS \
            SCHEDD_MAX_FILE_DESCRIPTORS \
            MAX_FILE_DESCRIPTORS; do
    # -evaluate resolves config expressions to a number; non-numeric or
    # unset values fail the regex below and are skipped.
    config_max=$(condor_config_val -evaluate $attr 2>/dev/null)
    if [[ $config_max =~ ^[0-9]+$ && $config_max -gt $hard_max ]]; then
        if ! ulimit -Hn "$config_max" &>/dev/null; then
            add_values_to 01-fdfix.conf "$attr" "$hard_max"
        fi
        # NOTE(review): this restores the original hard limit even when the
        # raise above succeeded; for a non-root process a lowered hard limit
        # cannot be raised again. Confirm this restore is intentional.
        ulimit -Hn "$hard_max"
    fi
done
# NOTE(review): the header is appended (>>) after the values were written,
# so it ends up at the bottom of 01-fdfix.conf rather than the top.
[[ -s /etc/condor/config.d/01-fdfix.conf ]] && \
    echo "# This file was created by $prog" >> /etc/condor/config.d/01-fdfix.conf
# This isn't a real service, I can't start it via supervisor
/etc/init.d/gratia-probes-cron start
# Refresh CRLs before handing control to supervisord (PID 1 via exec).
/usr/sbin/fetch-crl -p 20 -T 10
exec /usr/bin/supervisord -c /etc/supervisord.conf
# vim:et:sw=4:sts=4:ts=8
|
/*global angular*/
import AlarmFactory from './alarm_factory';

// Registers the Sentinl alarm factory as the injectable 'alarmFactory'
// Angular service; $http and $injector are forwarded to the constructor
// ($injector allows the factory to resolve further services lazily).
export default angular.module('apps/sentinl.alarmFactory', []).factory('alarmFactory',
  /* @ngInject */ ($http, $injector) => {
    return new AlarmFactory($http, $injector);
  });
|
<reponame>chlds/util
/* **** Notes
Commandlet to open a file
Attention:
This code
is for a doubly LL i.e.,
<NOT> for a circular LL..
Remarks:
Implemented along with fn. cv_v() and with fn. rl_v().
And also with a flag to be added for code to run as far as possible to the end.
*/
# define C_CODE_STDS
# define CCR
# include "../../../incl/config.h"
/* Commandlet handler for "--open <file>".
 *
 * Opens the file named by the last CLI argument and copies it to CLI_OUT one
 * byte at a time, sleeping after each space and (longer) after each newline
 * to produce a paced, typewriter-style display.  The copy loop aborts early
 * when an announcement (shutdown request) is pending on *argp.
 *
 * Returns 0x01 on success, 0x00 on any failure.  The Running counter of
 * *argp is incremented on entry and decremented on every exit path.
 *
 * Change vs. previous revision: removed the unused locals lead, base, count,
 * i (only ever zeroed) and the unused constant SNOOZE.
 */
unsigned(__stdcall cmdl2_open(SAT(*argp))) {
    auto signed char **pp;              /* argument vector built by cv_v() */
    auto signed char *p;                /* raw command line, later file name */
    auto KNOT *cache;                   /* CLI knot holding the command line */
    auto signed fd;                     /* descriptor returned by op_b() */
    auto signed r;                      /* scratch return-code holder */
    auto signed short interrupt_flag;   /* nonzero if copy was interrupted */
    auto signed char c;                 /* one-byte transfer buffer */
    auto signed const QUANTUM = (0x10);
    auto signed const DELAY = (0x02*(QUANTUM));
    auto signed char const DELIMITER = (' ');
    auto signed char const LINEFEED = ('\n');
    if(!argp) return(0x00);
    INC(R(Running,*argp));
    /* **** Check the arguments */
    cache = (*(CLI_INDEX+(R(knot,R(reel,*argp)))));
    if(!cache) {
        DEC(R(Running,*argp));
        return(0x00);
    }
    p = (R(p,*cache));
    r = ct_args(p);
    if(!r) {
        printf("%s \n","<< Error at fn. ct_args()");
        DEC(R(Running,*argp));
        return(0x00);
    }
    if(r<(0x02)) {
        /* No file operand given: print usage and bail out. */
        printf("%s \n","--open <file>");
        printf("\n");
        DEC(R(Running,*argp));
        return(0x00);
    }
    /* splitting */
    pp = (0x00);
    r = cv_v(&pp,p);
    if(!r) {
        printf("%s \n","<< Error at fn. cv_v()");
        DEC(R(Running,*argp));
        return(0x00);
    }
    else {
        // To open file name p.
        p = (*(pp+(r+(~0x00))));    /* last vector element: index r-1 */
    }
    /* opening */
    r = (O_RDONLY|(O_BINARY));
    fd = op_b(p,&r,(void*)0x00);
    if(!(fd^(~0x00))) {             /* op_b() signalled failure with -1 */
        printf("%s \n","<< Error at fn. op_b()");
        DEC(R(Running,*argp));
        return(0x00);
    }
    // else printf("%s %s \n","Opened at file:",p);
    /* reading/writing */
    XOR(interrupt_flag,interrupt_flag);
    while(1) {
        /* A pending announcement interrupts the copy. */
        if(R(Announcements,*argp)) {
            interrupt_flag++;
            break;
        }
        /* reading */
        r = rd_b(fd,&c,sizeof(c));
        if(!(r^(~0x00))) {
            printf("%s \n","<< Error at fn. rd_b()");
            break;
        }
        if(!r) {
            /* Zero bytes read: end of file. */
            printf("\n");
            printf("%s%s \n","Read at file: ",p);
            break;
        }
        /* Writing */
        r = wr_b(CLI_OUT,&c,r);
        if(!(r^(~0x00))) {
            printf("%s \n","<< Error at fn. wr_b()");
            break;
        }
        // CPU idling: pause at word and line boundaries for readability.
        if(!(c^(DELIMITER))) {
            sleep_b(DELAY);
        }
        if(!(c^(LINEFEED))) {
            sleep_b(0x03*(DELAY));
        }
    }
    /* Notificate */
    if(interrupt_flag) printf("\n\n%s \n","Attention: There was an interruption during reading and/or writing..");
    /* Check an error flag e.g., and closing/unmapping out of the RAM */
    r = cl_b(fd);
    if(!(r^(~0x00))) {
        printf("%s \n","<< Error at fn. cl_b()");
        DEC(R(Running,*argp));
        return(0x00);
    }
    /* Unmap all the buffers allocated by fn. cv_v() out of the RAM */
    r = rl_v(&pp);
    if(!r) {
        printf("%s \n","<< Error at fn. rl_v()");
        DEC(R(Running,*argp));
        return(0x00);
    }
    printf("\n");
    DEC(R(Running,*argp));
    return(0x01);
}
|
def verify_parameter_value(contract, type1, type2, expected_address):
    """Check that both on-vote value proposals equal the expected address.

    Args:
        contract: object exposing ``getCurrentOnVoteValueProposal(type) -> int``.
        type1: first proposal type identifier to query.
        type2: second proposal type identifier to query.
        expected_address: expected address as a hexadecimal string
            (with or without a "0x" prefix).

    Returns:
        bool: True if both queried proposals equal the expected address.
    """
    # Query both proposals unconditionally (as the original did) in case the
    # contract call has observable side effects.
    ret1 = contract.getCurrentOnVoteValueProposal(type1)
    ret2 = contract.getCurrentOnVoteValueProposal(type2)
    expected_address_hex = int(expected_address, 16)
    # Return the comparison directly instead of if/else returning True/False.
    return ret1 == expected_address_hex and ret2 == expected_address_hex
|
<filename>tests/test_connectors_parquet.py
"""
Created on 4 Mar 2020
@author: si
"""
import os
import sys
import unittest
# NOTE(review): PANDAS_NOT_INSTALLED is never read anywhere in this module;
# the skip decorators below test `pd is None` directly. Candidate for removal.
PANDAS_NOT_INSTALLED = False
# Optional dependencies: tests should be skipped rather than error when absent.
try:
    import pandas as pd
except ModuleNotFoundError:
    pd = None
try:
    import pyarrow.parquet as pq
except ModuleNotFoundError:
    pq = None
from ayeaye.connectors.parquet_connector import ParquetConnector
# Directory of this test file and the parquet fixture used by the tests below.
PROJECT_TEST_PATH = os.path.dirname(os.path.abspath(__file__))
EXAMPLE_HELLO = os.path.join(PROJECT_TEST_PATH, "data", "hello.parquet")
"""
Test data built with this-
import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq
df = pd.DataFrame({'name': ['Alice', 'Bob'],
                   'favorite_colour': ['blue', 'green'],
                   },
                  index=list('ab'))
pq.write_table(table, 'hello.parquet')
"""
# Fix: the class-level skip previously checked only `pd is None` although its
# message said "Parquet not installed"; `pq` (pyarrow.parquet) was imported at
# module level but never consulted, so a missing pyarrow errored the suite
# instead of skipping it. Both optional dependencies are now checked.
@unittest.skipIf(pd is None or pq is None, "pandas/pyarrow (parquet support) not installed")
class TestParquetConnector(unittest.TestCase):
    """Tests for ParquetConnector against the hello.parquet fixture."""

    def test_read_as_rows(self):
        """
        Iterate through a parquet file row by row for all columns.
        """
        c = ParquetConnector(engine_url="parquet://" + EXAMPLE_HELLO)
        all_records = [r.as_dict() for r in c]
        self.assertEqual(2, len(all_records), "There are two sample records")
        # there are a couple of other keys as a result of the pandas index. Just check the
        # payload fields
        wanted = [{"name": "Alice", "favorite_colour": "blue"}, {"name": "Bob", "favorite_colour": "green"}]
        for idx, expected_row in enumerate(wanted):
            for expected_key, expected_value in expected_row.items():
                self.assertEqual(expected_value, all_records[idx][expected_key])

    @unittest.skipIf(pd is None, "Pandas not installed")
    def test_read_as_pandas(self):
        """The connector exposes the fixture as a pandas DataFrame."""
        c = ParquetConnector(engine_url="parquet://" + EXAMPLE_HELLO)
        p = c.as_pandas()
        self.assertIsInstance(p, pd.DataFrame)
        self.assertEqual("Alice", p["name"][0], "Can't find expected value in Pandas dataframe")
|
<reponame>rsuite/rsuite-icons<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import Thermometer1Svg from '@rsuite/icon-font/lib/legacy/Thermometer1';
const Thermometer1 = createSvgIcon({
as: Thermometer1Svg,
ariaLabel: 'thermometer 1',
category: 'legacy',
displayName: 'Thermometer1'
});
export default Thermometer1;
|
function getPageTypeComposerControlIdentifier(string $handle): string
{
$bt = ConcreteBlockType::getByHandle($handle);
if ($bt !== null) {
return static::getPageTypeComposerControlByIdentifier($bt->getBlockTypeID());
} else {
return "Error: Handle does not correspond to any known block type";
}
}
|
"""
This script trains the TrueCase System
"""
import nltk
import os
import sys
import argparse
import cPickle
script_path=os.path.dirname(os.path.realpath(__file__))
truecaser_script_dir = os.path.join(script_path,"dependencies","truecaser")
sys.path.insert(1,truecaser_script_dir)
from TrainFunctions import *
def main(input_file, output_file):
    """Train truecaser frequency models from raw sentences and pickle them.

    Reads one sentence per line from ``input_file`` (UTF-8 encoded bytes —
    this is Python 2 code, note cPickle and str.decode), tokenises each
    sentence with NLTK, updates the casing distributions, then serialises
    all five structures to ``output_file`` in this fixed order:
    uniDist, backwardBiDist, forwardBiDist, trigramDist, wordCasingLookup.
    Consumers must unpickle in the same order.
    """
    # Frequency distributions for unigram / backward-bigram / forward-bigram /
    # trigram casing statistics, plus the token -> observed casings lookup.
    uniDist = nltk.FreqDist()
    backwardBiDist = nltk.FreqDist()
    forwardBiDist = nltk.FreqDist()
    trigramDist = nltk.FreqDist()
    wordCasingLookup = {}
    sentences = []
    for line in input_file:
        sentences.append(line.strip().decode('utf-8'))
    tokens = [nltk.word_tokenize(sentence) for sentence in sentences]
    # Provided by dependencies/truecaser/TrainFunctions (star-imported above).
    updateDistributionsFromSentences(tokens, wordCasingLookup, uniDist, backwardBiDist, forwardBiDist, trigramDist)
    cPickle.dump(uniDist, output_file, protocol=cPickle.HIGHEST_PROTOCOL)
    cPickle.dump(backwardBiDist, output_file, protocol=cPickle.HIGHEST_PROTOCOL)
    cPickle.dump(forwardBiDist, output_file, protocol=cPickle.HIGHEST_PROTOCOL)
    cPickle.dump(trigramDist, output_file, protocol=cPickle.HIGHEST_PROTOCOL)
    cPickle.dump(wordCasingLookup, output_file, protocol=cPickle.HIGHEST_PROTOCOL)
if __name__ == "__main__":
    # CLI entry point: parse arguments and delegate to main().
    parser = argparse.ArgumentParser()
    parser.add_argument('--input', '-i',
                        type=argparse.FileType('r'),
                        default=sys.stdin, metavar='PATH',
                        help="Input file (default: standard input)")
    # Fix: --output was previously optional with no default, so omitting it
    # left args.output as None and main() crashed inside cPickle.dump with a
    # confusing TypeError. Making it required yields a clean argparse error.
    parser.add_argument('--output', '-o', type=argparse.FileType('wb'), metavar='PATH',
                        required=True,
                        help="Output file (binary)")
    args = parser.parse_args()
    main(args.input, args.output)
|
<gh_stars>0
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import {TaskModule} from "../task/task.module";
import {ThoughtModule} from "../thought/thought.module";
import { DocumentOverviewComponent } from './document-overview/document-overview.component';
import { AddDocumentComponent } from './add-document/add-document.component';
import { DocumentPreviewComponent } from './document-preview/document-preview.component';
import { RouterModule, Routes } from '@angular/router';
import { TableModule } from 'primeng/table';
import { ButtonModule } from 'primeng/button';
import { RepositoryModule } from '../repository/repository.module';
import { MarkdownModule } from '../markdown/markdown.module';
import { CardModule } from 'primeng/card';
import { TagModule } from '../tag/tag.module';
import { BlockUIModule, CheckboxModule, DialogModule, DropdownModule, InputTextModule, MessageModule, MessagesModule, TooltipModule } from 'primeng/primeng';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { IdnadrevFilePreviewComponent } from './idnadrev-file-preview/idnadrev-file-preview.component';
import { FileTypePipe } from './file-type.pipe';
import { FormitemsModule } from '../formitems/formitems.module';
import { PageModule } from '../page/page.module';
import { DocumentViewComponent } from './document-view/document-view.component';
import { FilterModule } from '../filter/filter.module';
// Routes are matched in declaration order: the static 'doc/add' path must be
// registered before the parameterised 'doc/:id' so that "add" is not
// captured as a document id.
const routes: Routes = [
  {path: 'doc/edit/:id', component: AddDocumentComponent},
  {path: 'doc/add', component: AddDocumentComponent},
  {path: 'doc/:id', component: DocumentViewComponent},
  {path: 'doc', component: DocumentOverviewComponent},
];
// Feature module for document management: registers the document routes and
// bundles the PrimeNG UI modules plus the sibling feature modules used by
// the document components declared below.
@NgModule({
  imports: [
    CommonModule,
    RouterModule.forChild(routes),
    TableModule,
    ButtonModule,
    RepositoryModule,
    MarkdownModule,
    CardModule,
    TagModule,
    TooltipModule,
    DialogModule,
    InputTextModule,
    FormsModule,
    ReactiveFormsModule,
    MessageModule,
    MessagesModule,
    BlockUIModule,
    CheckboxModule,
    DropdownModule,
    TaskModule,
    ThoughtModule,
    FormitemsModule,
    PageModule,
    FilterModule
  ],
  declarations: [DocumentOverviewComponent, AddDocumentComponent, DocumentPreviewComponent, IdnadrevFilePreviewComponent, FileTypePipe, DocumentViewComponent],
})
export class DocumentModule { }
|
"use strict";
import { Set, OrderedMap, OrderedSet, Map } from "immutable";
import { Type, ClassProperty, UnionType, ObjectType, combineTypeAttributesOfTypes, assertIsObject } from "./Type";
import {
TypeRef,
UnionBuilder,
TypeBuilder,
TypeLookerUp,
GraphRewriteBuilder,
TypeRefUnionAccumulator
} from "./TypeBuilder";
import { panic, assert, defined, unionOfSets } from "./Support";
import { TypeNames, namesTypeAttributeKind } from "./TypeNames";
import { TypeAttributes, combineTypeAttributes, emptyTypeAttributes } from "./TypeAttributes";
/**
 * Merge the properties of all object types in a clique into a single
 * unified property map.
 *
 * For every property name appearing in any clique member, collects the set
 * of types seen for that name; a property missing from a member (or marked
 * optional anywhere) becomes optional in the result. Members' concrete
 * `additionalProperties` types are unioned into a single unified
 * additional-properties type and, when not "any", also contribute to
 * properties a member lacks.
 *
 * @param clique object types being unified
 * @param makePropertyType builds the unified TypeRef for a set of member types
 * @returns [unified properties, unified additionalProperties or undefined,
 *          whether type attributes were lost]
 */
function getCliqueProperties(
    clique: ObjectType[],
    makePropertyType: (attributes: TypeAttributes, types: OrderedSet<Type>) => TypeRef
): [OrderedMap<string, ClassProperty>, TypeRef | undefined, boolean] {
    // NOTE(review): lostTypeAttributes is never set to true anywhere in this
    // function, so the third tuple element is currently always false —
    // confirm this matches the intended upstream behavior.
    let lostTypeAttributes = false;
    // Union of all property names appearing in any clique member.
    let propertyNames = OrderedSet<string>();
    for (const o of clique) {
        propertyNames = propertyNames.union(o.properties.keySeq());
    }
    // Accumulator rows: [name, member types seen for the property, isOptional].
    let properties = propertyNames
        .toArray()
        .map(name => [name, OrderedSet(), false] as [string, OrderedSet<Type>, boolean]);
    let additionalProperties: OrderedSet<Type> | undefined = undefined;
    for (const o of clique) {
        const additional = o.additionalProperties;
        if (additional !== undefined) {
            if (additionalProperties === undefined) {
                additionalProperties = OrderedSet();
            }
            // Fix: removed a redundant nested `additional !== undefined`
            // re-check that was inside this already-guarded branch.
            additionalProperties = additionalProperties.add(additional);
        }
        for (let i = 0; i < properties.length; i++) {
            let [name, types, isOptional] = properties[i];
            const maybeProperty = o.properties.get(name);
            if (maybeProperty === undefined) {
                // Property absent on this member: it becomes optional, and the
                // member's concrete additionalProperties type (if any, and not
                // "any") contributes to the property's type union.
                isOptional = true;
                if (additional !== undefined && additional.kind !== "any") {
                    types = types.add(additional);
                }
            } else {
                if (maybeProperty.isOptional) {
                    isOptional = true;
                }
                types = types.add(maybeProperty.type);
            }
            properties[i][1] = types;
            properties[i][2] = isOptional;
        }
    }
    const unifiedAdditionalProperties =
        additionalProperties === undefined
            ? undefined
            : makePropertyType(combineTypeAttributesOfTypes(additionalProperties), additionalProperties);
    const unifiedPropertiesArray = properties.map(([name, types, isOptional]) => {
        let attributes = combineTypeAttributesOfTypes(types);
        // Fall back to the property name itself as an inferred type name.
        attributes = namesTypeAttributeKind.setDefaultInAttributes(
            attributes,
            () => new TypeNames(OrderedSet([name]), OrderedSet(), true)
        );
        return [name, new ClassProperty(makePropertyType(attributes, types), isOptional)] as [string, ClassProperty];
    });
    const unifiedProperties = OrderedMap(unifiedPropertiesArray);
    return [unifiedProperties, unifiedAdditionalProperties, lostTypeAttributes];
}
// Summarize what kinds of properties occur across a clique of object
// types: whether any member has named properties, whether any allows
// additional properties, and whether any of those are typed (non-"any").
function countProperties(
    clique: ObjectType[]
): { hasProperties: boolean; hasAdditionalProperties: boolean; hasNonAnyAdditionalProperties: boolean } {
    let hasProperties = false;
    let hasAdditionalProperties = false;
    let hasNonAnyAdditionalProperties = false;
    for (const objectType of clique) {
        hasProperties = hasProperties || !objectType.properties.isEmpty();
        const additional = objectType.additionalProperties;
        if (additional === undefined) continue;
        hasAdditionalProperties = true;
        hasNonAnyAdditionalProperties = hasNonAnyAdditionalProperties || additional.kind !== "any";
    }
    return { hasProperties, hasAdditionalProperties, hasNonAnyAdditionalProperties };
}
// UnionBuilder used when unifying cliques of types during graph rewriting.
// The flags control whether string cases become enums, whether object
// types (vs. classes/maps) are produced, and whether classes are "fixed";
// nested unification is delegated to `_unifyTypes`.
export class UnifyUnionBuilder extends UnionBuilder<TypeBuilder & TypeLookerUp, TypeRef[], TypeRef[]> {
    constructor(
        typeBuilder: TypeBuilder & TypeLookerUp,
        private readonly _makeEnums: boolean,
        private readonly _makeObjectTypes: boolean,
        private readonly _makeClassesFixed: boolean,
        private readonly _unifyTypes: (typesToUnify: TypeRef[], typeAttributes: TypeAttributes) => TypeRef
    ) {
        super(typeBuilder);
    }

    // Build an enum from the observed string cases, or — when enum
    // inference is disabled — a plain string type keeping the case counts.
    protected makeEnum(
        enumCases: string[],
        counts: { [name: string]: number },
        typeAttributes: TypeAttributes,
        forwardingRef: TypeRef | undefined
    ): TypeRef {
        if (this._makeEnums) {
            return this.typeBuilder.getEnumType(typeAttributes, OrderedSet(enumCases), forwardingRef);
        } else {
            return this.typeBuilder.getStringType(typeAttributes, OrderedMap(counts), forwardingRef);
        }
    }

    protected makeObject(
        objectRefs: TypeRef[],
        typeAttributes: TypeAttributes,
        forwardingRef: TypeRef | undefined
    ): TypeRef {
        const maybeTypeRef = this.typeBuilder.lookupTypeRefs(objectRefs, forwardingRef);
        // FIXME: Comparing this to `forwardingRef` feels like it will come
        // crashing on our heads eventually. The reason we need it here is
        // because `unifyTypes` registers the union that we're supposed to
        // build here as a forwarding ref, and we end up with a circular
        // ref if we just return it here.
        if (maybeTypeRef !== undefined && maybeTypeRef !== forwardingRef) {
            this.typeBuilder.addAttributes(maybeTypeRef, typeAttributes);
            return maybeTypeRef;
        }

        const objectTypes = objectRefs.map(r => assertIsObject(r.deref()[0]));
        const { hasProperties, hasAdditionalProperties, hasNonAnyAdditionalProperties } = countProperties(objectTypes);

        // When object types are disallowed and the clique looks map-like
        // (typed additional properties, or only additional properties),
        // unify every value type into a single map value type.
        if (!this._makeObjectTypes && (hasNonAnyAdditionalProperties || (!hasProperties && hasAdditionalProperties))) {
            const propertyTypes = unionOfSets(objectTypes.map(o => o.properties.map(cp => cp.typeRef).toOrderedSet()));
            const additionalPropertyTypes = OrderedSet(
                objectTypes
                    .filter(o => o.additionalProperties !== undefined)
                    .map(o => defined(o.additionalProperties).typeRef)
            );
            const allPropertyTypes = propertyTypes.union(additionalPropertyTypes).toArray();
            const tref = this.typeBuilder.getMapType(this._unifyTypes(allPropertyTypes, emptyTypeAttributes));
            this.typeBuilder.addAttributes(tref, typeAttributes);
            return tref;
        } else {
            // Otherwise merge the per-property types of the whole clique.
            const [properties, additionalProperties, lostTypeAttributes] = getCliqueProperties(
                objectTypes,
                (names, types) => {
                    assert(types.size > 0, "Property has no type");
                    return this._unifyTypes(types.map(t => t.typeRef).toArray(), names);
                }
            );
            if (lostTypeAttributes) {
                this.typeBuilder.setLostTypeAttributes();
            }
            if (this._makeObjectTypes) {
                return this.typeBuilder.getUniqueObjectType(
                    typeAttributes,
                    properties,
                    additionalProperties,
                    forwardingRef
                );
            } else {
                assert(additionalProperties === undefined, "We have additional properties but want to make a class");
                return this.typeBuilder.getUniqueClassType(
                    typeAttributes,
                    this._makeClassesFixed,
                    properties,
                    forwardingRef
                );
            }
        }
    }

    protected makeArray(
        arrays: TypeRef[],
        typeAttributes: TypeAttributes,
        forwardingRef: TypeRef | undefined
    ): TypeRef {
        // Consistency fix: pass `emptyTypeAttributes` (the same value as the
        // previous inline `Map()`) to match how empty attributes are passed
        // in `makeObject` above.
        const ref = this.typeBuilder.getArrayType(this._unifyTypes(arrays, emptyTypeAttributes), forwardingRef);
        this.typeBuilder.addAttributes(ref, typeAttributes);
        return ref;
    }
}
export function unionBuilderForUnification<T extends Type>(
typeBuilder: GraphRewriteBuilder<T>,
makeEnums: boolean,
makeObjectTypes: boolean,
makeClassesFixed: boolean,
conflateNumbers: boolean
): UnionBuilder<TypeBuilder & TypeLookerUp, TypeRef[], TypeRef[]> {
return new UnifyUnionBuilder(typeBuilder, makeEnums, makeObjectTypes, makeClassesFixed, (trefs, names) =>
unifyTypes(
Set(trefs.map(tref => tref.deref()[0])),
names,
typeBuilder,
unionBuilderForUnification(typeBuilder, makeEnums, makeObjectTypes, makeClassesFixed, conflateNumbers),
conflateNumbers
)
);
}
// FIXME: The UnionBuilder might end up not being used.
// Unify a set of types into one type in the rewritten graph, attaching
// `typeAttributes` to the result.  Handles three cases: a single
// non-union type (reconstituted directly), an already-unified clique
// (looked up and reused), and the general case (built via `unionBuilder`
// under a forwarding ref so recursive unifications can resolve to it).
export function unifyTypes<T extends Type>(
    types: Set<Type>,
    typeAttributes: TypeAttributes,
    typeBuilder: GraphRewriteBuilder<T>,
    unionBuilder: UnionBuilder<TypeBuilder & TypeLookerUp, TypeRef[], TypeRef[]>,
    conflateNumbers: boolean,
    maybeForwardingRef?: TypeRef
): TypeRef {
    if (types.isEmpty()) {
        return panic("Cannot unify empty set of types");
    } else if (types.count() === 1) {
        // A single non-union type needs no union at all; unions fall
        // through so their members get flattened below.
        const first = defined(types.first());
        if (!(first instanceof UnionType)) {
            const tref = typeBuilder.reconstituteTypeRef(first.typeRef, maybeForwardingRef);
            typeBuilder.addAttributes(tref, typeAttributes);
            return tref;
        }
    }

    const typeRefs = types.toArray().map(t => t.typeRef);
    // Reuse the result if this exact clique was already unified.
    const maybeTypeRef = typeBuilder.lookupTypeRefs(typeRefs, maybeForwardingRef);
    if (maybeTypeRef !== undefined) {
        typeBuilder.addAttributes(maybeTypeRef, typeAttributes);
        return maybeTypeRef;
    }

    // Flatten all member types (and their attributes) into the accumulator.
    const accumulator = new TypeRefUnionAccumulator(conflateNumbers);
    const nestedAttributes = accumulator.addTypes(types);
    typeAttributes = combineTypeAttributes(typeAttributes, nestedAttributes);

    return typeBuilder.withForwardingRef(maybeForwardingRef, forwardingRef => {
        // Register the union before building it so that recursive
        // unifications of the same clique resolve to the forwarding ref.
        typeBuilder.registerUnion(typeRefs, forwardingRef);
        return unionBuilder.buildUnion(accumulator, false, typeAttributes, forwardingRef);
    });
}
|
<filename>internal/product/models.go
package product
import (
"time"
"gopkg.in/mgo.v2/bson"
)
// Product is something we have for sale.
type Product struct {
	ID           bson.ObjectId `bson:"_id" json:"id"`                      // Unique identifier.
	Name         string        `bson:"name" json:"name"`                   // Display name of the product.
	Notes        string        `bson:"notes" json:"notes"`                 // Optional descriptive field.
	Family       string        `bson:"family" json:"family"`               // Which family provided the product.
	UnitPrice    int           `bson:"unit_price" json:"unit_price"`       // Price for one item in cents.
	Quantity     int           `bson:"quantity" json:"quantity"`           // Original number of items available.
	DateCreated  time.Time     `bson:"date_created" json:"date_created"`   // When the product was added.
	DateModified time.Time     `bson:"date_modified" json:"date_modified"` // When the product record was last modified.
}

// NewProduct defines the information we need when adding a Product to
// our offerings.
type NewProduct struct {
	Name      string `json:"name" validate:"required"`
	Notes     string `json:"notes"`
	Family    string `json:"family" validate:"required"`
	UnitPrice int    `json:"unit_price" validate:"required,gte=0"`
	Quantity  int    `json:"quantity" validate:"required,gte=1"`
}

// UpdateProduct defines what information may be provided to modify an
// existing Product. All fields are optional so clients can send just the
// fields they want changed. It uses pointer fields so we can differentiate
// between a field that was not provided and a field that was provided as
// explicitly blank. Normally we do not want to use pointers to basic types but
// we make exceptions around marshalling/unmarshalling.
type UpdateProduct struct {
	Name      *string `json:"name"`
	Notes     *string `json:"notes"`
	Family    *string `json:"family"`
	UnitPrice *int    `json:"unit_price" validate:"omitempty,gte=0"`
	Quantity  *int    `json:"quantity" validate:"omitempty,gte=1"`
}

// Sale represents a transaction where we sold some quantity of a
// Product.
type Sale struct{}

// NewSale defines what we require when creating a Sale record.
type NewSale struct{}
|
<reponame>unerh/cosmos<filename>packages/@cdk-cosmos/core/src/cosmos/cosmos-core-stack.ts
import { Construct, Stack, CfnOutput, Tags, IConstruct } from '@aws-cdk/core';
import { IHostedZone, HostedZone } from '@aws-cdk/aws-route53';
import { Role, ServicePrincipal, ManagedPolicy, CompositePrincipal } from '@aws-cdk/aws-iam';
import { NetworkBuilder } from '@aws-cdk/aws-ec2/lib/network-util';
import { Config, createCrossAccountExportProvider } from '@cosmos-building-blocks/common';
import { getPackageVersion } from '@cosmos-building-blocks/common/lib/utils';
import { BaseStack, BaseStackProps } from '../components/base';
import { RemoteZone } from '../components/remote';
// Brand used to recognize a Cosmos Core stack instance even across
// duplicate copies of this module (Symbol.for uses the global registry).
const COSMOS_CORE_SYMBOL = Symbol.for('@cdk-cosmos/core.CosmosCore');

// Contract the Cosmos core exposes to dependent stacks.
export interface ICosmosCore extends Construct {
  readonly config: Config;
  readonly libVersion: string;
  readonly networkBuilder?: NetworkBuilder;
  readonly rootZone: IHostedZone;
  readonly cdkMasterRoleStaticArn: string;
  readonly crossAccountExportServiceToken: string;
}

// Props for CosmosCoreStack; `tld` is the root DNS name to host.
export interface CosmosCoreStackProps extends BaseStackProps {
  tld: string;
}
// Singleton core stack of a Cosmos: root hosted zone, master CDK deploy
// role, cross-account export provider, and the cross-stack outputs that
// dependent stacks consume.
export class CosmosCoreStack extends BaseStack implements ICosmosCore {
  readonly config: Config;
  readonly libVersion: string;
  readonly rootZone: HostedZone;
  readonly cdkMasterRole: Role;
  readonly cdkMasterRoleStaticArn: string;
  readonly crossAccountExportServiceToken: string;

  constructor(scope: Construct, id: string, props: CosmosCoreStackProps) {
    super(scope, id, {
      description: 'Cosmos: Singleton resources for the Cosmos, like RootZone, CdkRepo and CdkMasterRole',
      partition: 'Core',
      type: 'Cosmos',
      ...props,
    });

    // Brand this instance so the isCosmosCore() guard works across
    // module copies.
    Object.defineProperty(this, COSMOS_CORE_SYMBOL, { value: true });

    const { tld } = props;

    this.config = new Config(this, 'Config', 'Cosmos');
    this.libVersion = getPackageVersion(__dirname);

    // Root DNS zone for the whole Cosmos, named after the TLD.
    this.rootZone = new HostedZone(this, 'RootZone', {
      zoneName: `${tld}`.toLowerCase(),
      comment: `Core TLD Root Zone for ${this.node.id} Cosmos.`,
    });

    // Admin role assumable by CodeBuild, CodePipeline and Lambda for CDK
    // deployments.
    const cdkMasterRoleName = this.singletonId('CdkMasterRole');
    this.cdkMasterRole = new Role(this, 'CdkMasterRole', {
      roleName: cdkMasterRoleName,
      assumedBy: new CompositePrincipal(
        new ServicePrincipal('codebuild.amazonaws.com'),
        new ServicePrincipal('codepipeline.amazonaws.com'),
        new ServicePrincipal('lambda.amazonaws.com')
      ),
    });
    this.cdkMasterRole.addManagedPolicy(ManagedPolicy.fromAwsManagedPolicyName('AdministratorAccess'));
    // Statically-composed ARN so consumers don't need a CFN export.
    this.cdkMasterRoleStaticArn = `arn:aws:iam::${Stack.of(this).account}:role/${cdkMasterRoleName}`;

    this.crossAccountExportServiceToken = createCrossAccountExportProvider(this, this.cdkMasterRole);

    // Cross-stack outputs for dependent stacks.
    new CfnOutput(this, 'CoreLibVersion', {
      exportName: this.singletonId('LibVersion'),
      value: this.libVersion,
    });
    new RemoteZone(this.rootZone, this.singletonId('RootZone'));
    new CfnOutput(this, 'CrossAccountExportServiceToken', {
      exportName: this.singletonId('CrossAccountExportServiceToken'),
      value: this.crossAccountExportServiceToken,
    });

    Tags.of(this).add('cosmos', this.node.id);
  }

  // Type guard: true when x was branded by the CosmosCoreStack constructor.
  static isCosmosCore(x: any): x is CosmosCoreStack {
    return typeof x === 'object' && x !== null && COSMOS_CORE_SYMBOL in x;
  }

  // Walk up the construct tree (starting at the construct itself) to find
  // the owning Cosmos Core stack; throws if none exists.
  static of(construct: IConstruct): CosmosCoreStack {
    const scopes = [construct, ...construct.node.scopes];
    for (const scope of scopes) {
      if (CosmosCoreStack.isCosmosCore(scope)) return scope;
    }
    throw new Error(`No Cosmos Core Stack could be identified for the construct at path ${construct.node.path}`);
  }
}
|
//在一个 平衡字符串 中,'L' 和 'R' 字符的数量是相同的。
//
// 给你一个平衡字符串 s,请你将它分割成尽可能多的平衡字符串。
//
// 注意:分割得到的每个字符串都必须是平衡字符串。
//
// 返回可以通过分割得到的平衡字符串的 最大数量 。
//
//
//
// 示例 1:
//
//
//输入:s = "RLRRLLRLRL"
//输出:4
//解释:s 可以分割为 "RL"、"RRLL"、"RL"、"RL" ,每个子字符串中都包含相同数量的 'L' 和 'R' 。
//
//
// 示例 2:
//
//
//输入:s = "RLLLLRRRLR"
//输出:3
//解释:s 可以分割为 "RL"、"LLLRRR"、"LR" ,每个子字符串中都包含相同数量的 'L' 和 'R' 。
//
//
// 示例 3:
//
//
//输入:s = "LLLLRRRR"
//输出:1
//解释:s 只能保持原样 "LLLLRRRR".
//
//
// 示例 4:
//
//
//输入:s = "RLRRRLLRLL"
//输出:2
//解释:s 可以分割为 "RL"、"RRRLLRLL" ,每个子字符串中都包含相同数量的 'L' 和 'R' 。
//
//
//
//
// 提示:
//
//
// 1 <= s.length <= 1000
// s[i] = 'L' 或 'R'
// s 是一个 平衡 字符串
//
// Related Topics 贪心 字符串 计数
// 👍 125 👎 0
package algorithm_1200
// balancedStringSplit returns the maximum number of balanced substrings
// (equal counts of 'L' and 'R') that the balanced string s can be split
// into.  Greedy: cut every time the running balance returns to zero —
// cutting as early as possible is always optimal.
func balancedStringSplit(s string) (res int) {
	// Single running balance (+1 for 'L', -1 otherwise) replaces the two
	// separate counters; zero balance means the prefix since the last cut
	// is balanced.
	balance := 0
	for i := 0; i < len(s); i++ {
		// Compare the byte directly — the original `string(s[i]) == "L"`
		// allocated a one-byte string per character.
		if s[i] == 'L' {
			balance++
		} else {
			balance--
		}
		if balance == 0 {
			res++
		}
	}
	return
}
|
package has
import (
"context"
"fmt"
applicationservice "github.com/redhat-appstudio/application-service/api/v1alpha1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
"github.com/redhat-appstudio/e2e-tests/pkg/client"
)
// SuiteController wraps the shared Kubernetes client with helpers the
// e2e test suites use to manage Application custom resources.
type SuiteController struct {
	*client.K8sClient
}

// NewSuiteController builds a SuiteController backed by a freshly created
// Kubernetes client; returns an error if the client cannot be created.
func NewSuiteController() (*SuiteController, error) {
	client, err := client.NewK8SClient()
	if err != nil {
		return nil, fmt.Errorf("error creating client-go %v", err)
	}
	return &SuiteController{
		client,
	}, nil
}
// GetHasApplicationStatus returns the Status of the Application custom
// resource with the given name in the given namespace.
func (h *SuiteController) GetHasApplicationStatus(name, namespace string) (*applicationservice.ApplicationStatus, error) {
	namespacedName := types.NamespacedName{
		Name:      name,
		Namespace: namespace,
	}
	application := applicationservice.Application{}
	err := h.KubeRest().Get(context.TODO(), namespacedName, &application)
	if err != nil {
		return nil, err
	}
	return &application.Status, nil
}
// CreateHasApplication creates an Application custom resource with an
// empty spec in the given namespace and returns the created object.
func (h *SuiteController) CreateHasApplication(name, namespace string) (*applicationservice.Application, error) {
	application := applicationservice.Application{
		ObjectMeta: v1.ObjectMeta{
			Name:      name,
			Namespace: namespace,
		},
		Spec: applicationservice.ApplicationSpec{},
	}
	err := h.KubeRest().Create(context.TODO(), &application)
	if err != nil {
		return nil, err
	}
	return &application, nil
}
// DeleteHasApplication deletes the Application custom resource with the
// given name from the given namespace.
func (h *SuiteController) DeleteHasApplication(name, namespace string) error {
	application := applicationservice.Application{
		ObjectMeta: v1.ObjectMeta{
			Name:      name,
			Namespace: namespace,
		},
	}
	return h.KubeRest().Delete(context.TODO(), &application)
}
|
import React from "react";
import TimeLogForm from "./TimeLogForm";
import { FIELDS } from "./formConfig";
import moment from "moment";
import { isEmpty } from "lodash";
import {
getHours,
getAmPm,
getSelectedDate,
getSelectedTime,
} from "../Shared/dateTimeFieldHelpers.js";
const NewTimeLogDateTimeFields = ({
data,
handleTimeChange,
handleFormDisable,
isFormDisabled,
}) => {
function handleDateTimeFieldChange(date, fieldId, dateOrTime, data) {
// set an empty object that will hold the date/time data
let fieldData = {};
switch (dateOrTime) {
case "DATE":
fieldData.date = moment(date).format("MM/DD/YYYY");
fieldData.am_pm = isEmpty(data[fieldId]) ? "" : data[fieldId].am_pm;
fieldData.hours = isEmpty(data[fieldId]) ? "" : data[fieldId].hours;
fieldData.minutes = isEmpty(data[fieldId]) ? "" : data[fieldId].minutes;
break;
case "TIME":
let today = new Date();
fieldData.date = isEmpty(data[fieldId])
? moment(today).format("MM/DD/YYYY")
: data[fieldId].date;
fieldData.am_pm = getAmPm(date);
fieldData.hours = getHours(date);
fieldData.minutes = date.getMinutes();
break;
default:
console.log("default");
break;
}
handleTimeChange(fieldId, fieldData);
updateErrorState();
}
const getFormSelection = (field, dateOrTime) => {
if (dateOrTime === "time") {
return getSelectedTime(data, field);
} else if (dateOrTime === "date") {
return getSelectedDate(data, field);
}
};
const updateErrorState = () => {
const issueRecievedTime = getSelectedTime(
data,
FIELDS.TIMELOG.ISSUE_RECEIVED_TIME
);
const workSiteArriveTime = getSelectedTime(
data,
FIELDS.TIMELOG.WORKSITE_ARRIVE
);
const workSiteLeaveTime = getSelectedTime(
data,
FIELDS.TIMELOG.WORKSITE_LEAVE
);
const worksiteReturnTime = getSelectedTime(
data,
FIELDS.TIMELOG.WORKSITE_SHOP_RETURN
);
// TODO: Right now, we're checking to make sure times aren't greater than the checkpoint
// just previous to it. But if some times are left blank, a validation might pass that shouldn't
const shouldShowError =
(issueRecievedTime > workSiteArriveTime && !!workSiteArriveTime) ||
(workSiteArriveTime > workSiteLeaveTime && !!workSiteLeaveTime) ||
(workSiteLeaveTime > worksiteReturnTime && !!worksiteReturnTime);
if (shouldShowError) {
handleFormDisable(true);
} else {
handleFormDisable(false);
}
};
return (
<div>
<TimeLogForm
getFormSelection={getFormSelection}
handleFormChange={handleDateTimeFieldChange}
isFormDisabled={isFormDisabled}
data={data}
/>
</div>
);
};
export default NewTimeLogDateTimeFields;
|
-- For each department, return the employee(s) earning that department's
-- maximum salary. Ties are all returned, since every row matching the
-- department maximum satisfies the predicate.
SELECT e1.Name AS employee_name, e1.Salary AS employee_salary, e1.Department AS department_name
FROM Employees AS e1
WHERE e1.Salary =
    -- Correlated subquery: the highest salary within e1's own department.
    (SELECT MAX(e2.Salary)
     FROM Employees AS e2
     WHERE e1.Department = e2.Department);
|
<reponame>combra-lab/combra_loihi
"""
MIT License
Copyright (c) 2018 <NAME>
Copyright (c) 2018 <NAME>
Copyright (c) 2018 Computational Brain Lab, Computer Science Department, Rutgers University
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
"""
This module contains the base class for the Astrocyte class.
"""
import os
import numpy as np
import nxsdk.api.n2a as nx
class AstrocyteInterfaceBase():
    """Validation helpers shared by the astrocyte prototype classes."""
    # --------------------------------------
    # Validators

    @staticmethod
    def _validate_sic_window(val):
        # Accepts 0..608 ms. NOTE(review): the message says "integer" but
        # only the range is checked, so non-integer values in range pass —
        # confirm whether an isinstance check is intended. Also note this
        # raises AssertionError while _validate_ip3_sensitivity raises
        # ValueError.
        assert 0 <= val <= 608, \
            "Illegal SIC window size = " + str(val) + " ms. " + \
            "Must be an integer >= 0 and <= 608. For a value greater than 608 ms, please configure manually."

    @staticmethod
    def _validate_sic_firing_rate(val):
        # Accepts 0..356 Hz; same integer-check / exception-type caveats as
        # _validate_sic_window above.
        assert 0 <= val <= 356, \
            "Illegal SIC maximum firing rate = " + str(val) + " Hz. " + \
            "Must be an integer >= 0 and <= 356. For a value greater than 356 Hz., please configure manually."

    @staticmethod
    def _validate_ip3_sensitivity(val):
        # Strictest validator: enforces integer type and the 1..100 range,
        # raising ValueError (not AssertionError) on failure.
        if not isinstance(val, int) or val < 1 or val > 100:
            raise ValueError("IP3 Sensitivity value must be an integer between 1 and 100 (inclusive)")
class AstrocytePrototypeBase(AstrocyteInterfaceBase):
    """Parameter container for the Loihi astrocyte model.

    Stores the raw Loihi compartment parameters (spike receiver, IP3 unit,
    SIC unit, spike generator and their connection weights) and offers a
    "smart setup" that derives the low-level SIC weight/decay values from
    high-level specs (firing-rate amplitude and window size) via a
    precomputed lookup table.
    """

    def __init__(self,
                 net: nx.NxNet,
                 ip3_sensitivity,
                 sic_amplitude,
                 sic_window,
                 srVThMant,
                 srCurrentDecay,
                 srVoltageDecay,
                 srActivityImpulse,
                 srActivityTimeConstant,
                 srMinActivity,
                 srMaxActivity,
                 srHomeostasisGain,
                 srEnableHomeostasis,
                 ip3VThMant,
                 ip3CurrentDecay,
                 ip3VoltageDecay,
                 sicCurrentDecay,
                 sicVoltageDecay,
                 sgVThMant,
                 sgCurrentDecay,
                 sgVoltageDecay,
                 sr2ip3Weight,
                 ip32sicWeight,
                 DEBUG=False):
        """
        Initialize the parameters of the astrocyte model.

        :param net: NxNet instance the astrocyte will be built into
        :param ip3_sensitivity: Spike time gap of ip3 integrator in ms
            (or None to keep the raw sr2ip3Weight)
        :param sic_amplitude: Max firing rate of SIC spike generator in Hz
            (or None to keep the raw SIC parameters)
        :param sic_window: Firing window of SIC spike generator in ms
            (or None to keep the raw SIC parameters)
        :param DEBUG: print configuration decisions when True
        """
        # Loihi net
        self.net = net
        # Astrocyte Core Properties
        # ---------------------------------------------------
        # Spike Receiver Properties
        self.srVThMant = srVThMant
        self.srCurrentDecay = srCurrentDecay
        self.srVoltageDecay = srVoltageDecay
        self.srActivityImpulse = srActivityImpulse
        self.srActivityTimeConstant = srActivityTimeConstant
        self.srMinActivity = srMinActivity
        self.srMaxActivity = srMaxActivity
        self.srHomeostasisGain = srHomeostasisGain
        self.srEnableHomeostasis = srEnableHomeostasis
        # IP3 unit Properties
        self.ip3VThMant = ip3VThMant
        self.ip3CurrentDecay = ip3CurrentDecay
        self.ip3VoltageDecay = ip3VoltageDecay
        # SIC Properties
        self.sicCurrentDecay = sicCurrentDecay
        self.sicVoltageDecay = sicVoltageDecay
        # Spike Generator Properties
        self.sgVThMant = sgVThMant
        self.sgCurrentDecay = sgCurrentDecay
        self.sgVoltageDecay = sgVoltageDecay
        # Spike Receiver to IP3 unit connection weight
        self.sr2ip3Weight = sr2ip3Weight
        self.ip32sicWeight = ip32sicWeight
        # ---------------------------------------------------
        # Smart Setup Properties
        # ---------------------------------------------------
        # When both high-level SIC specs are given, override the raw
        # ip32sicWeight/sicCurrentDecay with table-derived values.
        # NOTE(review): if only one (or neither) of sic_window /
        # sic_amplitude is provided, the backing attributes _sicWindow /
        # _sicAmplitude are never set, so using the sicAmplitude/sicWindow
        # property setters later will raise AttributeError — confirm.
        if sic_window is not None and sic_amplitude is not None:
            if DEBUG:
                print("DEBUG: Configuring based on provided window size and maximum firing rate")
            self._validate_sic_window(sic_window)
            self._validate_sic_firing_rate(sic_amplitude)
            self.ip32sicWeight, self.sicCurrentDecay = AstrocytePrototypeBase._calculate_sic_props(sic_amplitude,
                                                                                                   sic_window)
            # Scale the decay into Loihi's fixed-point representation.
            self.sicCurrentDecay = int(self.sicCurrentDecay * 2 ** 12)
            self._sicWindow = sic_window
            self._sicAmplitude = sic_amplitude
        if ip3_sensitivity is not None:
            if DEBUG:
                print("DEBUG: Configuring based on provided IP3 Sensitivity level")
            self.ip3Sensitivity = ip3_sensitivity

    @property
    def ip3Sensitivity(self):
        """
        Read the ip3 sensitivity (spike time gap of the ip3 integrator, ms).
        :return:
        """
        return self._ip3Sensitivity

    @property
    def sicAmplitude(self):
        """
        Read the SIC amplitude (max SIC spike generator firing rate, Hz).
        :return:
        """
        return self._sicAmplitude

    @property
    def sicWindow(self):
        """
        Read the SIC window (SIC spike generator firing window, ms).
        :return:
        """
        return self._sicWindow

    @ip3Sensitivity.setter
    def ip3Sensitivity(self, val):
        """
        Set ip3 sensitivity and transform into Loihi parameters: the
        sensitivity value is used directly as the sr->ip3 weight.
        :param val: ip3 spike time in ms
        :return:
        """
        self._validate_ip3_sensitivity(val)
        self._ip3Sensitivity = val
        self.sr2ip3Weight = self._ip3Sensitivity

    @sicAmplitude.setter
    def sicAmplitude(self, val):
        """
        Set SIC amplitude and recompute the Loihi weight/decay parameters.
        Requires that a SIC window has already been set (reads _sicWindow).
        :param val: sic firing rate in hz
        :return:
        """
        self._validate_sic_firing_rate(val)
        self._sicAmplitude = val
        self.ip32sicWeight, self.sicCurrentDecay = AstrocytePrototypeBase._calculate_sic_props(self._sicAmplitude,
                                                                                               self._sicWindow)
        self.sicCurrentDecay = int(self.sicCurrentDecay * 2 ** 12)

    @sicWindow.setter
    def sicWindow(self, val):
        """
        Set SIC window and recompute the Loihi weight/decay parameters.
        Requires that a SIC amplitude has already been set (reads
        _sicAmplitude).
        :param val: sic firing window in ms
        :return:
        """
        self._validate_sic_window(val)
        self._sicWindow = val
        self.ip32sicWeight, self.sicCurrentDecay = AstrocytePrototypeBase._calculate_sic_props(self._sicAmplitude,
                                                                                               self._sicWindow)
        self.sicCurrentDecay = int(self.sicCurrentDecay * 2 ** 12)

    @staticmethod
    def _calculate_sic_props(firing_rate, window_size):
        """
        Calculate the optimal values to achieve closest specifications to
        those provided for the SIC, by nearest-neighbor search (squared
        distance in firing-rate/window space) over a precomputed table
        shipped next to this module. Row layout inferred from usage:
        (weight, current decay, firing rate, window size).
        :param firing_rate:
        :param window_size:
        :return: ip32sicWeight, sicCurrentDecay
        """
        configs = np.load(os.path.join(os.path.dirname(__file__), "sic_data_table.npy"))
        # Seed with an arbitrary row, then scan all rows for the minimum.
        optimal_config = configs[15]
        min_diff = AstrocytePrototypeBase._calc_diff(optimal_config[2], optimal_config[3], firing_rate, window_size)
        for config in configs:
            cost = AstrocytePrototypeBase._calc_diff(config[2], config[3], firing_rate, window_size)
            if min_diff > cost:
                min_diff = cost
                optimal_config = config
        return optimal_config[0], optimal_config[1]

    @staticmethod
    def _calc_diff(config_fr, config_ws, firing_rate, window_size):
        # Squared Euclidean distance between a table row's (rate, window)
        # and the requested (rate, window).
        return np.power(config_fr - firing_rate, 2) + np.power(config_ws - window_size, 2)
|
#include <iostream>
// Simple PID controller: tracks proportional, integral and derivative
// error terms and combines them into a corrective control output.
class PID {
private:
    double Kp, Ki, Kd;                 // gain coefficients
    double p_error, i_error, d_error;  // current error terms

public:
    // Initialize the gains; all error terms start at zero.
    PID(double Kp, double Ki, double Kd)
        : Kp(Kp), Ki(Ki), Kd(Kd), p_error(0), i_error(0), d_error(0) {}

    // Feed in the latest cross-track error and refresh the three terms.
    void UpdateError(double cte) {
        const double previous = p_error;
        d_error = cte - previous;  // change since the last update
        p_error = cte;             // current error
        i_error += cte;            // running sum of errors
    }

    // Weighted, negated sum of the three terms (the steering correction).
    double TotalError() {
        return -Kp * p_error - Ki * i_error - Kd * d_error;
    }
};
// Demonstrates one update/output cycle of the PID controller.
int main() {
    // Example usage of the PID controller
    PID pid_controller(0.1, 0.01, 0.5);  // Initialize PID controller with coefficients
    double current_error = 0.05;         // Current error from the system
    pid_controller.UpdateError(current_error);  // Update the PID errors
    double control_output = pid_controller.TotalError();  // Calculate the control output
    std::cout << "Control Output: " << control_output << std::endl;
    return 0;
}
|
<filename>example/src/App.js
import React, { Component, Fragment } from "react";
import { Spiral } from "react-audible-visuals";
// Theme switch: "B" = dark (black background, white text); anything
// else = light.
const BW = "B"

// Full-viewport wrapper that centers its content; colors derive from BW.
const styles = {
  wrapper: {
    height: "100vh",
    width: "100vw",
    bottom: 0,
    left: 0,
    display: "flex",
    justifyContent: "center",
    alignItems: "center",
    zIndex: -1,
    backgroundColor: BW === "B" ? "black" : "white",
  },
  text: {
    color: BW === "B" ? "white" : "black",
  }
};
// Demo app: renders a bouncing greeting over the Spiral audio visual.
export default class App extends Component {
  render() {
    return (
      <Fragment>
        <div style={styles.wrapper}>
          <div className="bounce">
            <p style={styles.text}> HELLO </p>
          </div>
          <Spiral />
        </div>
      </Fragment>
    );
  }
}
|
#!/bin/bash
# Index a directory of images with the LIRE tools.
# Usage: <script> srcDir indexDir [FEATURES] [NUMTHREADS]

# exit on error
set -e

CWD=$(pwd)
# Absolute directory containing this script (used to find sibling scripts).
SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"

# Print a message to stderr and exit with the given code (default 1).
function dofail {
    cd $CWD
    printf '%s\n' "$1" >&2 ## Send message to stderr. Exclude >&2 if you don't want it that way.
    exit "${2-1}" ## Return a code specified by $2 or 1 by default.
}

# Defaults; can be overridden by the optional 3rd and 4th parameters.
LIREINDEXER_NUMTHREADS=8
LIREINDEXER_FEATURES=CEDD,FCTH,OpponentHistogram,ColorLayout,JCD,SimpleColorHistogram

# check parameters
if [ "$#" -lt 2 ]; then
    dofail "USAGE: $0 srcDir indexDir\\n FATAL: requires 'srcDir', 'indexDir' with files to index 'import-XXXX' and index to write to\n\noptional parameters:\n- FEATURES default $LIREINDEXER_FEATURES\n- NUMTHREADS default $LIREINDEXER_NUMTHREADS\n" 1
    # NOTE(review): unreachable — dofail already exits.
    exit 1
fi

INDEXSRC_MEDIADIR=$1
INDEXDIR=$2
if [ ! -d "$INDEXSRC_MEDIADIR" ]; then
    echo "FATAL: srcDir '${INDEXSRC_MEDIADIR}' must exist"
    exit 1
fi

# check optional parameters
if [ "$#" -gt 2 ]; then
    LIREINDEXER_FEATURES=$3
fi
if [ "$#" -gt 3 ]; then
    LIREINDEXER_NUMTHREADS=$4
fi

echo "start - indexing images"

# Pull in environment variables (LIRETOOLS among others) from the sibling
# configure script.
echo "now: configure linux vars: run sbin/configure-environment.sh"
source ${SCRIPTPATH}/configure-environment.bash

# Run the gradle indexing task from the LIRE tools checkout, then return
# to the original working directory.
echo "now: running indexing images from '$INDEXSRC_MEDIADIR' to '$INDEXDIR' features:'$LIREINDEXER_FEATURES' threads:'$LIREINDEXER_NUMTHREADS'"
cd ${LIRETOOLS}
./gradlew runIndexing --args="-i $INDEXSRC_MEDIADIR -l $INDEXDIR -n $LIREINDEXER_NUMTHREADS -f $LIREINDEXER_FEATURES"
cd ${CWD}

echo "done - indexing images"
|
<filename>client/src/components/Roster.tsx
import {useState, useRef, useEffect} from 'react';
import {ListNav} from 'keynav-web';
import './Roster.scss';
// Roster list with keyboard navigation (keynav-web ListNav) attached to
// the <ol>. The nav behavior is re-created whenever the chat changes and
// removed on cleanup.
export function Roster(props) {
  const [listNav, setListNav] = useState<any>(null);
  const rosterRef = useRef<HTMLOListElement>(null);

  // Add Keynav when component created (remove it when destroyed)
  useEffect(() => {
    // Keep the instance in a local variable so the cleanup closure below
    // sees the value created by THIS effect run. The previous version read
    // the `listNav` state variable, which inside the cleanup is still the
    // stale render-time value (null on the first run, the previous
    // instance afterwards), so removeBehavior() was never called on the
    // instance this effect created.
    let nav: any = null;
    if (rosterRef.current) {
      nav = new ListNav({
        listEl: rosterRef.current,
        listItemsSelector: 'li'
      });
      setListNav(nav);
    }
    return () => {
      if (nav) {
        nav.removeBehavior();
        setListNav(null);
      }
    }
    // chatId needed so will update when Route changes (on diff chat shown)
  }, [props.chatId]);

  return (
    <section className="roster">
      <h3 className="sr-only">Roster</h3>
      {props.roster.length === 0 &&
        <div>No Users yet?</div>
      }
      {props.roster.length > 0 &&
        <ol className="list" ref={rosterRef}>
          {props.roster && props.roster.map(user => {
            return (
              <li className="item" key={user.id}>{user.username}</li>
            )
          })}
        </ol>
      }
    </section>
  )
}
|
#!/bin/bash
# Installer for WireGuard + wg-dashboard + go-dnsmasq on Ubuntu/Debian.
#########################################
#
# Not really maintained, and there are a few things to do:
# 1. Prompt for the IP address.
# 2. Prompt for the default search domain.
#
#########################################

# Abort on the first failing command.
set -e

# Must run as root: installs packages, writes systemd units, edits sysctl.
if [[ "$EUID" -ne 0 ]]; then
    echo "Sorry, this script must be run as root"
    echo "Maybe try this:"
    echo "curl https://raw.githubusercontent.com/KittDoesCode/wg-dashboard/master/install_script.sh | sudo bash"
    exit
fi

# i = distributor id, s = short, gives us name of the os ("Ubuntu", "Debian", ...)
if [[ "$(lsb_release -is)" == "Ubuntu" ]]; then
    # needed for add-apt-repository
    apt-get install -y software-properties-common
    # add wireguard repository to apt
    add-apt-repository -y ppa:wireguard/wireguard
    # install wireguard
    apt-get install -y wireguard
    # install linux kernel headers
    apt-get install -y linux-headers-$(uname -r)
elif [[ "$(lsb_release -is)" == "Debian" ]]; then
    if [[ "$(lsb_release -rs)" -ge "10" ]]; then
        # add unstable list (wireguard was only in unstable at the time)
        echo "deb http://deb.debian.org/debian/ unstable main" > /etc/apt/sources.list.d/unstable.list
        printf 'Package: *\nPin: release a=unstable\nPin-Priority: 90\n' > /etc/apt/preferences.d/limit-unstable
        # update repository
        apt update
        # install linux kernel headers
        apt-get install -y "linux-headers-$(uname -r)" ufw
        # install wireguard
        apt install -y wireguard
        # update again (needed because of the linux kernel headers)
        apt-get update && apt-get upgrade
    else
        echo "Sorry, your operating system is not supported"
        exit
    fi
else
    echo "Sorry, your operating system is not supported"
    exit
fi

# enable ipv4 packet forwarding (now and persistently across reboots)
sysctl -w net.ipv4.ip_forward=1
echo "net.ipv4.ip_forward=1" >> /etc/sysctl.conf

# install nodejs
# NOTE(review): Node 10 is long end-of-life — consider a newer setup script.
curl https://deb.nodesource.com/setup_10.x | bash
apt-get install -y nodejs

# go into home folder
cd /opt

# delete wg-dashboard folder and wg-dashboard.tar.gz to make sure it does not exist
rm -rf wg-dashboard
rm -rf wg-dashboard.tar.gz

# download wg-dashboard latest release (resolve the tarball URL from the
# GitHub "latest release" redirect page)
curl -L https://github.com/$(wget https://github.com/KittDoesCode/wg-dashboard/releases/latest -O - | egrep '/.*/.*/.*tar.gz' -o) --output wg-dashboard.tar.gz

# create directory for dashboard
mkdir -p wg-dashboard

# unzip wg-dashboard
tar -xzf wg-dashboard.tar.gz --strip-components=1 -C wg-dashboard

# delete unpacked .tar.gz
rm -f wg-dashboard.tar.gz

# go into wg-dashboard folder
cd wg-dashboard

# install node modules
npm i --production --unsafe-perm

# create service unit file
echo "[Unit]
Description=wg-dashboard service
After=network.target
[Service]
Restart=always
WorkingDirectory=/opt/wg-dashboard
ExecStart=/usr/bin/node /opt/wg-dashboard/src/server.js
[Install]
WantedBy=multi-user.target" > /etc/systemd/system/wg-dashboard.service

# reload systemd unit files
systemctl daemon-reload

# start wg-dashboard service on reboot
systemctl enable wg-dashboard

# start wg-dashboard service
systemctl start wg-dashboard

# enable port 22 in firewall for ssh
ufw allow 22

# enable firewall
ufw --force enable

# enable port 58210 in firewall for wireguard
ufw allow 58210

# enable port 53 in firewall for dns
ufw allow in on wg0 to any port 53

# Allow access to web server
ufw allow from any to any port 10000 proto tcp

# Install go-dnsmasq
cd /usr/local/sbin
# NOTE(review): both branches below download the identical go-dnsmasq
# binary, so the Ubuntu/Debian distinction here is redundant.
if [[ "$(lsb_release -is)" == "Ubuntu" ]]; then
    # download go-dnsmasq
    curl -L https://github.com/janeczku/go-dnsmasq/releases/download/1.0.7/go-dnsmasq_linux-amd64 --output go-dnsmasq
elif [[ "$(lsb_release -is)" == "Debian" ]]; then
    # download go-dnsmasq
    curl -L https://github.com/janeczku/go-dnsmasq/releases/download/1.0.7/go-dnsmasq_linux-amd64 --output go-dnsmasq
fi

# change permissions
chmod 744 /usr/local/sbin/go-dnsmasq

# write autostart config
# NOTE(review): '[ad.domain]' and 'IP.AD.DR.ESS' are placeholders that must
# be replaced manually before go-dnsmasq will work (see the TODO list at
# the top of this script).
echo "
[Unit]
Description=Go-dnsmasq DNS Server
Documentation=https://github.com/janeczku/go-dnsmasq
After=network.target
[Service]
LimitNOFILE=8192
ExecStart=/usr/local/sbin/go-dnsmasq \
--listen 0.0.0.0 \
--search-domains [ad.domain] \
--enable-search \
--nameservers IP.AD.DR.ESS
Restart=on-failure
[Install]
WantedBy=multi-user.target" > /etc/systemd/system/go-dnsmasq.service

# disable systemd-resolved from startup
systemctl disable systemd-resolved

# stop systemd-resolved service
systemctl stop systemd-resolved

# enable go-dnsmasq on system start
systemctl enable go-dnsmasq

# start go-dnsmasq
systemctl start go-dnsmasq

# ** To be completed **
# Requires prompting for hostname, domain, etc.
# Recommendations (for now)
#echo "We currently recommend allowing port 80 (http) into this server to get a Let's Encrypt TLS certificate"
echo "Security audits have not been performed on the dashboard, so we don't recommend exposing it to the Internet"
# install nginx
#apt install nginx
# install site config
#
# install acme
#curl https://get.acme.sh | sh
# get certificate.
echo ""
echo ""
echo "=========================================================================="
echo ""
echo "> Done! WireGuard and wg-dashboard have been successfully installed"
#echo "> You can now connect to the dashboard via ssh tunnel by visiting:"
#echo ""
#echo -e "\t\thttp://localhost:3000"
#echo ""
#echo "> You can open an ssh tunnel from your local machine with this command:"
#echo ""
#echo -e "\t\tssh -L 3000:localhost:3000 <your_vps_user>@<your_vps_ip>"
#echo ""
#echo "> Please save this command for later, as you will need it to access the dashboard"
echo ""
echo "=========================================================================="
echo ""
echo ""
|
#!/bin/sh
# Vault installer: unpacks the vault binary, hardens it against tampering,
# creates a dedicated runtime user with SSH access denied, registers the
# init script, and opens the Vault API/cluster ports in iptables.
set -e
# Force root: re-exec under sudo when not already running as root.
# FIX: the original tested `[ $EUID != 0 ]`, but $EUID is a bash-ism that is
# unset in POSIX sh (this script's shebang), which makes the test expand to
# the invalid `[ != 0 ]`. `id -u` is portable and always defined.
if [ "$(id -u)" -ne 0 ]; then
    sudo "$0" "$@"
    exit $?
fi
# Install Vault bin
echo "-- Installing vault"
unzip /tmp/files/vault_0.8.3_linux_386.zip
mv ./vault /usr/bin
# Try to help with tampering: root-owned, read/execute only for everyone.
echo "-- Setting bin permissions"
chown root:root /usr/bin/vault
chmod 555 /usr/bin/vault
# Allow for vault to use mlock without root (prevents secrets being swapped).
sudo setcap cap_ipc_lock=+ep $(readlink -f $(which vault))
# Make Vault immutable (must be lifted with `chattr -i` before upgrades).
chattr +i /usr/bin/vault
echo "-- Making Vault User & Deny SSH"
# -m create home, -U create matching group, -c comment.
useradd vault -mUc 'Vault Runtime User'
# Lock down ~/.ssh so no authorized_keys can ever be planted for this user.
mkdir '/home/vault/.ssh/'
chown root:root '/home/vault/.ssh/'
chmod 000 '/home/vault/.ssh/'
touch '/home/vault/.ssh/authorized_keys'
chattr +i '/home/vault/.ssh/' '/home/vault/.ssh/authorized_keys'
echo "-- Installing Daemon"
mv /tmp/files/vaultd /etc/init.d/vaultd
chown root:root /etc/init.d/vaultd
chmod 751 /etc/init.d/vaultd
chattr +i /etc/init.d/vaultd
mv /tmp/files/vault.conf /etc/init/vault.conf
chmod 644 /etc/init/vault.conf
chattr +i /etc/init/vault.conf
echo "-- Registering for boot."
chkconfig --add vaultd
echo "-- Allow vault traffic in IP tables"
# 8200 = Vault API, 8201 = cluster/replication traffic.
iptables -A INPUT -p tcp --dport 8200 -m state --state NEW,ESTABLISHED -j ACCEPT
iptables -A OUTPUT -p tcp --sport 8200 -m state --state NEW,ESTABLISHED -j ACCEPT
iptables -A INPUT -p tcp --dport 8201 -m state --state NEW,ESTABLISHED -j ACCEPT
iptables -A OUTPUT -p tcp --sport 8201 -m state --state NEW,ESTABLISHED -j ACCEPT
service iptables save
echo "-- Disable SSH on reboot"
# chkconfig off sshd
# TODO: Auto CP Cert into image on boot
|
# Evaluate a fine-tuned language model on WikiText-103 validation data.
# The augmentation keeps only nouns/verbs/adjectives/adverbs in the first two
# thirds of each example, and evaluation scores the penultimate sixth.
# Paths are relative to the experiment checkout; batch size 1 with
# --dataloader_drop_last keeps evaluation deterministic across runs.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-SS/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-SS/7-1024+0+512-N-VB-ADJ-ADV-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_verbs_adjectives_and_adverbs_first_two_thirds_sixth --eval_function penultimate_sixth_eval
|
<reponame>wenfang6/XSQL<filename>sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveDirCommand.scala<gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.hive.common.FileUtils
import org.apache.hadoop.hive.ql.plan.TableDesc
import org.apache.hadoop.hive.serde.serdeConstants
import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
import org.apache.spark.SparkException
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.hive.client.HiveClientImpl
import org.apache.spark.sql.util.SchemaUtils
/**
* Command for writing the results of `query` to file system.
*
* The syntax of using this command in SQL is:
* {{{
* INSERT OVERWRITE [LOCAL] DIRECTORY
* path
* [ROW FORMAT row_format]
* [STORED AS file_format]
* SELECT ...
* }}}
*
* @param isLocal whether the path specified in `storage` is a local directory
* @param storage storage format used to describe how the query result is stored.
* @param query the logical plan representing data to write to
* @param overwrite whether overwrites existing directory
*/
case class InsertIntoHiveDirCommand(
    isLocal: Boolean,
    storage: CatalogStorageFormat,
    query: LogicalPlan,
    overwrite: Boolean,
    outputColumnNames: Seq[String]) extends SaveAsHiveFile {

  override def run(sparkSession: SparkSession, child: SparkPlan): Seq[Row] = {
    // The analyzer guarantees a target location; fail fast if it is missing.
    assert(storage.locationUri.nonEmpty)
    SchemaUtils.checkColumnNameDuplication(
      outputColumnNames,
      s"when inserting into ${storage.locationUri.get}",
      sparkSession.sessionState.conf.caseSensitiveAnalysis)

    // Build a synthetic Hive table description so Hive's serde/output-format
    // machinery can be reused for a plain directory write. The table is a
    // VIEW whose "identifier" is the target path; it is never registered.
    // NOTE(review): `outputColumns` is inherited from SaveAsHiveFile /
    // the command base class (not visible in this file).
    val hiveTable = HiveClientImpl.toHiveTable(CatalogTable(
      identifier = TableIdentifier(storage.locationUri.get.toString, Some("default")),
      tableType = org.apache.spark.sql.catalyst.catalog.CatalogTableType.VIEW,
      storage = storage,
      schema = outputColumns.toStructType
    ))
    // Default to LazySimpleSerDe when no serde was specified in the query.
    hiveTable.getMetadata.put(serdeConstants.SERIALIZATION_LIB,
      storage.serde.getOrElse(classOf[LazySimpleSerDe].getName))

    val tableDesc = new TableDesc(
      hiveTable.getInputFormatClass,
      hiveTable.getOutputFormatClass,
      hiveTable.getMetadata
    )

    val hadoopConf = sparkSession.sessionState.newHadoopConf()
    val targetPath = new Path(storage.locationUri.get)
    val qualifiedPath = FileUtils.makeQualified(targetPath, hadoopConf)
    // LOCAL DIRECTORY writes resolve against the driver's local filesystem;
    // otherwise use the filesystem the qualified path belongs to.
    val (writeToPath: Path, fs: FileSystem) =
      if (isLocal) {
        val localFileSystem = FileSystem.getLocal(hadoopConf)
        (localFileSystem.makeQualified(targetPath), localFileSystem)
      } else {
        val dfs = qualifiedPath.getFileSystem(hadoopConf)
        (qualifiedPath, dfs)
      }
    if (!fs.exists(writeToPath)) {
      fs.mkdirs(writeToPath)
    }

    // The temporary path must be a HDFS path, not a local path.
    val tmpPath = getExternalTmpPath(sparkSession, hadoopConf, qualifiedPath)
    val fileSinkConf = new org.apache.spark.sql.hive.HiveShim.ShimFileSinkDesc(
      tmpPath.toString, tableDesc, false)

    try {
      // Write the query result to the temporary location first, then move
      // files into the destination, so a failed job never leaves a
      // half-overwritten target directory.
      saveAsHiveFile(
        sparkSession = sparkSession,
        plan = child,
        hadoopConf = hadoopConf,
        fileSinkConf = fileSinkConf,
        outputLocation = tmpPath.toString)

      if (overwrite && fs.exists(writeToPath)) {
        // Clear existing contents, but never delete the temp dir we created
        // (createdTempDir comes from SaveAsHiveFile).
        fs.listStatus(writeToPath).foreach { existFile =>
          if (Option(existFile.getPath) != createdTempDir) fs.delete(existFile.getPath, true)
        }
      }

      // Move (or copy, for LOCAL) every produced file into the destination.
      val dfs = tmpPath.getFileSystem(hadoopConf)
      dfs.listStatus(tmpPath).foreach {
        tmpFile =>
          if (isLocal) {
            dfs.copyToLocalFile(tmpFile.getPath, writeToPath)
          } else {
            dfs.rename(tmpFile.getPath, writeToPath)
          }
      }
    } catch {
      case e: Throwable =>
        throw new SparkException(
          "Failed inserting overwrite directory " + storage.locationUri.get, e)
    } finally {
      deleteExternalTmpPath(hadoopConf)
    }

    Seq.empty[Row]
  }
}
|
#!/usr/bin/env bash
# Copyright 2014 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
set -e
# Print the absolute directory containing this script, resolving any chain of
# symlinks through which the script may have been invoked.
function script_location() {
  local script_location="${BASH_SOURCE[0]}"
  # Resolve symlinks
  while [[ -h "$script_location" ]]; do
    DIR="$(cd -P "$( dirname "$script_location")" >/dev/null && pwd)"
    script_location="$(readlink "$script_location")"
    # A relative link target is resolved against the link's own directory.
    [[ "$script_location" != /* ]] && script_location="$DIR/$script_location"
  done
  echo "$(cd -P "$(dirname "$script_location")" >/dev/null && pwd)"
}
# Generate the unified API docs into dev/docs/doc using dartdoc plus the
# repo's custom dartdoc/Java/ObjC doc tools.
function generate_docs() {
  # Install and activate dartdoc.
  # NOTE: When updating to a new dartdoc version, please also update
  # `dartdoc_options.yaml` to include newly introduced error and warning types.
  "$PUB" global activate dartdoc 0.35.0

  # This script generates a unified doc set, and creates
  # a custom index.html, placing everything into dev/docs/doc.
  (cd "$FLUTTER_ROOT/dev/tools" && "$FLUTTER" pub get)
  (cd "$FLUTTER_ROOT/dev/tools" && "$PUB" get)
  (cd "$FLUTTER_ROOT" && "$DART" --disable-dart-dev --enable-asserts "$FLUTTER_ROOT/dev/tools/dartdoc.dart")
  (cd "$FLUTTER_ROOT" && "$DART" --disable-dart-dev --enable-asserts "$FLUTTER_ROOT/dev/tools/java_and_objc_doc.dart")
}
# Zip up the docs so people can download them for offline usage.
# Zip up the docs so people can download them for offline usage.
function create_offline_zip() {
  # Must be run from "$FLUTTER_ROOT/dev/docs"
  echo "$(date): Zipping Flutter offline docs archive."
  # Remove any stale archive and staging dir before re-zipping doc/.
  rm -rf flutter.docs.zip doc/offline
  (cd ./doc; zip -r -9 -q ../flutter.docs.zip .)
}
# Generate the docset for Flutter docs for use with Dash, Zeal, and Velocity.
# Generate the docset for Flutter docs for use with Dash, Zeal, and Velocity.
function create_docset() {
  # Must be run from "$FLUTTER_ROOT/dev/docs"
  # Must have dashing installed: go get -u github.com/technosophos/dashing
  # Dashing produces a LOT of log output (~30MB), so we redirect it, and just
  # show the end of it if there was a problem.
  echo "$(date): Building Flutter docset."
  rm -rf flutter.docset
  # If dashing gets stuck, Cirrus will time out the build after an hour, and we
  # never get to see the logs. Thus, we run it in the background and tail the logs
  # while we wait for it to complete.
  dashing_log=/tmp/dashing.log
  dashing build --source ./doc --config ./dashing.json > $dashing_log 2>&1 &
  dashing_pid=$!
  wait $dashing_pid && \
  cp ./doc/flutter/static-assets/favicon.png ./flutter.docset/icon.png && \
  "$DART" --disable-dart-dev --enable-asserts ./dashing_postprocess.dart && \
  tar cf flutter.docset.tar.gz --use-compress-program="gzip --best" flutter.docset
  # NOTE(review): the script runs under `set -e`, so if the && chain above
  # fails, bash may exit before this $? check is reached — verify that the
  # failure branch below can actually trigger.
  if [[ $? -ne 0 ]]; then
    >&2 echo "Dashing docset generation failed"
    tail -200 $dashing_log
    exit 1
  fi
}
# Per-branch deployment tweaks: write the appropriate robots.txt for the
# master (staging) vs stable (production) doc sites. Any other branch is a
# no-op success.
function deploy_docs() {
  # Ensure google webmaster tools can verify our site.
  cp "$FLUTTER_ROOT/dev/docs/google2ed1af765c529f57.html" "$FLUTTER_ROOT/dev/docs/doc"

  case "$LUCI_BRANCH" in
    master)
      echo "$(date): Updating $LUCI_BRANCH docs: https://master-api.flutter.dev/"
      # Disable search indexing on the master staging site so searches get only
      # the stable site.
      echo -e "User-agent: *\nDisallow: /" > "$FLUTTER_ROOT/dev/docs/doc/robots.txt"
      ;;
    stable)
      echo "$(date): Updating $LUCI_BRANCH docs: https://api.flutter.dev/"
      # Enable search indexing on the stable production site so searches land
      # here rather than on the staging site.
      echo -e "# All robots welcome!" > "$FLUTTER_ROOT/dev/docs/doc/robots.txt"
      ;;
    *)
      >&2 echo "Docs deployment cannot be run on the $LUCI_BRANCH branch."
      exit 0
  esac
}
# Move the offline archives into place, after all the processing of the doc
# directory is done. This avoids the tools recursively processing the archives
# as part of their process.
# Move the offline archives into place, after all the processing of the doc
# directory is done. This avoids the tools recursively processing the archives
# as part of their process.
function move_offline_into_place() {
  # Must be run from "$FLUTTER_ROOT/dev/docs"
  echo "$(date): Moving offline data into place."
  mkdir -p doc/offline
  mv flutter.docs.zip doc/offline/flutter.docs.zip
  du -sh doc/offline/flutter.docs.zip
  # The docset feed XML advertises the download URL that Dash/Zeal poll;
  # stable and master publish to different hosts.
  if [[ "$LUCI_BRANCH" == "stable" ]]; then
    echo -e "<entry>\n  <version>${FLUTTER_VERSION}</version>\n  <url>https://api.flutter.dev/offline/flutter.docset.tar.gz</url>\n</entry>" > doc/offline/flutter.xml
  else
    echo -e "<entry>\n  <version>${FLUTTER_VERSION}</version>\n  <url>https://master-api.flutter.dev/offline/flutter.docset.tar.gz</url>\n</entry>" > doc/offline/flutter.xml
  fi
  mv flutter.docset.tar.gz doc/offline/flutter.docset.tar.gz
  du -sh doc/offline/flutter.docset.tar.gz
}
# --- main ---
# So that users can run this script from anywhere and it will work as expected.
SCRIPT_LOCATION="$(script_location)"

# Sets the Flutter root to be "$(script_location)/../..": This script assumes
# that it resides two directory levels down from the root, so if that changes,
# then this line will need to as well.
FLUTTER_ROOT="$(dirname "$(dirname "$SCRIPT_LOCATION")")"

echo "$(date): Running docs.sh"

if [[ ! -d "$FLUTTER_ROOT" || ! -f "$FLUTTER_ROOT/bin/flutter" ]]; then
  >&2 echo "Unable to locate the Flutter installation (using FLUTTER_ROOT: $FLUTTER_ROOT)"
  exit 1
fi

# Tool paths, all resolved against the detected Flutter root.
FLUTTER_BIN="$FLUTTER_ROOT/bin"
DART_BIN="$FLUTTER_ROOT/bin/cache/dart-sdk/bin"
FLUTTER="$FLUTTER_BIN/flutter"
DART="$DART_BIN/dart"
PUB="$DART_BIN/pub"
export PATH="$FLUTTER_BIN:$DART_BIN:$PATH"

# Make sure dart is installed by invoking flutter to download it.
"$FLUTTER" --version

FLUTTER_VERSION=$(cat "$FLUTTER_ROOT/version")

# If the pub cache directory exists in the root, then use that.
FLUTTER_PUB_CACHE="$FLUTTER_ROOT/.pub-cache"
if [[ -d "$FLUTTER_PUB_CACHE" ]]; then
  # This has to be exported, because pub interprets setting it to the empty
  # string in the same way as setting it to ".".
  export PUB_CACHE="${PUB_CACHE:-"$FLUTTER_PUB_CACHE"}"
fi

generate_docs

# Skip publishing docs for PRs and release candidate branches
if [[ -n "$LUCI_CI" && -z "$LUCI_PR" ]]; then
  (cd "$FLUTTER_ROOT/dev/docs"; create_offline_zip)
  (cd "$FLUTTER_ROOT/dev/docs"; create_docset)
  (cd "$FLUTTER_ROOT/dev/docs"; move_offline_into_place)
  deploy_docs
fi
|
<gh_stars>100-1000
// Copyright 2019 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tidbinitializer
import (
"github.com/pingcap/tidb-operator/pkg/apis/pingcap/v1alpha1"
"github.com/pingcap/tidb-operator/pkg/manager/member"
)
// ControlInterface reconciles TidbInitializer
type ControlInterface interface {
	// ReconcileTidbInitializer implements the reconcile logic of TidbInitializer
	ReconcileTidbInitializer(ti *v1alpha1.TidbInitializer) error
}

// NewDefaultTidbInitializerControl returns a new instance of the default TidbInitializer ControlInterface
// backed by the given InitManager.
func NewDefaultTidbInitializerControl(manager member.InitManager) ControlInterface {
	return &defaultTidbInitializerControl{manager}
}
// defaultTidbInitializerControl is the production ControlInterface; it
// delegates all reconciliation work to the wrapped member.InitManager.
type defaultTidbInitializerControl struct {
	// tidbInitManager performs the actual sync. (Renamed from the
	// misspelled `tidbInitManger`; the field is package-private and only
	// referenced here, and the constructor uses a positional literal.)
	tidbInitManager member.InitManager
}

// ReconcileTidbInitializer syncs the given TidbInitializer via the manager.
func (c *defaultTidbInitializerControl) ReconcileTidbInitializer(ti *v1alpha1.TidbInitializer) error {
	return c.tidbInitManager.Sync(ti)
}
// Compile-time check that the default control satisfies ControlInterface.
var _ ControlInterface = &defaultTidbInitializerControl{}

// FakeTidbInitializerControl is a fake TidbInitializer ControlInterface
// for use in tests.
type FakeTidbInitializerControl struct {
	// err, when set, is returned by every ReconcileTidbInitializer call.
	err error
}

// NewFakeTidbInitializerControl returns a FakeTidbInitializerControl
func NewFakeTidbInitializerControl() *FakeTidbInitializerControl {
	return &FakeTidbInitializerControl{}
}

// SetReconcileTidbInitializerError sets error for TidbInitializerControl
func (tic *FakeTidbInitializerControl) SetReconcileTidbInitializerError(err error) {
	tic.err = err
}

// ReconcileTidbInitializer fake ReconcileTidbInitializer: returns the
// injected error if one was set, otherwise succeeds without side effects.
func (tic *FakeTidbInitializerControl) ReconcileTidbInitializer(ti *v1alpha1.TidbInitializer) error {
	if tic.err != nil {
		return tic.err
	}
	return nil
}

// Compile-time check that the fake satisfies ControlInterface.
var _ ControlInterface = &FakeTidbInitializerControl{}
|
#!/usr/bin/env bash
# Smoke tests against the deployed etportfolio service: health check, then
# register/login with missing fields (expected-failure cases) and with a
# complete username+password pair.
echo "testing 1 2 3..."
# Health endpoint (no scheme: curl defaults to http here).
curl "etportfolio.duckdns.org/health/"
# Register: missing password, then (NOTE(review): the second call sends the
# password in the `username` field — presumably an intentional bad-input
# test, but confirm it wasn't meant to be `password=testPassword`).
curl -X POST -d "username=testUsername" "https://etportfolio.duckdns.org/register" -L
curl -X POST -d "username=testPassword" "https://etportfolio.duckdns.org/register" -L
curl -X POST -d "username=testUsername&password=testPassword" "https://etportfolio.duckdns.org/register" -L
# Login: missing password, missing username, then complete credentials.
curl -X POST -d "username=testUsername" "https://etportfolio.duckdns.org/login" -L
curl -X POST -d "password=testPassword" "https://etportfolio.duckdns.org/login" -L
curl -X POST -d "username=testUsername&password=testPassword" "https://etportfolio.duckdns.org/login" -L
|
import React from 'react'
import { makeStyles } from '@material-ui/core/styles'
import { AppBar, Toolbar, Typography } from '@material-ui/core';
import MainTabs from './MainTabs';
import Logo from './Logo';
// Styles for the fixed top navigation bar. Margins are hard-coded for the
// current layout; `theme.spacing(n)` converts to the theme's spacing unit.
const useStyles = makeStyles((theme) => ({
    toolbar: {
        flexGrow: 1,
        backgroundColor: '#000000',
        justifyContent: 'center',
    },
    tabs: {
        flexGrow: 0 ,
        marginLeft: theme.spacing(20)
    },
    title:{
        marginRight: '18%'
    },
    logo:{
        flexGrow: 0,
        marginLeft: theme.spacing(70),
    }
}))

// TopBar: fixed AppBar with the site title, main navigation tabs, and logo.
const TopBar = () => {
    const classes = useStyles();
    return (
        <div>
            <AppBar position="fixed">
                <Toolbar className={classes.toolbar}>
                    <Typography variant="h6" className={classes.title}>NorsuisoSquad</Typography>
                    <MainTabs className={classes.tabs} />
                    <Logo width={50} className={classes.logo}/>
                </Toolbar>
            </AppBar>
        </div>
    )
}

export default TopBar
|
#!/bin/bash
##
# script to make docker containers' configuration persist between reboots of the firewalla box
# the script must be created at /home/pi/.firewalla/config/post_main.d/start_[service-name].sh
##
# as per our own configuration, the docker root has been moved to the ssd drive
# so, after every reboot, we must check whether or not, the drive is mounted
# and the /var/lib/docker directory has been copied to the new docker root path
# before starting the docker containers
##
# args
SRVCNAME='unifi'
TMPDIR='/tmp'
MNTDIR='/mnt/data'
CHCK_FILE='/.do_not_remove_this_file'
LOG_FILE="/tmp/docker@$SRVCNAME.log"
DATADIR="$MNTDIR/docker/$SRVCNAME/data"
USRNAME='pi'
DCKRGROUP='docker'
##
# start unifi docker container
# 1. check for access to mount point
# 2. check for access to data dir
# 3. start docker daemon
# 4. spin up the docker container
# 5. add routing rules
# 6. add dnsmasq entry
# 7. end
##
# start the script
printf "%s\n" "script has started..."
##
# check for the ssd hdd mount point
#
printf "%b\n" "\naccessing $MNTDIR$CHCK_FILE..."
if [[ -f $MNTDIR$CHCK_FILE ]]; then
    printf "%s\n" " - $MNTDIR$CHCK_FILE is accessible... ok"
else
    printf "%s\n" " * - couldn't access $MNTDIR$CHCK_FILE... something is wrong"
    printf "%b\n" "$(date +%F) - couldn't access $MNTDIR$CHCK_FILE... something is wrong" >> $LOG_FILE
    printf "%s\n" " - let's run the move docker root script which also will call the ssd hdd mounting script anyways..."
    # NOTE(review): relative invocation — assumes the script runs from the
    # directory containing move_docker_root.sh; confirm the caller's cwd.
    ./move_docker_root.sh
    sleep 5
    if [[ -f $MNTDIR$CHCK_FILE ]]; then
        printf "%s\n" " - $MNTDIR$CHCK_FILE is accessible... ok"
    else
        printf "%s\n" " * - couldn't access $MNTDIR$CHCK_FILE... something is wrong"
        printf "%b\n" "$(date +%F) - couldn't access $MNTDIR$CHCK_FILE... something is wrong" >> $LOG_FILE
        exit 1
    fi
fi
cd $TMPDIR
printf "%s\n" "moved to $(pwd)"
# check for access to the data dir
printf "%b\n" "\naccessing $DATADIR..."
if [[ -d $DATADIR ]]; then
    sudo chmod -R 775 $DATADIR && sudo chown -R $USRNAME:$DCKRGROUP $DATADIR
    printf "%s\n" " - $DATADIR is accessible, permissions applied and group ownership updated... ok"
else
    # First run: create the data dir, but there is no compose file yet, so
    # there is nothing to start — bail out after creating the skeleton.
    sudo mkdir -p $DATADIR && sudo chmod -R 775 $DATADIR && sudo chown -R $USRNAME:$DCKRGROUP $DATADIR
    printf "%s\n" " - $DATADIR has been created, permissions applied and group ownership updated... ok"
    printf "%s\n" " * - no docker-compose.yaml file has been found, exiting now..."
    exit 1
fi
# check if the docker daemon is running
#
printf "%b\n" "\nstarting the docker daemon..."
if (! sudo docker stats --no-stream ); then
    sudo systemctl start docker
    sleep 5
    #wait until docker daemon is running and has completed initialisation
    while (! sudo docker stats --no-stream ); do
        # docker takes a few seconds to initialize
        printf "%s\n" " - waiting for docker to launch..."
        sleep 5
    done
    sudo docker system prune -a -f --volumes
    printf "%s\n" " - docker daemon restarted... ok"
else
    sudo systemctl restart docker
    sleep 5
    printf "%s\n" " - docker daemon is running... ok"
fi
# start the docker container
#
sudo systemctl start docker-compose@$SRVCNAME
# add routing rules for docker network
#
sudo ipset create -! docker_lan_routable_net_set hash:net
sudo ipset add -! docker_lan_routable_net_set 172.16.1.0/24
sudo ipset create -! docker_wan_routable_net_set hash:net
sudo ipset add -! docker_wan_routable_net_set 172.16.1.0/24
# add local dnsmasq entry
#
# FIX: the original used `sudo printf ... > file`, but output redirection is
# performed by the invoking (non-root) shell, not by sudo, so the write could
# fail on a root-owned config dir. `sudo tee` performs the write as root.
printf "%b\n" "address=/$SRVCNAME/172.16.1.2" | sudo tee ~/.firewalla/config/dnsmasq_local/$SRVCNAME > /dev/null
# restart dns service
sudo systemctl restart firerouter_dns
# finished starting the docker container
printf "%b\n" "\nstart $SRVCNAME script has ended..."
##
|
package parser
import (
"github.com/jensneuse/graphql-go-tools/pkg/document"
"github.com/jensneuse/graphql-go-tools/pkg/lexing/keyword"
)
// parsePeekedBoolValue consumes the already-peeked boolean token and fills
// in the given Value: Raw holds the token literal, and Reference is set to
// 0 for `false` and 1 for `true` (any non-FALSE keyword falls into the
// else branch here).
func (p *Parser) parsePeekedBoolValue(value *document.Value) {
	tok := p.l.Read()
	value.Raw = tok.Literal
	if tok.Keyword == keyword.FALSE {
		value.Reference = 0
	} else {
		value.Reference = 1
	}
}
|
<reponame>zhengtingke/personal_blog
/*
* jQuery Equal Height
* Author: <NAME>
* Github: https://github.com/susonwaiba/jquery-equal-height
* URL: http://susonwaiba.com
*/
// Equalize the heights of `innerDiv` elements (default '.card') inside the
// matched set, row by row: elements sharing the same vertical offset are
// treated as one row and all get the tallest member's height.
$.fn.jQueryEqualHeight = function(innerDiv) {
    if (innerDiv == undefined) {
        innerDiv = '.card';
    }
    // currentTallest: max inner height seen in the current row.
    // currentRowStart: top offset identifying the row being accumulated.
    // rowDivs: elements collected for the current row.
    // NOTE(review): `currentDiv` below is assigned without var/let and thus
    // leaks as a global — presumably unintentional; confirm before fixing.
    var currentTallest = 0, currentRowStart = 0, rowDivs = new Array(), topPosition = 0;
    $(this).each(function() {
        // Reset to natural height before measuring.
        $(this).find(innerDiv).height('auto')
        topPosition = $(this).position().top;
        if (currentRowStart != topPosition) {
            // New row: flush the previous row at its tallest height...
            for (currentDiv = 0 ; currentDiv < rowDivs.length ; currentDiv++) {
                rowDivs[currentDiv].find(innerDiv).height(currentTallest);
            }
            // ...then start accumulating the new row with this element.
            rowDivs.length = 0;
            currentRowStart = topPosition;
            currentTallest = $(this).find(innerDiv).height();
            rowDivs.push($(this));
        } else {
            // Same row: track the running maximum height.
            rowDivs.push($(this));
            currentTallest = (currentTallest < $(this).find(innerDiv).height()) ? ($(this).find(innerDiv).height()) : (currentTallest);
        }
        // Apply the current max to every member of the in-progress row; this
        // also flushes the final row when the loop ends.
        for (currentDiv = 0 ; currentDiv < rowDivs.length ; currentDiv++) {
            rowDivs[currentDiv].find(innerDiv).height(currentTallest);
        }
    });
};
|
<reponame>Emi691/museum-collections-app
class TreatmentsController < ApplicationController

  # GET /treatments/:id/mark-as-done
  # Marks a treatment as finished, recording the piece's current condition as
  # the treatment's end condition. Only the owner of the piece may do this,
  # and the piece's condition must differ from the treatment's start
  # condition (otherwise the user is asked to update the condition first).
  get '/treatments/:id/mark-as-done' do
    # FIX: look up by id only. The original `Treatment.find_by(params)`
    # passed the entire Sinatra params hash, which also contains non-column
    # keys (e.g. "splat"/"captures"), causing ActiveRecord to raise on
    # unknown attributes.
    @treatment = Treatment.find_by(id: params[:id])
    @piece = @treatment.piece
    if logged_in? && @piece.user == current_user
      if @piece.condition == @treatment.start_condition
        flash[:message] = "Please update condition to mark treatments as done."
        redirect to :"/pieces/#{@piece.id}"
      else
        # `update` persists immediately; the original's extra `save` call
        # was redundant and has been dropped.
        @treatment.update(done: "true", end_condition: "#{@piece.condition}")
        flash[:message] = "treatment marked as done"
        redirect to :"/pieces/#{@piece.id}"
      end
    else
      # Not logged in or not the owner: bounce back to the piece page.
      redirect to :"/pieces/#{@piece.id}"
    end
  end

  # GET /treatments/:id/remove
  # Deletes a treatment. Only the owner of the associated piece may delete;
  # everyone else gets an explanatory flash and is redirected.
  get '/treatments/:id/remove' do
    @treatment = Treatment.find_by(id: params[:id])
    @piece = @treatment.piece
    if logged_in? && @piece.user == current_user
      @treatment.delete
      flash[:message] = "Treatment successfully deleted."
      redirect to :"/pieces/#{@piece.id}"
    else
      flash[:message] = "You cannot delete this treatment."
      redirect to :"/pieces/#{@piece.id}"
    end
  end
end
|
#!/bin/sh

# Synology DSM package control script for Vim. The package ships no daemon,
# so start/stop are no-ops that report success; status, log, and any other
# action are unsupported and report failure.

# Package
PACKAGE="vim"
DNAME="Vim"

case "$1" in
    start|stop)
        # Nothing to launch or tear down.
        exit 0
        ;;
    *)
        # status, log, and anything else are unsupported.
        exit 1
        ;;
esac
|
def find_difference(arr1, arr2):
    """Return the distinct elements of arr1 that do not appear in arr2.

    Duplicates are collapsed and element order is not guaranteed, matching
    the set-difference semantics of the original implementation.
    """
    excluded = set(arr2)
    return [item for item in set(arr1) if item not in excluded]
# NOTE(review): sample_array_1 / sample_array_2 are not defined in this file —
# presumably supplied by the surrounding script or notebook; confirm.
differences = find_difference(sample_array_1, sample_array_2)
|
<gh_stars>1-10
package back_tracking;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* @author minchoba
* 백준 14889번: 스타트와 링크
*
* @see https://www.acmicpc.net/problem/14889/
*
*/
public class Boj14889 {
    // Minimum |team A potential - team B potential| found so far.
    private static int diff = Integer.MAX_VALUE;
    // isVisited[i] == true means player i is on team A in the current split.
    private static boolean[] isVisited;
    // poten[i][j]: pairwise synergy added when i and j are on the same team.
    private static int[][] poten;

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());
        poten = new int[N][N];
        isVisited = new boolean[N];

        for(int i = 0; i < N; i++) {
            StringTokenizer st = new StringTokenizer(br.readLine());
            for(int j = 0; j < N; j++) {
                poten[i][j] = Integer.parseInt(st.nextToken());
            }
        }
        // Fix player 0 on team A (splits are symmetric under complement),
        // then choose the remaining N/2 - 1 members by backtracking.
        backTracking(N, 0, N / 2, 1);
        System.out.println(diff);
    }

    // Choose `depth` players for team A; `current` is the player being added,
    // `count` is how many have been chosen so far (including current).
    private static void backTracking(int n, int current, int depth, int count) {
        if(current >= n) return;
        if(isVisited[current]) return;
        isVisited[current] = true;

        if(depth == count) {
            int[][] tmp = getPermutation(n, depth);		// 방문 배열로 만들어진 순열 저장 (팀 구성) / split players into the two teams from the visited flags
            int aTeam = getTeamPotential(tmp[0], depth);	// 구성된 팀 마다 값을 구함 / score each team
            int bTeam = getTeamPotential(tmp[1], depth);

            diff = Math.min(diff, Math.abs(aTeam - bTeam));	// 최소 차이 / track the minimum difference
            return;
        }

        // Only try players with larger indices to avoid duplicate subsets.
        for(int next = current + 1; next < n; next++) {
            if(isVisited[next]) continue;
            backTracking(n, next, depth, count + 1);
            isVisited[next] = false;	// backTracking: undo the child's choice
        }
    }

    // Partition players by the visited flags: arr[0] = team A, arr[1] = team B.
    private static int[][] getPermutation(int n, int size){
        int[][] arr = new int[2][size];
        int idx1 = 0, idx2 = 0;

        for(int i = 0; i < n; i++) {
            if(isVisited[i]) arr[0][idx1++] = i;
            else arr[1][idx2++] = i;
        }
        return arr;
    }

    // Sum pairwise potentials (both directions) over all pairs in one team.
    private static int getTeamPotential(int[] arr, int size) {
        int sum = 0;
        for(int i = 0; i < size; i++) {
            for(int j = i + 1; j < size; j++) {
                sum += poten[arr[i]][arr[j]] + poten[arr[j]][arr[i]];
            }
        }
        return sum;
    }
}
|
package io.opensphere.core.security.options;
import java.awt.Component;
import java.awt.EventQueue;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableModel;
import org.apache.log4j.Logger;
import io.opensphere.core.SecurityManager;
import io.opensphere.core.options.OptionsProvider;
import io.opensphere.core.preferences.PreferencesRegistry;
import io.opensphere.core.quantify.Quantify;
import io.opensphere.core.security.config.v1.SecurityConfiguration;
import io.opensphere.core.util.collections.CollectionUtilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.ImpossibleException;
import io.opensphere.core.util.lang.StringUtilities;
import io.opensphere.core.util.security.CertificateUtilities;
/**
* An {@link OptionsProvider} for trusted server certs.
*/
public class TrustedCertificatesOptionsProvider extends AbstractTableOptionsProvider
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(TrustedCertificatesOptionsProvider.class);

    /**
     * Construct the security options provider.
     *
     * @param securityManager The system security manager.
     * @param prefsRegistry The system preferences registry.
     */
    public TrustedCertificatesOptionsProvider(SecurityManager securityManager, PreferencesRegistry prefsRegistry)
    {
        super(securityManager, prefsRegistry, "Trusted Certificates");
    }

    /**
     * Build the read-only table model listing all trusted server
     * certificates. The certificate data is loaded off the EDT by a
     * SwingWorker and filled into the model when ready.
     */
    @Override
    protected TableModel buildTableModel()
    {
        final Object[] columnIdentifiers = new Object[] { "Source", "Issued To", "Issued By", "Valid End" };
        final DefaultTableModel model = new DefaultTableModel((Object[][])null, columnIdentifiers)
        {
            /** Serial version UID. */
            private static final long serialVersionUID = 1L;

            @Override
            public boolean isCellEditable(int row, int column)
            {
                // Certificates are view-only in this table.
                return false;
            }
        };
        SwingWorker<Object[][], Void> worker = new SwingWorker<>()
        {
            @Override
            protected Object[][] doInBackground()
            {
                // "User" rows are user-added trust entries; everything else
                // in the full trusted set is labeled "System".
                Set<? extends X509Certificate> userTrustedServerCerts = New.set(getSecurityManager().getUserTrustedServerCerts());
                Collection<X509Certificate> trustedServerCerts = getSecurityManager().getTrustedServerCerts();
                Object[][] data = new Object[trustedServerCerts.size()][];
                int index = 0;
                for (X509Certificate cert : trustedServerCerts)
                {
                    data[index] = new Object[4];
                    data[index][0] = userTrustedServerCerts.contains(cert) ? "User" : "System";
                    data[index][1] = CertificateUtilities.getLastDistinguishedNamePart(cert.getSubjectDN().getName());
                    data[index][2] = CertificateUtilities.getLastDistinguishedNamePart(cert.getIssuerDN().getName());
                    data[index][3] = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z").format(cert.getNotAfter());
                    ++index;
                }
                return data;
            }

            @Override
            protected void done()
            {
                // Runs on the EDT; publish the loaded rows into the model.
                try
                {
                    Object[][] data = get();
                    model.setDataVector(data, columnIdentifiers);
                }
                catch (InterruptedException e)
                {
                    // done() is only called after completion, so this
                    // cannot happen.
                    throw new ImpossibleException(e);
                }
                catch (ExecutionException e)
                {
                    LOGGER.debug("Execution exception encountered.", e);
                    // Re-throw the background failure on the EDT.
                    if (e.getCause() instanceof Error)
                    {
                        throw (Error)e.getCause();
                    }
                    throw (RuntimeException)e.getCause();
                }
            }
        };
        worker.execute();
        return model;
    }

    /**
     * Only rows backed by a user-added certificate may be deleted; system
     * certificates are read-only.
     */
    @Override
    protected boolean canDeleteRows(int[] selectedRows)
    {
        for (int row : selectedRows)
        {
            byte[] encoded;
            try
            {
                encoded = getEncodedCert(row);
            }
            catch (CertificateEncodingException e)
            {
                if (LOGGER.isDebugEnabled())
                {
                    LOGGER.debug("The supplied certificate from row '" + row + "' encountered an encoding exception.", e);
                }
                return false;
            }
            // Compare by DER encoding against the user-trusted set.
            boolean found = false;
            Collection<byte[]> userTrustedCerts = getConfig().getUserTrustedCerts();
            for (byte[] encoded2 : userTrustedCerts)
            {
                if (Arrays.equals(encoded, encoded2))
                {
                    found = true;
                    break;
                }
            }
            if (!found)
            {
                return false;
            }
        }
        return true;
    }

    /**
     * Remove the user-trusted certificate shown on the given row. Shows an
     * error dialog if the row holds a system certificate or the certificate
     * cannot be encoded.
     */
    @Override
    protected void deleteRow(int row)
    {
        Quantify.collectMetric("mist3d.settings.security.trusted-certificates.delete-button");
        byte[] encoded;
        try
        {
            encoded = getEncodedCert(row);
        }
        catch (CertificateEncodingException e)
        {
            if (LOGGER.isDebugEnabled())
            {
                LOGGER.debug("The supplied certificate from row '" + row + "' encountered an encoding exception.", e);
            }
            JOptionPane.showMessageDialog(SwingUtilities.getWindowAncestor(getTable()), "The certificate could not be encoded.",
                    "Error", JOptionPane.ERROR_MESSAGE);
            return;
        }
        // Mutate a clone of the config and save it only if a match was
        // actually removed.
        boolean found = false;
        SecurityConfiguration config = getConfig().clone();
        Collection<byte[]> userTrustedCerts = config.getUserTrustedCerts();
        for (Iterator<byte[]> iter = userTrustedCerts.iterator(); iter.hasNext() && !found;)
        {
            byte[] encoded2 = iter.next();
            if (Arrays.equals(encoded, encoded2))
            {
                found = true;
                iter.remove();
            }
        }
        if (found)
        {
            saveConfig(config);
        }
        else
        {
            JOptionPane.showMessageDialog(SwingUtilities.getWindowAncestor(getTable()),
                    "That is a system certificate and cannot be deleted.", "Error", JOptionPane.ERROR_MESSAGE);
        }
    }

    @Override
    protected String getDeleteToolTipText()
    {
        return "Select a row to delete. Only \"User\" certificates can be deleted.";
    }

    @Override
    protected Component getDescriptionComponent()
    {
        return new JLabel(
                "<html>These are certificates that you trust. Other certificates linked to these certificates will also be trusted implicitly.</html>");
    }

    /**
     * Get the encoded certificate on a certain row.
     *
     * @param row The row.
     * @return The encoded certificate.
     * @throws CertificateEncodingException If the certificate cannot be
     *             encoded.
     */
    protected byte[] getEncodedCert(int row) throws CertificateEncodingException
    {
        return CollectionUtilities.getItem(getSecurityManager().getTrustedServerCerts(), row).getEncoded();
    }

    /**
     * Confirm deletion with the user; must be called on the EDT.
     */
    @Override
    protected boolean showDeleteMessageDialog()
    {
        assert EventQueue.isDispatchThread();
        return JOptionPane.showOptionDialog(SwingUtilities.getWindowAncestor(getTable()),
                "If you delete this certificate, you will be prompted to connect to servers that use it. Are you sure?",
                "Confirm Delete", JOptionPane.OK_CANCEL_OPTION, JOptionPane.WARNING_MESSAGE, null, null,
                null) == JOptionPane.OK_OPTION;
    }

    /**
     * Pop up a dialog with the full details of the certificate on a row.
     */
    @Override
    protected void showDetails(int row)
    {
        Quantify.collectMetric("mist3d.settings.security.trusted-certificates.details-button");
        assert EventQueue.isDispatchThread();
        X509Certificate item = CollectionUtilities.getItem(getSecurityManager().getTrustedServerCerts(), row);
        String detail = StringUtilities.convertToHTMLTable(CertificateUtilities.getDetailString("", item));
        JOptionPane.showMessageDialog(SwingUtilities.getWindowAncestor(getTable()), detail, "Certificate Details",
                JOptionPane.INFORMATION_MESSAGE);
    }
}
|
<reponame>Developerayo/opensource-design-workshop-futuresync
// Event fired whenever the stage rectangle changes (see layoutDisplay()).
var kStageSizeDidChangeEvent = "DisplayManager:StageSizeDidChangeEvent";
// Idle time (ms) before the desktop cursor is auto-hidden.
var kTimeoutValueForCursor = 1000;
// --- Phone portrait metrics (px) ---
var kMobilePortraitModeHorizontalMargin = 8;
var kMobilePortraitModeTopMargin = 47;
var kMobilePortraitModeVerticalCenterLine = 161;
var kMobilePortraitModeMaxStageHeight = 228;
// NOTE(review): the two zero "Max" values below are not referenced in this
// chunk — possibly dead; confirm against the rest of the file before removing.
var kMobilePortraitMaxStageHeight = 0;
var kMobilePortraitMaxStageWidth = 0;
// --- Phone landscape metrics (px) ---
var kMobileLandscapeModeVerticalMargin = 7;
// NOTE(review): "Horizontall" misspelling kept — the identifier is referenced
// below (layoutDisplay) and possibly elsewhere in the file.
var kMobileLandscapeModeHorizontallMargin = 15;
var kBottomButtonHeight = 50;
// --- Navigation / help chrome metrics (px) ---
var kNavigationArrowSize = 27;
var kNavigationAreaHeight = kNavigationArrowSize;
var kHelpAreaHeight = 16;
var kMobilePortraitModeVerticalCenterLineToNavigationAreaGap = 148;
var kStageToNavigationAreaGap = 31;
var kNavigationAreaToHelpAreaGap = 52;
var kHelpAreaToBottomGap = 12;
var kMobilePortraitModeNavigationAreaSideMargin = 32;
var kMobilePortraitModeHelpAreaSideMargin = 16;
var kMobileLandscapeModeMinSideSpacerWidth = kNavigationArrowSize + 10;
// --- iPad metrics (px) ---
var kPadPortraitModeHorizontalMargin = 8;
var kPadPortraitModeMaxStageHeight = 540;
var kPadPortraitModeVerticalCenterLine = 400;
var kPadLandscapeModeHorizontallMargin = 15;
var kPadLandscapeModeVerticalMargin = 7;
var DisplayManager = Class.create({
initialize: function () {
document.observe(
kShowSizeDidChangeEvent,
this.handleShowSizeDidChangeEvent.bind(this)
);
document.observe(
kOrientationChangedEvent,
this.handleOrientationDidChangeEvent.bind(this)
);
this.body = document.getElementById("body");
this.stageArea = document.getElementById("stageArea");
this.stage = document.getElementById("stage");
this.hyperlinkPlane = document.getElementById("hyperlinkPlane");
this.waitingIndicator = document.getElementById("waitingIndicator");
this.helpText = document.getElementById("helpText");
this.previousButton = document.getElementById("previousButton");
this.nextButton = document.getElementById("nextButton");
this.slideCounter = document.getElementById("slideCounter");
this.waitingIndicatorTimeout = null;
this.orientation = kOrientationUnknown;
this.showWidth = 0;
this.showHeight = 0;
this.stageAreaWidth = 0;
this.stageAreaHeight = 0;
this.stageAreaTop = 0;
this.stageAreaLeft = 0;
this.usableDisplayWidth = 0;
this.usableDisplayHeight = 0;
this.inLaunchMode = true;
this.initialAddressBarScrollPerformed = false;
this.updateUsableDisplayArea();
this.positionWaitingIndicator();
this.showWaitingIndicator();
this.hyperlinksOnly = false;
this.showStatisticsDisplay = gIpad && getUrlParameter("statistics") === "1";
this.hasCacheEverGoneOverPixelLimit = false;
this.hhasStageEverGoneOverPixelLimit = false;
this.cacheHighWaterMark = 0;
this.stageHighWaterMark = 0;
if (gMode === kModeMobile) {
this.stageArea.style.backgroundColor = "black";
this.helpText.innerHTML = kTapOrSwipeToAdvance;
} else {
Event.observe(this.body, "mousemove", this.handleMouseMove.bind(this));
this.lastMouseX = -1;
this.lastMouseY = -1;
this.cursorTimeout = null;
this.setTimeoutForCursor();
}
},
  // Puts the display into hyperlinks-only mode and hides the help text.
  setHyperlinksOnlyMode: function () {
    this.hyperlinksOnly = true;
    // NOTE(review): setting the flag above makes the two calls below
    // early-return, so the button classes are left untouched; statement
    // order preserved deliberately — confirm this is the intended effect.
    this.setPreviousButtonEnabled(false);
    this.setNextButtonEnabled(false);
    this.helpText.style.display = "none";
  },
handleMouseMove: function (a) {
a = a || window.event;
var b =
Math.abs(this.lastMouseX - a.clientX) +
Math.abs(this.lastMouseY - a.clientY);
if (b > 10) {
if (this.cursorIsShowing === false) {
this.showCursor();
} else {
if (!this.navigatorIsShowing) {
this.setTimeoutForCursor();
}
}
} else {
if (!this.navigatorIsShowing) {
this.setTimeoutForCursor();
}
}
this.lastMouseX = a.clientX;
this.lastMouseY = a.clientY;
},
updateSlideNumber: function (b, a) {
var d = "";
var c = null;
if (gMode != kModeDesktop) {
d = kSlideLabel + " " + b + "/" + a;
c = this.slideCounter;
}
if (c != null) {
c.innerHTML = d;
}
},
handleShowSizeDidChangeEvent: function (a) {
this.showWidth = a.memo.width;
this.showHeight = a.memo.height;
this.layoutDisplay();
},
handleOrientationDidChangeEvent: function (a) {
this.orientation = a.memo.orientation;
clearTimeout(this.resizeTimer);
this.resizeTimer = setTimeout(
this.handleOrientationDidChangeEvent_partTwo.bind(this),
300
);
},
handleOrientationDidChangeEvent_partTwo: function () {
this.layoutDisplay();
if (this.inLaunchMode === false) {
this.showApplicableControls();
}
},
showCursor: function () {
if (this.inLaunchMode) {
return;
}
this.body.style.cursor = "default";
this.cursorIsShowing = true;
this.setTimeoutForCursor();
},
hideCursor: function () {
this.body.style.cursor = "none";
this.cursorIsShowing = false;
},
setTimeoutForCursor: function () {
if (this.cursorTimeout) {
clearTimeout(this.cursorTimeout);
}
this.cursorTimeout = setTimeout(
this.handleTimeoutForCursor.bind(this),
kTimeoutValueForCursor
);
},
clearTimeoutForCursor: function () {
if (this.cursorTimeout) {
clearTimeout(this.cursorTimeout);
}
},
handleTimeoutForCursor: function () {
this.hideCursor();
},
updateUsableDisplayArea: function () {
if (gMode === kModeMobile) {
var a = gIpad;
if (this.orientation === kOrientationLandscape) {
this.usableDisplayWidth = a ? kiPadDeviceHeight : kiPhoneDeviceHeight;
this.usableDisplayHeight =
(a ? kiPadDeviceWidth : kiPhoneDeviceWidth) -
kiPhoneStatusBarHeight -
kiPhoneLandscapeButtonBarHeight -
(a ? kiPadAddressBarHeight + kiPadBookmarksBarHeight : 0);
} else {
this.usableDisplayWidth = a ? kiPadDeviceWidth : kiPhoneDeviceWidth;
this.usableDisplayHeight =
(a ? kiPadDeviceHeight : kiPhoneDeviceHeight) -
kiPhoneStatusBarHeight -
kiPhonePortraitButtonBarHeight -
(a ? kiPadBookmarksBarHeight + 10 : 0);
}
} else {
this.usableDisplayWidth = window.innerWidth;
this.usableDisplayHeight = window.innerHeight;
}
},
clearLaunchMode: function () {
this.inLaunchMode = false;
var a = this;
runInNextEventLoop(this.showAll.bind(this));
},
positionWaitingIndicator: function () {
var c = 110;
var b = 32;
var a;
var d;
if (gMode === kModeMobile && this.orientation === kOrientationUnknown) {
a = 1000;
d = 1000;
} else {
if (gMode === kModeMobile && this.orientation === kOrientationPortrait) {
a = (this.usableDisplayWidth - c) / 2;
if (gIpad === false) {
d = kMobilePortraitModeVerticalCenterLine - c / 2;
} else {
d = kPadPortraitModeVerticalCenterLine - c / 2;
}
} else {
a = (this.usableDisplayWidth - c) / 2;
d = (this.usableDisplayHeight - c) / 2;
}
}
setElementPosition(this.waitingIndicator, d, a, c, c);
},
  // Hides the loading spinner.
  hideWaitingIndicator: function () {
    this.waitingIndicator.style.display = "none";
  },
  // Shows the loading spinner.
  showWaitingIndicator: function () {
    this.waitingIndicator.style.display = "block";
  },
convertDisplayCoOrdsToShowCoOrds: function (d) {
var b = {};
var c = this.stageAreaLeft + this.stageAreaWidth;
var a = this.stageAreaTop + this.stageAreaHeight;
if (
d.pointX < this.stageAreaLeft ||
d.pointX > c ||
d.pointY < this.stageAreaTop ||
d.pointY > a
) {
b.pointX = -1;
b.pointY = -1;
} else {
b.pointX =
((d.pointX - this.stageAreaLeft) / this.stageAreaWidth) *
this.showWidth;
b.pointY =
((d.pointY - this.stageAreaTop) / this.stageAreaHeight) *
this.showHeight;
}
return b;
},
layoutDisplay: function () {
this.updateUsableDisplayArea();
var q;
var k;
if (gMode === kModeDesktop) {
q = this.usableDisplayWidth;
k = this.usableDisplayHeight;
if (!gShowController.isFullscreen) {
if (q > this.showWidth || k > k) {
q = this.showWidth;
k = k;
}
}
} else {
if (gIpad === false) {
if (this.orientation === kOrientationPortrait) {
q = this.usableDisplayWidth - 2 * kMobilePortraitModeHorizontalMargin;
k = kMobilePortraitModeMaxStageHeight;
} else {
q =
this.usableDisplayWidth - 2 * kMobileLandscapeModeHorizontallMargin;
k = this.usableDisplayHeight - 2 * kMobileLandscapeModeVerticalMargin;
}
} else {
if (this.orientation === kOrientationPortrait) {
q = this.usableDisplayWidth - 2 * kPadPortraitModeHorizontalMargin;
k = kPadPortraitModeMaxStageHeight;
} else {
q = this.usableDisplayWidth - 2 * kPadLandscapeModeHorizontallMargin;
k = this.usableDisplayHeight - 2 * kPadLandscapeModeVerticalMargin;
}
}
}
var o = scaleSizeWithinSize(this.showWidth, this.showHeight, q, k);
this.stageAreaWidth = o.width;
this.stageAreaHeight = o.height;
this.stageAreaLeft = (this.usableDisplayWidth - this.stageAreaWidth) / 2;
if (gMode === kModeDesktop) {
this.stageAreaTop = (k - this.stageAreaHeight) / 2;
} else {
if (this.orientation === kOrientationPortrait) {
if (gIpad === false) {
this.stageAreaTop = Math.max(
10,
kMobilePortraitModeVerticalCenterLine - this.stageAreaHeight / 2
);
} else {
this.stageAreaTop = Math.max(
10,
kPadPortraitModeVerticalCenterLine - this.stageAreaHeight / 2
);
}
} else {
this.stageAreaTop =
(this.usableDisplayHeight - this.stageAreaHeight) / 2;
}
}
setElementPosition(
this.stageArea,
this.stageAreaTop,
this.stageAreaLeft,
this.stageAreaWidth,
this.stageAreaHeight
);
var e = -1;
var b = -1;
var p = -1;
var h = -1;
var a = null;
if (gMode === kModeDesktop) {
a = false;
e = -1;
b = -1;
p = -1;
h = -1;
} else {
a = true;
p = 0;
h = 0;
if (gIpad) {
b = kiPadDeviceHeight;
} else {
b = kiPhoneDeviceHeight;
}
e = b;
}
if (p != -1 && h != -1 && e != -1 && b != -1) {
var s = document.getElementById("background");
s.style.top = p;
s.style.left = h;
s.style.width = e;
s.style.height = b;
if (a === true) {
s.style.visibility = "visible";
}
}
var g = {
x: 0,
y: 0,
width: this.usableDisplayWidth,
height: this.stageAreaTop,
};
var d = {
x: 0,
y: this.stageAreaTop + this.stageAreaHeight,
width: this.usableDisplayWidth,
height:
this.usableDisplayHeight - this.stageAreaTop - this.stageAreaHeight,
};
var n = {
x: 0,
y: this.stageAreaTop,
width: this.stageAreaLeft,
height: this.stageAreaHeight,
};
var i = {
x: this.stageAreaLeft + this.stageAreaWidth,
y: this.stageAreaTop,
width: this.usableDisplayWidth - this.stageAreaWidth - n.width,
height: this.stageAreaHeight,
};
var l = document.getElementById("statisticsDisplay");
if (
this.showStatisticsDisplay &&
gIpad &&
this.orientation === kOrientationPortrait
) {
setElementPosition(
l,
d.y + 70,
0,
this.usableDisplayWidth,
d.height - 105
);
l.style.visibility = "visible";
}
if (gMode != kModeDesktop) {
if (this.orientation === kOrientationPortrait) {
var m =
kNavigationArrowSize +
2 * kMobilePortraitModeNavigationAreaSideMargin;
var f = kNavigationArrowSize + 2 * kStageToNavigationAreaGap;
var r = this.usableDisplayWidth - 2 * m;
var c = d.y + 7;
setElementPosition(this.previousButton, c, 0, m, f);
setElementPosition(
this.slideCounter,
c + kStageToNavigationAreaGap,
m,
r,
f
);
setElementPosition(this.nextButton, c, m + r - 5, m, f);
setElementPosition(
this.helpText,
d.y + d.height - kHelpAreaToBottomGap - kHelpAreaHeight,
0,
this.usableDisplayWidth,
kHelpAreaHeight
);
setElementPosition(
this.infoPanelIcon,
this.usableDisplayHeight - kInfoPanelButtonHeight,
this.usableDisplayWidth - kInfoPanelButtonWidth - 5,
kInfoPanelButtonWidth,
kInfoPanelButtonHeight
);
} else {
var j = { x: 0, y: 0, width: 0, height: 0 };
if (n.width > kMobileLandscapeModeMinSideSpacerWidth) {
setElementRect(this.previousButton, n);
setElementRect(this.nextButton, i);
} else {
setElementRect(this.previousButton, j);
setElementRect(this.nextButton, j);
}
setElementRect(this.slideCounter, j);
setElementRect(this.helpText, j);
setElementRect(this.infoPanelIcon, j);
}
}
this.positionWaitingIndicator();
this.hideAddressBar();
document.fire(kStageSizeDidChangeEvent, {
left: this.stageAreaLeft,
top: this.stageAreaTop,
width: this.stageAreaWidth,
height: this.stageAreaHeight,
});
},
showApplicableControls: function () {
if (this.inLaunchMode === true) {
hideElement(this.previousButton);
hideElement(this.nextButton);
hideElement(this.slideCounter);
hideElement(this.helpText);
hideElement(this.infoPanelIcon);
} else {
if (gMode === kModeDesktop) {
hideElement(this.previousButton);
hideElement(this.nextButton);
hideElement(this.slideCounter);
hideElement(this.helpText);
hideElement(this.infoPanelIcon);
} else {
if (this.orientation === kOrientationPortrait) {
showElement(this.previousButton);
showElement(this.nextButton);
showElement(this.slideCounter);
showElement(this.helpText);
showElement(this.infoPanelIcon);
} else {
hideElement(this.slideCounter);
hideElement(this.helpText);
hideElement(this.infoPanelIcon);
if (this.stageAreaLeft > kMobileLandscapeModeMinSideSpacerWidth) {
showElement(this.previousButton);
showElement(this.nextButton);
} else {
hideElement(this.previousButton);
hideElement(this.nextButton);
}
}
}
}
this.hideAddressBar();
},
showAll: function () {
this.hideWaitingIndicator();
setTimeout(this.showAll_partTwo.bind(this));
},
showAll_partTwo: function () {
if (gDevice === kDeviceMobile) {
window.scrollTo(0, 1);
setTimeout(this.showAll_partThree.bind(this));
} else {
this.showAll_partThree();
}
},
showAll_partThree: function () {
if (this.inLaunchMode === false) {
this.showApplicableControls();
}
showElement(this.stageArea);
var a = navigator.userAgent.match(/Windows/);
if (a) {
if (gShowController.delegate.triggerReflow) {
gShowController.delegate.triggerReflow();
}
}
showElement(this.hyperlinkPlane);
if (gMode === kModeMobile) {
showElement(this.infoPanelIcon);
}
},
setPreviousButtonEnabled: function (a) {
if (this.hyperlinksOnly) {
return;
}
if (gMode != kModeDesktop) {
if (a) {
this.previousButton.setAttribute("class", "previousButtonEnabled");
} else {
this.previousButton.setAttribute("class", "previousButtonDisabled");
}
}
},
setNextButtonEnabled: function (a) {
if (this.hyperlinksOnly) {
return;
}
if (gMode != kModeDesktop) {
if (a) {
this.nextButton.setAttribute("class", "nextButtonEnabled");
} else {
this.nextButton.setAttribute("class", "nextButtonDisabled");
}
}
},
hideAddressBar: function () {
if (this.inLaunchMode) {
return;
}
if (gDevice === kDeviceMobile) {
var a = this.initialAddressBarScrollPerformed ? 0 : kHideAddressBarDelay;
setTimeout("window.scrollTo(0, 1);", a);
this.initialAddressBarScrollPerformed = true;
}
},
updateStatisticsDisplay: function () {
if (this.showStatisticsDisplay === false) {
return;
}
var k = document.getElementById("statisticsDisplay");
var j = gShowController.textureManager.getCacheStatistics();
var a = gShowController.scriptManager.degradeStatistics;
var h = gShowController.stageManager.debugGetStageStatistics();
var d = gShowController.textureManager.numLoadFailures;
var c = gShowController.textureManager.numOutstandingLoadRequests;
var i = 1024 * 1024;
var b = gSafeMaxPixelCount / i;
b = Math.floor(b * 100) / 100;
j.numPixels /= i;
h.numPixels /= i;
j.numPixels = Math.floor(j.numPixels * 100) / 100;
h.numPixels = Math.floor(h.numPixels * 100) / 100;
var e = false;
var g = false;
if (j.numPixels > b) {
e = true;
this.hasCacheEverGoneOverPixelLimit = true;
}
if (h.numPixels > b) {
g = true;
this.hasStageEverGoneOverPixelLimit = true;
}
if (j.numPixels > this.cacheHighWaterMark) {
this.cacheHighWaterMark = j.numPixels;
}
if (h.numPixels > this.stageHighWaterMark) {
this.stageHighWaterMark = h.numPixels;
}
var f =
"<div style='position: absolute; left: 0px;'><b>Cache Statistics:</b><br>- Scenes: <b>" +
j.numScenes +
"</b><br>- Textures: <b>" +
j.numTextures +
"</b><br>- Pixels: <b>" +
j.numPixels +
" MP</b><br>- Peak Pixels: <b>" +
this.cacheHighWaterMark +
" MP</b><br>%nbsp<br><b>Limits:</b><br>- Max Pixels: <b>" +
b +
" MP</b><br></div><div style='position: absolute; left: 175px;'><b>Scene Statistics:</b><br>- Scene Index: <b>" +
gShowController.currentSceneIndex +
"</b><br>- Textures: <b>" +
h.numTextures +
"</b><br>- Total Pixels: <b>" +
h.numPixels +
" MP</b><br>- Peak Pixels: <b>" +
this.stageHighWaterMark +
" MP</b><br><b>Texture Loader:</b><br>- Num Load Requests: <b>" +
(c > 0 ? "<span style='color:yellow;'>" + c + "</span>" : "0") +
"</b><br>- Num Load Failures: <b>" +
(d > 0 ? "<span style='color:red;'>" + d + "</span>" : "0") +
"</b><br></div><div style='position: absolute; left: 350px;'><b>Degrade Statistics:</b><br>- Scenes w/Degrades: <b>" +
a.numDegradedSlides +
"</b><br>- Total Textures Degraded: <b>" +
a.numDegradedTextures +
"</b><br>- Max Textures/Scene: <b>" +
a.maxNumDegradedTexturesPerSlide +
"</b><br>- Textures in Current: <b>" +
(h.numDegraded > 0
? "<span style='color:yellow;'>" + h.numDegraded + "</span>"
: "0") +
"</b><br></div><div style='position: absolute; left: 550px;'><b>Summary:</b><br>- Cache: <br>- Over Pixel Limit Now: <b>" +
(e ? "<span style='color:red;'>YES</span>" : "NO") +
"</b><br>- Ever Over Pixel Limit: <b>" +
(this.hasCacheEverGoneOverPixelLimit
? "<span style='color:red;'>YES</span>"
: "NO") +
"</b><br>- Stage: <br>- Over Pixel Limit Now: <b>" +
(g ? "<span style='color:red;'>YES</span>" : "NO") +
"</b><br>- Ever Over Pixel Limit: <b>" +
(this.hasStageEverGoneOverPixelLimit
? "<span style='color:red;'>YES</span>"
: "NO") +
"</b><br></div>";
k.innerHTML = f;
},
});
|
/**
* Copyright (C) 2006-2021 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.talend.sdk.component.studio.model.parameter;
import static org.talend.sdk.component.studio.model.parameter.Metadatas.MAIN_FORM;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.json.bind.annotation.JsonbCreator;
import javax.json.bind.annotation.JsonbProperty;
import org.talend.core.model.process.EParameterFieldType;
// A node in the tree built from a component's property definitions. Each node
// couples a property with its Studio field type and, after computePosition()
// runs, a per-form Layout.
public class PropertyNode {
// Path suffix naming the synthetic "update" button layout.
public static final String UPDATE_BUTTON = ".update";
// Path suffix naming the synthetic "test connection" button layout.
static final String CONNECTION_BUTTON = ".testConnection";
// Path suffix naming synthetic validation-row layouts.
static final String VALIDATION = "Validation";
private PropertyNode parent;
private final List<PropertyNode> children;
// Layouts keyed by form name (e.g. Main); filled during computePosition().
private final Map<String, Layout> layouts = new HashMap<>();
private final PropertyDefinitionDecorator property;
private final EParameterFieldType fieldType;
/**
 * Denotes whether this node is root in current tree
 */
private final boolean root;
public PropertyNode(final PropertyDefinitionDecorator property, final EParameterFieldType fieldType, final boolean root) {
this.property = property;
this.fieldType = fieldType;
this.root = root;
this.children = new ArrayList<>();
}
/*
Constructor for tests (also used by JSON-B deserialization)
*/
@JsonbCreator
public PropertyNode(@JsonbProperty("property") final PropertyDefinitionDecorator property,
@JsonbProperty("fieldType") final EParameterFieldType fieldType,
@JsonbProperty("root") final boolean root,
@JsonbProperty("children") List<PropertyNode> children) {
this.property = property;
this.fieldType = fieldType;
this.root = root;
this.children = new ArrayList<>(children);
}
// Appends a child and back-links its parent pointer to this node.
public void addChild(final PropertyNode child) {
children.add(child);
child.setParent(this);
}
// A node's id is its property's full path.
public String getId() {
return property.getPath();
}
// Id (path) of the parent property.
public String getParentId() {
return property.getParentPath();
}
public boolean isLeaf() {
return children.isEmpty();
}
/**
 * Checks whether it is column according ui::gridlayout for specified <code>form</code>
 *
 * @param form Name of form
 * @return true, if it column; false - otherwise
 */
public boolean isColumn(final String form) {
if (isRoot()) {
return false;
}
final PropertyDefinitionDecorator parentProperty = getParent().getProperty();
// Parent has no grid layout for this form: delegate the decision upward.
if (!parentProperty.hasGridLayout(form)) {
return getParent().isColumn(form);
}
return parentProperty.isColumn(property.getName(), form);
}
/**
 * Traverses all nodes (post-order: children are visited before this node)
 *
 * @param visitor the property visitor to use to traverse the nodes.
 */
public <T extends PropertyVisitor> T accept(final T visitor) {
children.forEach(child -> child.accept(visitor));
visitor.visit(this);
return visitor;
}
/**
 * Traverses nodes of specified <code>form</code> in sorted according metadata order
 * (post-order: children are visited before this node)
 *
 * @param visitor the property visitor to use to traverse the nodes.
 * @param form Name of form
 */
public void accept(final PropertyVisitor visitor, final String form) {
sortChildren(getChildren(form), form).forEach(child -> child.accept(visitor, form));
visitor.visit(this);
}
// Pre-order variant: this node is visited before its (sorted) children.
private void acceptParentFirst(final PropertyVisitor visitor, final String form) {
visitor.visit(this);
final List<PropertyNode> children = sortChildren(getChildren(form), form);
children.forEach(child -> child.acceptParentFirst(visitor, form));
}
/**
 * Returns children, which belongs to specified {@code form}
 *
 * @param form Name of form
 * @return children of specified form
 */
public List<PropertyNode> getChildren(final String form) {
final Set<String> childrenNames = new HashSet<>(getChildrenNames(form));
return children.stream().filter(node -> childrenNames.contains(node.property.getName())).collect(Collectors.toList());
}
/**
 * Checks whether subtree rooted by this node has leaves, which belongs to specified {@code form}.
 * Implemented as a breadth-first scan that uses the list as a growing work queue.
 *
 * @param form Name of form
 * @return true, if it has leaves
 */
public boolean hasLeaves(final String form) {
final ArrayList<PropertyNode> subNodes = new ArrayList<>(getChildren(form));
for (int i = 0; i < subNodes.size(); i++) {
final PropertyNode current = subNodes.get(i);
if (current.isLeaf()) {
return true;
} else {
subNodes.addAll(current.getChildren(form));
}
}
return false;
}
/**
 * Finds the direct child named {@code name} on the given {@code form}.
 *
 * @throws IllegalArgumentException when no such child exists
 */
private PropertyNode getChild(final String name, final String form) {
Objects.requireNonNull(name);
Objects.requireNonNull(form);
return getChildren(form).stream().filter(p -> name.equals(p.getProperty().getName())).findFirst()
.orElseThrow(() -> new IllegalArgumentException("no child with name " + name));
}
/**
 * Sorts children according order specified in metadata or do nothing if order is not specified
 *
 * @param children children node, which belongs specified form
 * @param form Name or form
 * @return sorted list
 */
protected List<PropertyNode> sortChildren(final List<PropertyNode> children, final String form) {
final Map<String, Integer> order = property.getChildrenOrder(form);
if (order != null) {
children.sort((node1, node2) -> {
// NOTE(review): order.get() returns null for a child absent from the
// declared order, which would NPE in compareTo below — presumably the
// metadata always lists every child; confirm.
Integer i1 = order.get(node1.getProperty().getName());
Integer i2 = order.get(node2.getProperty().getName());
return i1.compareTo(i2);
});
}
// else do not sort
return children;
}
/**
 * Returns all children names
 *
 * Subclasses may override this method
 *
 * @return children names
 */
protected List<String> getChildrenNames() {
return children.stream().map(n -> n.getProperty().getName()).collect(Collectors.toList());
}
/**
 * Returns children names for specified <code>form</code>.
 * If <code>form</code> is Main form its children may be specified by ui::gridlayout or ui:optionsorder.
 * If it has no both metadata, then all children are considered as Main children.
 * For other <code>form</code> children may be specified only by ui::gridlayout.
 *
 * @param form Name of form
 * @return children names of specified <code>form</code>
 */
protected List<String> getChildrenNames(final String form) {
if (MAIN_FORM.equals(form)) {
return getMainChildrenNames();
}
return property.getChildrenNames(form);
}
/**
 * Returns children names for Main form
 * If it has ui:gridlayout metadata value for Main form, then names are retrieved from there
 * If it has ui:gridlayout for other forms, then it is considered that Main form is empty
 * If it has ui:optionsorder (and has no any ui:gridlayout), then names are retrieved from there
 * If it has no both metadatas, then all children belong to Main form
 *
 * This implementation calls overridable {@link #getChildrenNames()} to get all children names
 *
 * @return children names for Main form
 */
private List<String> getMainChildrenNames() {
if (property.hasGridLayout(MAIN_FORM)) {
return property.getChildrenNames(MAIN_FORM);
}
if (property.hasGridLayouts()) {
return Collections.emptyList();
}
if (property.hasOptionsOrder()) {
return property.getOptionsOrderNames();
}
return getChildrenNames();
}
/**
 * Returns the layout registered for {@code name}.
 * Single map lookup instead of containsKey() + get(); addLayout() never
 * registers a null value, so a null result means "absent".
 *
 * @throws IllegalArgumentException when no layout was registered for {@code name}
 */
public Layout getLayout(final String name) {
    final Layout layout = layouts.get(name);
    if (layout == null) {
        throw new IllegalArgumentException("No layout " + name);
    }
    return layout;
}
/**
 * Creates layout for specified {@code form} and computes position for all children nodes.
 * It traverse a tree in-depth. Children nodes are visited before parent
 *
 * @param form Layout form for which node position is computed
 */
void computePosition(final String form) {
// Pass 1 (bottom-up): build layouts and compute their heights.
accept(new LayoutHeightVisitor(form), form);
// Pass 2 (top-down): assign row positions from the computed heights.
acceptParentFirst(new LayoutPositionVisitor(form), form);
}
// Registers a layout for the form; an already-registered layout is kept.
void addLayout(final String name, final Layout layout) {
layouts.putIfAbsent(name, layout);
}
// Bottom-up visitor: builds each node's Layout for the form and computes its
// height in rows. Relies on children being visited before their parent.
private static class LayoutHeightVisitor implements PropertyVisitor {
private final String form;
// Node being processed by the current visit() call.
private PropertyNode current;
@Override
public void visit(final PropertyNode node) {
this.current = node;
createLayout();
computeHeight();
}
// Builds the node's layout: grid rows when ui::gridlayout metadata exists,
// otherwise one row per (sorted) child; synthetic button rows are appended.
private void createLayout() {
final Layout layout = new Layout(current.getId());
if (!current.isLeaf()) {
if (current.getProperty().hasGridLayout(form)) {
fillGridLayout(layout, current.getProperty().getUpdatable());
} else {
fillSimpleLayout(layout, current.getProperty().getUpdatable());
}
if (current.getProperty().isCheckable()) {
addButton(layout, CONNECTION_BUTTON);
}
// Updatable with no trigger ("previous") property: update button goes last.
if (current.getProperty().getUpdatable().map(v -> v.getPreviousProperty().isEmpty()).orElse(false)) {
addButton(layout, UPDATE_BUTTON);
}
}
current.addLayout(form, layout);
}
// One Level per "|"-separated row of the ui::gridlayout value; every
// ","-separated token becomes a column referencing that child's layout.
private void fillGridLayout(final Layout layout, final Optional<PropertyDefinitionDecorator.Updatable> updatable) {
final String gridLayout = current.getProperty().getGridLayout(form);
final String[] rows = gridLayout.split("\\|");
// create Level for each row
for (final String row : rows) {
final Level level = new Level();
layout.getLevels().add(level);
for (final String column : row.split(",")) {
final PropertyNode child = current.getChild(column, form);
if (child.getProperty().hasConstraint() || child.getProperty().hasValidation()) {
addValidationLevel(child, layout);
}
if (matches(updatable, column)) {
addButton(layout, UPDATE_BUTTON);
}
level.getColumns().add(child.getLayout(form));
}
}
}
private void fillSimpleLayout(final Layout layout, final Optional<PropertyDefinitionDecorator.Updatable> updatable) {
final List<PropertyNode> children = current.sortChildren(current.getChildren(form), form);
children.forEach(child -> {
final Level level = new Level();
layout.getLevels().add(level);
// each level contains only one column, when there is no GridLayout
level.getColumns().add(child.getLayout(form));
if (child.getProperty().hasConstraint() || child.getProperty().hasValidation()) {
addValidationLevel(child, layout);
}
if (matches(updatable, child.getProperty().getName())) {
addButton(layout, UPDATE_BUTTON);
}
});
}
// True when the updatable's trigger ("previous") property equals name.
private boolean matches(final Optional<PropertyDefinitionDecorator.Updatable> updatable, final String name) {
return updatable.map(v -> name.equals(v.getPreviousProperty())).orElse(false);
}
// Appends a one-column level holding the node's validation-message layout.
private void addValidationLevel(final PropertyNode node, final Layout layout) {
final Level level = new Level();
Layout validationLayout = new Layout(node.getProperty().getPath() + VALIDATION);
level.getColumns().add(validationLayout);
layout.getLevels().add(level);
}
/**
 * Adds a synthetic one-row button layout (e.g. "Test Connection" or
 * "Update") at the bottom of the given layout.
 *
 * @param layout parent node layout
 */
private void addButton(final Layout layout, final String buttonName) {
final Layout buttonLayout = new Layout(layout.getPath() + buttonName);
buttonLayout.setHeight(1);
final Level level = new Level();
level.getColumns().add(buttonLayout);
layout.getLevels().add(level);
}
// Leaves are 1 row (2 with a validation row); parents sum their levels,
// each level being as tall as its tallest column.
private void computeHeight() {
final Layout layout = current.getLayout(form);
int height = 0;
if (current.isLeaf()) {
height = 1;
if (current.getProperty().hasConstraint() || current.getProperty().hasValidation()) {
height++;
}
} else {
layout.getLevels().forEach(level -> {
final int levelHeight = level.getColumns().stream().mapToInt(Layout::getHeight).max().getAsInt();
level.setHeight(levelHeight);
});
height = layout.getLevels().stream().mapToInt(Level::getHeight).sum();
}
layout.setHeight(height);
}
public LayoutHeightVisitor(final String form) {
this.form = form;
}
}
// Top-down visitor: assigns absolute row positions to levels and columns
// using the heights computed by LayoutHeightVisitor.
private static class LayoutPositionVisitor implements PropertyVisitor {
/**
 * First 2 position are occupied by schema and property type
 */
private static final int INITIAL_POSITION = 3;
private final String form;
@Override
public void visit(final PropertyNode node) {
if (!node.isLeaf()) {
final Layout layout = node.getLayout(form);
if (node.isRoot()) {
layout.setPosition(INITIAL_POSITION);
}
int position = layout.getPosition();
for (final Level level : layout.getLevels()) {
level.setPosition(position);
for (final Layout column : level.getColumns()) {
column.setPosition(position);
}
// Next level starts directly below the current one.
position = position + level.getHeight();
}
} // else no-op as position is set during visiting only parent node
}
public LayoutPositionVisitor(final String form) {
this.form = form;
}
}
// Plain generated-style accessors.
public PropertyNode getParent() {
return this.parent;
}
public List<PropertyNode> getChildren() {
return this.children;
}
public Map<String, Layout> getLayouts() {
return this.layouts;
}
public PropertyDefinitionDecorator getProperty() {
return this.property;
}
public EParameterFieldType getFieldType() {
return this.fieldType;
}
/**
 * Denotes whether this node is root in current tree
 */
public boolean isRoot() {
return this.root;
}
@Override
public boolean equals(final Object o) {
    // FIX: the parent field is intentionally NOT compared. Comparing parents
    // recursed back into this comparison (parent.equals -> parent's children
    // -> child.equals -> child's parent -> ...) and overflowed the stack for
    // any two distinct nodes with non-null parents. hashCode() already skips
    // parent (and children) for the same reason — see its comment.
    // Comparing children alone is safe: it only descends the finite subtree.
    if (o == this) {
        return true;
    }
    if (!(o instanceof PropertyNode)) {
        return false;
    }
    final PropertyNode other = (PropertyNode) o;
    if (!other.canEqual(this)) {
        return false;
    }
    if (!Objects.equals(this.getChildren(), other.getChildren())) {
        return false;
    }
    if (!Objects.equals(this.getLayouts(), other.getLayouts())) {
        return false;
    }
    if (!Objects.equals(this.getProperty(), other.getProperty())) {
        return false;
    }
    if (!Objects.equals(this.getFieldType(), other.getFieldType())) {
        return false;
    }
    return this.isRoot() == other.isRoot();
}
// Lombok-style type guard used by equals() to keep symmetry with subclasses.
protected boolean canEqual(final Object other) {
return other instanceof PropertyNode;
}
@Override
public int hashCode() {
final int PRIME = 59;
int result = 1;
// don't calc parent/children, because it has dead loop/stackoverflow issue
// final Object $parent = this.getParent();
// result = result * PRIME + ($parent == null ? 43 : $parent.hashCode());
// final Object $children = this.getChildren();
// result = result * PRIME + ($children == null ? 43 : $children.hashCode());
final Object $layouts = this.getLayouts();
result = result * PRIME + ($layouts == null ? 43 : $layouts.hashCode());
final Object $property = this.getProperty();
result = result * PRIME + ($property == null ? 43 : $property.hashCode());
final Object $fieldType = this.getFieldType();
result = result * PRIME + ($fieldType == null ? 43 : $fieldType.hashCode());
result = result * PRIME + (this.isRoot() ? 79 : 97);
return result;
}
@Override
public String toString() {
return "PropertyNode(children=" + this.getChildren() + ", layouts=" + this.getLayouts() + ", property="
+ this.getProperty() + ", fieldType=" + this.getFieldType() + ", root=" + this.isRoot() + ")";
}
// Back-link setter; called from addChild().
protected void setParent(final PropertyNode parent) {
this.parent = parent;
}
}
|
<filename>api/helpers/generate-token.js
const crypto = require('crypto');
const moment = require('moment-timezone');
module.exports = {
sync: true, // this is a synchronous helper
friendlyName: 'Generate token',
description: 'Generate generic token for generic use, generically. (64 characters)',
inputs: {
extra: {
type: 'string',
description: 'A bit of random, extra bits to change up the hash.',
defaultsTo: 'Evil will always triumph, because good is dumb. -<NAME>'
}
},
exits: {},
fn: function(inputs, exits) {
return exits.success(
crypto.createHmac('sha256', sails.config.session.secret)
.update(
crypto.randomBytes(21) // cryptographically-secure random characters
+ moment(new Date()).format() // throw in the current time stamp
+ 'I am a tea pot' // the best HTTP status code
+ inputs.extra // an optional way to add a bit more randomness to the mix
+ crypto.randomBytes(21)
)
.digest('hex')
);
}
};
|
#!/bin/bash
# This script lists the contents of the
# current directory and its sub-directories.
#
# FIX: the original iterated over $(ls -R .), which (a) word-splits on any
# whitespace in file names, (b) yields ls -R's "dir:" header lines instead of
# real paths, and (c) used the unquoted test [ -d $entry ]. Walking the tree
# with find and reading NUL-free lines with IFS= read -r handles names with
# spaces correctly and tests real paths.
find . -mindepth 1 | while IFS= read -r entry
do
    if [ -d "$entry" ]
    then
        echo "dir $entry"
    else
        echo "file $entry"
    fi
done
|
#!/bin/bash
# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
REPO_VERSION=${NVIDIA_TENSORRT_SERVER_VERSION}
if [ "$#" -ge 1 ]; then
REPO_VERSION=$1
fi
if [ -z "$REPO_VERSION" ]; then
echo -e "Repository version must be specified"
echo -e "\n***\n*** Test Failed\n***"
exit 1
fi
# Pin the server to one GPU so perf numbers are comparable across runs.
export CUDA_VISIBLE_DEVICES=0
CLIENT_LOG="./perf_client.log"
PERF_CLIENT=../clients/perf_client
# Model repository served by trtserver, plus generated file-based input data.
DATADIR=`pwd`/models
TESTDATADIR=`pwd`/test_data
# Canned JSON files fed to perf_client via --input-data.
INT_JSONDATAFILE=`pwd`/json_input_data_files/int_data.json
INT_DIFFSHAPE_JSONDATAFILE=`pwd`/json_input_data_files/int_data_diff_shape.json
FLOAT_DIFFSHAPE_JSONDATAFILE=`pwd`/json_input_data_files/float_data_with_shape.json
STRING_JSONDATAFILE=`pwd`/json_input_data_files/string_data.json
STRING_WITHSHAPE_JSONDATAFILE=`pwd`/json_input_data_files/string_data_with_shape.json
SEQ_JSONDATAFILE=`pwd`/json_input_data_files/seq_data.json
# NOTE(review): variable name looks like a typo for SHAPETENSORDATAFILE, but
# it is referenced consistently below, so renaming must touch every use site.
SHAPETENSORADTAFILE=`pwd`/json_input_data_files/shape_tensor_data.json
SERVER=/opt/tensorrtserver/bin/trtserver
SERVER_ARGS=--model-repository=$DATADIR
SERVER_LOG="./inference_server.log"
# grep pattern (BRE: \| is alternation) flagging a failed or idle perf run.
# The first two " | " pipes are literal text, not alternations.
ERROR_STRING="error | Request count: 0 | : 0 infer/sec\|: 0 usec"
# Provides run_server and SERVER_PID handling.
source ../common/util.sh
rm -f $SERVER_LOG $CLIENT_LOG
rm -rf $DATADIR $TESTDATADIR
mkdir -p $DATADIR
# Copy fixed-shape models
cp -r /data/inferenceserver/${REPO_VERSION}/qa_model_repository/graphdef_int32_int32_int32 $DATADIR/
cp -r /data/inferenceserver/${REPO_VERSION}/qa_model_repository/graphdef_nobatch_int32_int32_int32 $DATADIR/
cp -r /data/inferenceserver/${REPO_VERSION}/qa_model_repository/graphdef_object_object_object $DATADIR/
cp -r /data/inferenceserver/${REPO_VERSION}/qa_model_repository/graphdef_nobatch_object_object_object $DATADIR/
# Copy a variable-shape models
cp -r /data/inferenceserver/${REPO_VERSION}/qa_variable_model_repository/graphdef_object_int32_int32 $DATADIR/
cp -r /data/inferenceserver/${REPO_VERSION}/qa_variable_model_repository/graphdef_int32_int32_float32 $DATADIR/
# Copy shape tensor models
cp -r /data/inferenceserver/${REPO_VERSION}/qa_shapetensor_model_repository/plan_zero_1_float32 $DATADIR/
# Copying ensemble including a sequential model
cp -r /data/inferenceserver/${REPO_VERSION}/qa_sequence_model_repository/savedmodel_sequence_object $DATADIR
cp -r /data/inferenceserver/${REPO_VERSION}/qa_ensemble_model_repository/qa_sequence_model_repository/simple_savedmodel_sequence_object $DATADIR
cp -r /data/inferenceserver/${REPO_VERSION}/qa_ensemble_model_repository/qa_sequence_model_repository/nop_TYPE_FP32_-1 $DATADIR
# Copying variable sequence model
cp -r /data/inferenceserver/${REPO_VERSION}/qa_variable_sequence_model_repository/graphdef_sequence_float32 $DATADIR
mkdir $DATADIR/nop_TYPE_FP32_-1/1
cp libidentity.so $DATADIR/nop_TYPE_FP32_-1/1/
# Copy inception model to the model repository
cp -r /data/inferenceserver/${REPO_VERSION}/tf_model_store/inception_v1_graphdef $DATADIR
# Copy resnet50v1.5_fp16
cp -r /data/inferenceserver/${REPO_VERSION}/perf_model_store/resnet50v1.5_fp16_savedmodel $DATADIR
# Generating test data
mkdir -p $TESTDATADIR
# Seed $TESTDATADIR/INPUT0 and $TESTDATADIR/INPUT1 with sixteen lines of "1"
# each; these serve as file-based input data for later perf_client runs.
for INPUT in INPUT0 INPUT1; do
    for i in $(seq 1 16); do
        printf '1\n' >> $TESTDATADIR/${INPUT}
    done
done
RET=0
run_server
if [ "$SERVER_PID" == "0" ]; then
echo -e "\n***\n*** Failed to start $SERVER\n***"
cat $SERVER_LOG
exit 1
fi
# Sanity check on measurements are not all zero
# Testing simple configurations with different shared memory types
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_int32 -t 1 -p2000 -b 1 \
--shared-memory=$SHARED_MEMORY_TYPE >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_int32 -t 1 -p2000 -b 1 -a \
--shared-memory=$SHARED_MEMORY_TYPE>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
# Testing with inception model
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -m inception_v1_graphdef -t 1 -p2000 -b 1 \
--shared-memory=$SHARED_MEMORY_TYPE >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -v -m inception_v1_graphdef -t 1 -p2000 -b 1 -a \
--shared-memory=$SHARED_MEMORY_TYPE>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
# Testing with resnet50 models with large batch sizes
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -m inception_v1_graphdef -t 2 -p2000 -b 64 \
--shared-memory=$SHARED_MEMORY_TYPE >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -i grpc -v -m inception_v1_graphdef -t 2 -p2000 -b 64 \
--shared-memory=$SHARED_MEMORY_TYPE>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
# Test perf client behavior on different model with different batch size
for MODEL in graphdef_nobatch_int32_int32_int32 graphdef_int32_int32_int32; do
# Valid batch size
set +e
$PERF_CLIENT -v -i grpc -m $MODEL -t 1 -p2000 -b 1 >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
# Invalid batch sizes
for STATIC_BATCH in 0 10; do
set +e
$PERF_CLIENT -v -i grpc -m $MODEL -t 1 -p2000 -b $STATIC_BATCH >$CLIENT_LOG 2>&1
if [ $? -eq 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
done
# Testing with the new arguments
set +e
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_int32 >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
# Sweep concurrency 1..5 in steps of 2 (i.e. 1, 3, 5); a report line of
# "Request concurrency: 2" must therefore never appear.
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_int32 --concurrency-range 1:5:2 >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
fi
# FIX: the final pipe before "Request concurrency: 2" was a *literal* "|" in
# grep's basic-regex syntax, so that alternative could never match and the
# step-size check was dead. It must be "\|" (GNU BRE alternation).
if [ $(cat $CLIENT_LOG | grep "error | Request count: 0 | : 0 infer/sec\|: 0 usec\|Request concurrency: 2" | wc -l) -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
fi
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_int32 --concurrency-range 1:5:2 \
    --input-data=${INT_JSONDATAFILE} >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
fi
# Same alternation fix as above.
if [ $(cat $CLIENT_LOG | grep "error | Request count: 0 | : 0 infer/sec\|: 0 usec\|Request concurrency: 2" | wc -l) -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
fi
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_int32 --request-rate-range 1000:2000:500 \
-p1000 -b 1 -a>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_int32 --request-rate-range 1000:2000:500 \
--input-data=${INT_JSONDATAFILE} -p1000 -b 1 -a>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_int32 --request-rate-range 1000:2000:100 -p1000 -b 1 \
-a --binary-search --request-distribution "poisson" -l 10 >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
# Re-enable exit-on-error after the preceding unchecked perf_client runs.
# (A duplicated, no-op second "set -e" was removed here.)
set -e
# Testing with combinations of string input and shared memory types
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m graphdef_object_object_object --string-data=1 -p2000 \
--shared-memory=$SHARED_MEMORY_TYPE>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
# Testing with combinations of file inputs and shared memory types
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m graphdef_object_object_object --input-data=$TESTDATADIR -p2000 \
--shared-memory=$SHARED_MEMORY_TYPE>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m graphdef_object_object_object --input-data=$STRING_JSONDATAFILE \
--input-data=$STRING_JSONDATAFILE -p2000 --shared-memory=$SHARED_MEMORY_TYPE>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
# Testing with combinations of variable inputs and shared memory types
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m graphdef_object_int32_int32 --input-data=$TESTDATADIR \
--shape INPUT0:2,8 --shape INPUT1:2,8 -p2000 --shared-memory=$SHARED_MEMORY_TYPE \
>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m graphdef_object_int32_int32 --input-data=$STRING_WITHSHAPE_JSONDATAFILE \
--shape INPUT0:2,8 --shape INPUT1:2,8 -p2000 --shared-memory=$SHARED_MEMORY_TYPE \
>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
set +e
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_float32 --shape INPUT0:2,8,2 \
--shape INPUT1:2,8,2 -p2000 >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
# Trying to batch tensors with different shape
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m graphdef_int32_int32_float32 --shape INPUT0:2,8,2 --shape INPUT1:2,8,2 -p2000 -b 4 \
--shared-memory=$SHARED_MEMORY_TYPE --input-data=$INT_DIFFSHAPE_JSONDATAFILE >$CLIENT_LOG 2>&1
if [ $? -eq 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "can not batch tensors with different shapes together" | wc -l) -eq 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
# Shape tensor I/O model
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m plan_zero_1_float32 --input-data=$SHAPETENSORADTAFILE \
--shape DUMMY_INPUT0:4,4 -p2000 --shared-memory=$SHARED_MEMORY_TYPE -b 8 \
>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep ": 0 infer/sec\|: 0 usec" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
set +e
# Testing with ensemble and sequential model variants
$PERF_CLIENT -v -i grpc -m simple_savedmodel_sequence_object -p 2000 -t5 --streaming \
--input-data=$SEQ_JSONDATAFILE --input-data=$SEQ_JSONDATAFILE >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -v -i grpc -m simple_savedmodel_sequence_object -p 2000 -t5 --sync \
--input-data=$SEQ_JSONDATAFILE >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -v -i grpc -m simple_savedmodel_sequence_object -p 2000 -t5 --sync \
--input-data=$SEQ_JSONDATAFILE >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -v -m simple_savedmodel_sequence_object -p 2000 -t5 --sync \
--input-data=$SEQ_JSONDATAFILE >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
$PERF_CLIENT -v -m simple_savedmodel_sequence_object -p 1000 --request-rate-range 100:200:50 --sync \
--input-data=$SEQ_JSONDATAFILE >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
# Testing with variable ensemble model. This unit specifies different shape values
# for different inferences.
for SHARED_MEMORY_TYPE in none system cuda; do
set +e
$PERF_CLIENT -v -i grpc -m graphdef_sequence_float32 --shape INPUT:2 --input-data=$FLOAT_DIFFSHAPE_JSONDATAFILE \
--input-data=$FLOAT_DIFFSHAPE_JSONDATAFILE -p2000 --shared-memory=$SHARED_MEMORY_TYPE >$CLIENT_LOG 2>&1
if [ $? -eq 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
if [ $(cat $CLIENT_LOG | grep "Inputs to operation Select of type Select must have the same size and shape." | wc -l) -eq 0 ]; then
cat $CLIENT_LOG
echo -e "\n***\n*** Test Failed\n***"
RET=1
fi
set -e
done
# Fix me: Uncomment after fixing DLIS-1054
## Testing with very large concurrencies and large dataset
#INPUT_DATA_OPTION="--input-data $SEQ_JSONDATAFILE "
#for i in {1..9}; do
# INPUT_DATA_OPTION=" ${INPUT_DATA_OPTION} ${INPUT_DATA_OPTION}"
#done
#set +e
#$PERF_CLIENT -v -m simple_savedmodel_sequence_object -p 10000 --concurrency-range 1500:2500:500 -i grpc --streaming \
#${INPUT_DATA_OPTION} >$CLIENT_LOG 2>&1
#if [ $? -ne 0 ]; then
# cat $CLIENT_LOG
# echo -e "\n***\n*** Test Failed\n***"
# RET=1
#fi
#if [ $(cat $CLIENT_LOG | grep "${ERROR_STRING}" | wc -l) -ne 0 ]; then
# cat $CLIENT_LOG
# echo -e "\n***\n*** Test Failed\n***"
# RET=1
#fi
#set -e
kill $SERVER_PID
wait $SERVER_PID
if [ $RET -eq 0 ]; then
echo -e "\n***\n*** Test Passed\n***"
else
echo -e "\n***\n*** Test FAILED\n***"
fi
exit $RET
|
import { AbstractFeature } from '../../model/gml/AbstractFeature';
import { AbstractGML } from '../../model/gml/AbstractGML';
import { CodeType } from '../../model/gml/CodeType';
import { Envelope } from '../../model/gml/Envelope';
import { NAMESPACES } from './Namespaces';
import { Point } from '../../model/gml/Point';
import { Referenced } from '../../model/gml/Referenced';
import { TimeInstant } from '../../model/gml/TimeInstant';
import { TimePeriod } from '../../model/gml/TimePeriod';
import { AbstractTime } from '../../model/gml/AbstractTime';
import { DecoderUtils } from './DecoderUtils';
import { ReturnObject } from './ReturnObject';
import { BidiMap } from '../dynamicGUI/BidiMap';
/**
 * Decoder for a subset of GML (Geography Markup Language) XML elements into
 * the corresponding model classes: times (TimeInstant/TimePeriod), code
 * types, points, envelopes and abstract feature/GML attributes.
 *
 * NOTE(review): nearly every decode step threads `_profileIDMap` through
 * `DecoderUtils.processProfileID(...)` and reassigns the result. The exact
 * contract of processProfileID is defined elsewhere (presumably it records a
 * mapping between decoded properties and dynamic-GUI profile IDs) — confirm
 * before reordering or short-circuiting any of these call chains.
 */
export class GmlDecoder {
    private utils = new DecoderUtils();
    // Bidirectional element/property <-> profile-ID map; mutated (reassigned)
    // as a side effect of every decode call on this instance.
    private _profileIDMap: BidiMap;

    public get profileIDMap() {
        return this._profileIDMap;
    }

    public set profileIDMap(profileIDMap: BidiMap) {
        this._profileIDMap = profileIDMap;
    }

    /**
     * Decodes either a gml:TimeInstant or a gml:TimePeriod child of `elem`.
     * Returns undefined (implicitly) when neither child element is present.
     */
    public decodeTime(elem: Element): ReturnObject<AbstractTime> {
        const timeInstant = this.decodeTimeInstant(elem);
        if (timeInstant != null) { return timeInstant; }
        const timePeriod = this.decodeTimePeriod(elem);
        if (timePeriod != null) { return timePeriod; }
    }

    /**
     * Decodes a gml:TimeInstant child of `elem` (gml:id/description/name
     * attributes plus its gml:timePosition). Returns undefined when absent.
     */
    public decodeTimeInstant(elem: Element): ReturnObject<AbstractTime> {
        const timeElem = this.utils.getElement(elem, 'TimeInstant', NAMESPACES.GML);
        if (timeElem != null) {
            const instant = new TimeInstant();
            this._profileIDMap = this.utils.processProfileID(timeElem, instant, '', this._profileIDMap);
            this.decodeAbstractGML(timeElem, instant);
            const timePositionElem = this.utils.getElement(timeElem, 'timePosition', NAMESPACES.GML);
            if (timePositionElem != null) {
                instant.time = this.getTime(timePositionElem);
                this._profileIDMap = this.utils.processProfileID(timePositionElem, instant, 'time', this._profileIDMap);
            }
            return new ReturnObject(instant, timeElem);
        }
    }

    /**
     * Decodes a gml:TimePeriod child of `elem`, reading its beginPosition and
     * endPosition independently (either may be missing). Returns undefined
     * when no TimePeriod child exists.
     */
    public decodeTimePeriod(elem: Element): ReturnObject<AbstractTime> {
        const timeElem = this.utils.getElement(elem, 'TimePeriod', NAMESPACES.GML);
        if (timeElem != null) {
            const period = new TimePeriod();
            this._profileIDMap = this.utils.processProfileID(timeElem, period, '', this._profileIDMap);
            this.decodeAbstractGML(timeElem, period);
            const beginPositionElem = this.utils.getElement(timeElem, 'beginPosition', NAMESPACES.GML);
            if (beginPositionElem != null) {
                period.begin = this.getTime(beginPositionElem);
                this._profileIDMap = this.utils.processProfileID(
                    beginPositionElem, period, 'begin', this._profileIDMap
                );
            }
            const endPositionElem = this.utils.getElement(timeElem, 'endPosition', NAMESPACES.GML);
            if (endPositionElem != null) {
                period.end = this.getTime(endPositionElem);
                this._profileIDMap = this.utils.processProfileID(endPositionElem, period, 'end', this._profileIDMap);
            }
            return new ReturnObject(period, timeElem);
        }
    }

    /**
     * Populates the AbstractGML base attributes (gml:id, description,
     * descriptionReference, identifier, name list) on `object` from `elem`,
     * registering each decoded property in the profile-ID map.
     */
    public decodeAbstractGML(elem: Element, object: AbstractGML): void {
        if (elem.hasAttributeNS(NAMESPACES.GML, 'id')) {
            object.gmlId = elem.getAttributeNS(NAMESPACES.GML, 'id');
            this._profileIDMap = this.utils.processProfileID(elem, object, 'gmlId', this._profileIDMap);
        }
        const descriptionElem = this.utils.getElement(elem, 'description', NAMESPACES.GML);
        if (descriptionElem != null) {
            object.description = descriptionElem.textContent;
            this._profileIDMap = this.utils.processProfileID(
                descriptionElem, object, 'description', this._profileIDMap
            );
        }
        const descriptionReferenceElem = this.utils.getElement(elem, 'descriptionReference', NAMESPACES.GML);
        if (descriptionReferenceElem != null) {
            object.descriptionReference = descriptionReferenceElem.textContent;
            this._profileIDMap = this.utils.processProfileID(
                descriptionReferenceElem, object, 'descriptionReference', this._profileIDMap
            );
        }
        const identifierElem = this.utils.getElement(elem, 'identifier', NAMESPACES.GML);
        if (identifierElem != null) {
            const returnObject = this.decodeCodeType(identifierElem);
            if (returnObject) {
                object.identifier = returnObject.value;
                this._profileIDMap = this.utils.processProfileID(
                    returnObject.docElement, object, 'identifier', this._profileIDMap
                );
            }
        }
        // gml:name may repeat; getDecodedList collects every occurrence.
        object.name = this.utils.getDecodedList(
            elem, 'name', NAMESPACES.GML, this._profileIDMap, (nameElem) => this.decodeCodeType(nameElem)
        );
    }

    /**
     * Decodes `elem` itself as a gml:CodeType (text content plus optional
     * codeSpace attribute). Always returns a ReturnObject.
     */
    public decodeCodeType(elem: Element): ReturnObject<CodeType> {
        let codeSpace = null;
        if (elem.hasAttribute('codeSpace')) {
            codeSpace = elem.getAttribute('codeSpace');
        }
        const codeType = new CodeType(elem.textContent, codeSpace);
        if (elem.hasAttribute('codeSpace')) {
            this._profileIDMap = this.utils.processProfileID(elem, codeType, 'codeSpace', this._profileIDMap);
        }
        return new ReturnObject(codeType, elem);
    }

    /**
     * Decodes a gml:Point child of `elem` (srs* attributes and its gml:pos
     * coordinates). Returns undefined when no Point child exists.
     */
    public decodePoint(elem: Element): ReturnObject<Point> {
        const pointElem = this.utils.getElement(elem, 'Point', NAMESPACES.GML);
        if (pointElem != null) {
            const point = new Point();
            this._profileIDMap = this.utils.processProfileID(pointElem, point, '', this._profileIDMap);
            this.decodeReferenced(pointElem, point);
            this.decodePos(pointElem, point);
            return new ReturnObject(point, pointElem);
        }
    }

    /**
     * Reads a space-separated gml:pos ("x y") child of `elem` into `point`.
     * Each coordinate is optional; "+" coerces the text to a number.
     * NOTE(review): a coordinate of "0" is falsy and would be skipped by
     * these truthiness checks — confirm whether 0-valued coordinates occur.
     */
    public decodePos(elem: Element, point: Point) {
        const posElem = this.utils.getElement(elem, 'pos', NAMESPACES.GML);
        if (posElem != null) {
            const content = posElem.textContent.split(' ');
            if (content[0]) {
                point.x = +content[0];
                this._profileIDMap = this.utils.processProfileID(posElem, point, 'x', this._profileIDMap);
            }
            if (content[1]) {
                point.y = +content[1];
                this._profileIDMap = this.utils.processProfileID(posElem, point, 'y', this._profileIDMap);
            }
        }
    }

    /**
     * Populates AbstractFeature attributes: the AbstractGML base fields plus
     * an optional gml:boundedBy envelope.
     */
    public decodeAbstractFeature(elem: Element, abstractFeature: AbstractFeature): void {
        this.decodeAbstractGML(elem, abstractFeature);
        const boundedByElem = this.utils.getElement(elem, 'boundedBy', NAMESPACES.GML);
        if (boundedByElem != null) {
            abstractFeature.boundedBy = this.decodeEnvelope(boundedByElem);
            this._profileIDMap = this.utils.processProfileID(
                boundedByElem, abstractFeature, 'boundedBy', this._profileIDMap
            );
        }
    }

    /**
     * Decodes a gml:Envelope child of `elem` (srs* attributes plus 2-D
     * lowerCorner/upperCorner pairs). Corners with other than exactly two
     * components are ignored. Returns undefined when no Envelope child exists.
     */
    public decodeEnvelope(elem: Element): Envelope {
        const envelopeElem = this.utils.getElement(elem, 'Envelope', NAMESPACES.GML);
        if (envelopeElem != null) {
            const envelope = new Envelope();
            this._profileIDMap = this.utils.processProfileID(envelopeElem, envelope, '', this._profileIDMap);
            this.decodeReferenced(envelopeElem, envelope);
            const lowerCorner = this.utils.getElement(envelopeElem, 'lowerCorner', NAMESPACES.GML);
            if (lowerCorner != null) {
                const lc = lowerCorner.textContent.split(' ');
                if (lc.length === 2) {
                    envelope.lowerCorner = [+lc[0], +lc[1]];
                    this._profileIDMap = this.utils.processProfileID(
                        lowerCorner, envelope, 'lowerCorner', this._profileIDMap
                    );
                }
            }
            const upperCorner = this.utils.getElement(envelopeElem, 'upperCorner', NAMESPACES.GML);
            if (upperCorner != null) {
                const uc = upperCorner.textContent.split(' ');
                if (uc.length === 2) {
                    envelope.upperCorner = [+uc[0], +uc[1]];
                    this._profileIDMap = this.utils.processProfileID(
                        upperCorner, envelope, 'upperCorner', this._profileIDMap
                    );
                }
            }
            return envelope;
        }
    }

    /**
     * Copies the gml spatial-reference attributes (srsName, srsDimension,
     * axisLabels, uomLabels) from `elem` onto `referenced`. Non-numeric
     * srsDimension values are skipped; label lists are space-separated.
     */
    public decodeReferenced(elem: Element, referenced: Referenced): void {
        if (elem.hasAttribute('srsName')) {
            referenced.srsName = elem.getAttribute('srsName');
            this._profileIDMap = this.utils.processProfileID(elem, referenced, 'srsName', this._profileIDMap);
        }
        if (elem.hasAttribute('srsDimension') && !isNaN(+elem.getAttribute('srsDimension'))) {
            referenced.srsDimension = +elem.getAttribute('srsDimension');
            this._profileIDMap = this.utils.processProfileID(elem, referenced, 'srsDimension', this._profileIDMap);
        }
        if (elem.hasAttribute('axisLabels')) {
            referenced.axisLabels = elem.getAttribute('axisLabels').split(' ');
            this._profileIDMap = this.utils.processProfileID(elem, referenced, 'axisLabels', this._profileIDMap);
        }
        if (elem.hasAttribute('uomLabels')) {
            referenced.uomLabels = elem.getAttribute('uomLabels').split(' ');
            this._profileIDMap = this.utils.processProfileID(elem, referenced, 'uomLabels', this._profileIDMap);
        }
    }

    /**
     * Parses a time-position element's text into a Date; returns null for
     * indeterminatePosition="unknown".
     * NOTE(review): this call site passes a Date to processProfileID where
     * other sites pass model objects — confirm that is intended.
     */
    private getTime(elem: Element): Date {
        if (elem.hasAttribute('indeterminatePosition') && elem.getAttribute('indeterminatePosition') === 'unknown') {
            return null;
        } else {
            const date = new Date(Date.parse(elem.textContent));
            this._profileIDMap = this.utils.processProfileID(elem, date, '', this._profileIDMap);
            return date;
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.