text stringlengths 1 1.05M |
|---|
// Create a shopping list in the variable myList. The list should be a
// multi-dimensional array containing several sub-arrays.
// The first element in each sub-array should contain a string with the name of
// the item. The second element should be a number representing the quantity i.e.
// ["Chocolate Bar", 15]
// There should be at least 5 sub-arrays in the list.
// (Bug fix: the fourth comment line was prefixed with a single "/", which is
// not a comment and made the file a syntax error.)
var myList = [
  ['apples', 1],
  ['oranges', 2],
  ['watermelons', 3],
  ['apricots', 4],
  ['bananas', 5]
];
#!/bin/bash
# dumps out the log files according to the provided pattern, but makes sure that
# each one is dumped in chronological order, and any compressed logs are unpacked
# first.
# Dump all log files matching the given path prefix to stdout, oldest first,
# decompressing any gzip-compressed rotations along the way.
#   $1: log path prefix, e.g. /var/log/syslog
# Exits the script with status 1 when no files match.
function assemble_log_file()
{
  logpath="$1"; shift
  # Build an array of all matching file names, in reverse lexical order so the
  # oldest rotated files (highest suffix numbers) are listed first.
  full_set=($(ls -1 -r "$logpath"*))
  if [ ${#full_set[*]} -lt 1 ]; then
    # Bug fix: report the pattern the user supplied, not the (empty) result array.
    echo "No log files were found matching the pattern '$logpath'"
    exit 1
  fi
  # Bug fix: the original "/tmp/$USER_logdump.XXXXXX" expanded the nonexistent
  # variable USER_logdump; braces make the intended ${USER}_ prefix explicit.
  logdump="$(mktemp "/tmp/${USER}_logdump.XXXXXX")"
  for logy in "${full_set[@]}"; do
    if [[ $logy =~ .*\.gz ]]; then
      # Compressed rotation: decompress to stdout and append.
      gzip -d -c "$logy" >>"$logdump"
    else
      cat "$logy" >>"$logdump"
    fi
  done
  cat "$logdump"
  \rm -f "$logdump"
}
##############
# --- entry point --------------------------------------------------------------
# A log path prefix is the sole required argument; without one, print usage
# help and exit non-zero.
logpath="$1"; shift
if [ -z "$logpath" ]; then
  script_name="$(basename $0 .sh)"
  echo "$script_name: Log file dumper"
  echo
  echo "This script requires a log path, which should be the prefix of some log files"
  echo "that it will dump out. All files matching the prefix are printed to standard"
  echo "output, and the log entries will be printed in chronological order. Any"
  echo "compressed log files will be unpacked first before printing."
  echo
  echo "Example:"
  echo -e "\t$script_name /var/log/syslog"
  echo
  exit 1
fi
assemble_log_file "$logpath"
|
APP_NAME=dprweb
# Output directory for the generated archive (default: ../../build).
OUTDIR="${1:-../../build}"
# Directory whose contents are zipped (default: parent of OUTDIR).
SOURCEDIR="${2:-$OUTDIR/..}"
# Remove any previous build so zip starts fresh.
rm -f "$OUTDIR/$APP_NAME.zip"
# generate the ZIP file
echo "Generating $APP_NAME.zip... ($SOURCEDIR -> $OUTDIR)"
pushd "$SOURCEDIR"
# Bug fix: the trailing bare "*.zip" was a stray include pattern appended after
# the exclusions; it must be an -x exclusion so previously built archives are
# not packed into the new one.
zip -r "$OUTDIR/$APP_NAME.zip" * -x \*.git\* -x \*node_modules\* -x \*.zip
popd
# Desktop notification when available (silently skipped on headless systems).
if hash notify-send 2>/dev/null; then
  notify-send "Done!" "Extension Updated"
fi
|
# Draws a cubic bezier curve together with its control points and control
# handles using zDogPy (a Python port of the zdog pseudo-3D engine).
from zDogPy.illustration import Illustration
from zDogPy.shape import Shape

# Palette (CSS shorthand hex colors).
eggplant = '#636'
orange = '#E62'

# Curve anchors and bezier control coordinates (z defaults to 0).
start = { 'x' : -60, 'y' : -60 }
startControl = { 'x' : 20, 'y' : -60 }
endControl = { 'x' : -20, 'y' : 60 }
end = { 'x' : 60, 'y' : 60 }

I = Illustration()
I.setSize(200, 200)

# curve: an open bezier path from start to end, drawn as a thick stroke.
Shape(
    addTo=I,
    path=[
        start,
        { 'bezier': [ startControl, endControl, end ] },
    ],
    closed=False,
    stroke=20,
    color=eggplant,
)

# control points: one dot per bezier control coordinate.
controlDot = Shape(
    addTo=I,
    translate=startControl,
    stroke=12,
    color=orange,
)
controlDot.copy(
    translate=endControl,
)

# control handles: thin lines joining each anchor to its control point.
controlLine = Shape(
    addTo=I,
    path=[ start, startControl ],
    stroke=2,
    color=orange,
)
controlLine.copy(
    path=[ end, endControl ],
)

# Show the window and start rendering.
I.showInterface()
I.updateRenderGraph()
|
<gh_stars>0
package com.ruoyi.file.service.impl;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.core.util.ZipUtil;
import cn.hutool.http.HttpUtil;
import com.aliyun.oss.OSSClient;
import com.aliyun.oss.model.PutObjectResult;
import com.qiniu.util.StringUtils;
import com.ruoyi.common.constant.SystemConfigConstants;
import com.ruoyi.common.enums.SystemConfigKeyEnum;
import com.ruoyi.common.exception.file.FileException;
import com.ruoyi.file.entity.FileUploader;
import com.ruoyi.file.mapper.FileUploaderMapper;
import com.ruoyi.file.service.IAliOssService;
import com.ruoyi.file.utils.FileUploadUtil;
import enums.FileUploadTypeEnum;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import javax.servlet.http.HttpServletResponse;
import java.io.InputStream;
import java.net.URL;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* <p>
* 基础文件信息 服务实现类
* </p>
*
* @author Ruoyi
* @since 2021-03-17
*/
@Slf4j
@Service
public class AliOssServiceImpl implements IAliOssService {
private OSSClient client;
private String accessKey;
private String accessKeySecret;
private String bucketName;
private String endpoint;
private String fileUrl;
@Autowired
private FileUploaderMapper fileUploaderMapper;
@Override
public void upload(InputStream file, FileUploader fileUploader, Map<String, String> configMap) {
this.init(configMap);
try {
// 设置临时文件路径
String uploadType = fileUploader.getUploadType();
String rootPath = StrUtil.isEmpty(uploadType) ? FileUploadTypeEnum.COMMON.toString() + "/" :
uploadType.endsWith("/") ? uploadType : uploadType + "/";
rootPath = rootPath + FileUploadUtil.createNewFileName(fileUploader.getOriginalFileName());
Date startTime = new Date();
// 上传
if (!this.client.doesBucketExist(this.bucketName)) {
throw new RuntimeException("[阿里云OSS] 无法上传文件!Bucket不存在:" + this.bucketName);
}
// 上传文件。
PutObjectResult result = this.client.putObject(bucketName, rootPath, file);
//设置返回的url有效期为10年
Date expiration = new Date(System.currentTimeMillis() + 10 * 365 * 24 * 60 * 60 * 1000);
URL url = this.client.generatePresignedUrl(bucketName, rootPath, expiration);
//解析上传成功的结果
fileUploader.setStorageType(SystemConfigConstants.SYS_FILE_PRIORITY_OSS);
fileUploader.setUploadStartTime(startTime);
fileUploader.setUploadEndTime(new Date());
fileUploader.setFilePath(rootPath);
fileUploader.setFileHash(result.getETag());
fileUploader.setFullFilePath(url.toString());
// 上传文件信息到数据库
fileUploaderMapper.insert(fileUploader);
} catch (Exception ex) {
throw new RuntimeException("七牛云文件上传失败");
} finally {
this.client.shutdown();
}
}
@Override
public void remove(FileUploader uploader, Map<String, String> configMap) {
this.init(configMap);
try {
boolean exists = this.client.doesBucketExist(this.bucketName);
if (!exists) {
throw new FileException("[阿里云OSS] 文件删除失败!Bucket不存在:" + this.bucketName, null);
}
if (!this.client.doesObjectExist(bucketName, uploader.getFilePath())) {
throw new FileException("[阿里云OSS] 文件删除失败!文件不存在:" + this.bucketName + "/" + uploader.getFilePath(), null);
}
this.client.deleteObject(bucketName, uploader.getFilePath());
} catch (Exception e){
throw new FileException("[阿里云OSS] 文件删除失败!", null);
} finally {
this.client.shutdown();
}
}
@Override
public void downloadFile(List<FileUploader> uploaders, HttpServletResponse response){
String localFilePath = null;
try {
localFilePath = System.getProperty("user.dir");
if (StrUtil.isEmpty(localFilePath)){
log.error("method: downloadFile line: 【114行】阿里云OSS下载>>>未查询到存储路径");
return;
}
String tempPath = localFilePath + "/temp/download/" + System.currentTimeMillis();
// 将所有相关文件复制到临时下载目录中去
String temlFileName;
for (FileUploader uploader : uploaders) {
temlFileName = tempPath + "/" + IdUtil.simpleUUID() + uploader.getSuffix();
HttpUtil.downloadFile(uploader.getFullFilePath(), FileUtil.file(temlFileName));
}
// 将临时下载目录下的文件压缩成zip
String zipPath = tempPath+".zip";
ZipUtil.zip(tempPath);
response.setContentType(MediaType.APPLICATION_OCTET_STREAM_VALUE);
FileUploadUtil.setAttachmentResponseHeader(response, FileUtil.getName(zipPath));
FileUploadUtil.writeBytes(zipPath, response.getOutputStream());
}catch (Exception e){
log.error("method: downloadFile line: 【134行】 阿里云OSS下载文件失败 发生的异常是",e);
}finally {
if (StrUtil.isNotEmpty(localFilePath)){
// 删除临时下载文件夹及文件
FileUtil.del(localFilePath+"/temp");
}
}
}
private void init(Map<String, String> configMap) {
// 获取阿里云OSS的系统配置并校验值
if (Objects.isNull(configMap)){
throw new FileException("配置信息为空,阿里云OSS文件上传功能暂时不可用!",null);
}
this.bucketName = configMap.get(SystemConfigKeyEnum.ALIYUN_BUCKET_NAME.getKey());
this.endpoint = configMap.get(SystemConfigKeyEnum.ALIYUN_ENDPOINT.getKey());
this.fileUrl = configMap.get(SystemConfigKeyEnum.ALIYUN_FILE_URL.getKey());
this.accessKey = configMap.get(SystemConfigKeyEnum.ALIYUN_ACCESS_KEY.getKey());
this.accessKeySecret = configMap.get(SystemConfigKeyEnum.ALIYUN_ACCESS_KEY_SECRET.getKey());
if (StringUtils.isNullOrEmpty(this.bucketName) ||
StringUtils.isNullOrEmpty(this.endpoint) ||
StringUtils.isNullOrEmpty(this.fileUrl)||
StringUtils.isNullOrEmpty(this.accessKey)||
StringUtils.isNullOrEmpty(this.accessKeySecret)) {
throw new FileException("尚未配置七牛云,文件上传功能暂时不可用!",null);
}
this.client = new OSSClient(endpoint, accessKey, accessKeySecret);
}
}
|
const { Command, MayfiEmbed, MiscUtils } = require('../../')
const fetch = require("node-fetch")
module.exports = class Cat extends Command {
constructor (client) {
super({
name: 'cat',
aliases: ['meow'],
category: 'fun',
}, client)
}
async run ({ channel, author, t}) {
const body = await fetch('https://api.thecatapi.com/v1/images/search').then(res => res.json())
let embed = new MayfiEmbed(author)
.setTitle("🐱")
.setImage(body[0].url)
channel.send({embed})
}
}
|
<filename>web/BookReader/BookReaderInit.js
//
// This file shows the BookReader initial configuration
//
// Copyright(c)2008-2009 Internet Archive. Software license AGPL version 3.
// Create the BookReader object
// NOTE(review): `BookReader`, `display_mode`, `pageW` and `pageH` are globals
// expected to be defined by the embedding page before this script runs —
// confirm against the including template.
br = new BookReader();
br.mode = display_mode;
// Return the width of a given page. Here we assume all images are 800 pixels wide
br.getPageWidth = function(index) {
    return pageW;
}
// Return the height of a given page. Here we assume all images are 1200 pixels high
br.getPageHeight = function(index) {
    return pageH;
}
// Return the URI for the image of the given page, using a different URL
// structure: prefer an explicit filename from fileArray, otherwise fall back
// to "<index>.<extension>".
br.getPageURI = function(index, reduce, rotate) {
    var url;
    // Bug fix: the original compared the array VALUE against the string
    // 'undefined' ('undefined' != fileArray[index]), which is also true when
    // the entry is missing, yielding URLs like ".../undefined". Test the
    // type of the entry instead.
    if (fileArray.length > 0 && 'undefined' != typeof fileArray[index]) {
        url = linkToFile + fileArray[index];
    } else {
        url = linkToFile + index + "." + extension;
    }
    return url;
}
// Return which side, left ('L') or right ('R'), a given page is displayed on:
// even page indices sit on the right, odd ones on the left.
br.getPageSide = function(index) {
    return (index & 0x1) ? 'L' : 'R';
}
// This function returns the left and right indices for the user-visible
// spread that contains the given index. The return values may be
// null if there is no facing page or the index is invalid.
br.getSpreadIndices = function(pindex) {
    var spreadIndices = [null, null];
    var rightToLeft = ('rl' == this.pageProgression);
    var side = this.getPageSide(pindex);
    if (rightToLeft) {
        // Right to Left: the facing page sits one further along when pindex
        // is a right-hand page, one back when it is a left-hand page.
        spreadIndices = (side == 'R')
            ? [pindex + 1, pindex]
            : [pindex, pindex - 1];
    } else {
        // Left to right: mirror image of the above.
        spreadIndices = (side == 'L')
            ? [pindex, pindex + 1]
            : [pindex - 1, pindex];
    }
    return spreadIndices;
}
// For a given "accessible page index" return the page number in the book.
//
// For example, index 5 might correspond to "Page 1" if there is front matter such
// as a title page and table of contents.
// Here the mapping is the identity: index N is labelled page N.
br.getPageNum = function(index) {
    return index;
}
// Total number of leafs
// NOTE(review): `num_of_pages`, `general_title` are globals supplied by the
// embedding page — confirm against the including template.
br.numLeafs = num_of_pages;
// Book title and the URL used for the book title link
br.bookTitle = general_title;
br.bookUrl = '';
// Override the path used to find UI images
br.imagesBaseURL = 'BookReader/images/';
// Embedding is not available in this deployment.
br.getEmbedCode = function(frameWidth, frameHeight, viewParams) {
    return "Embed code not supported in bookreader demo.";
}
// Let's go!
br.init();
//read-aloud and search need backend compenents and are not supported in the demo
// Hide the toolbar buttons for the unsupported features.
$('#BRtoolbar').find('.read').hide();
$('#BRtoolbar').find('.share').hide();
$('#BRtoolbar').find('.info').hide();
$('#BRtoolbar').find('.logo').hide();
$('#BRtoolbar').find('#BRreturn').hide();
$('#textSrch').hide();
$('#btnSrch').hide();
|
<filename>test/test.js
var chai = require("chai"),
expect = chai.expect,
assert = chai.assert,
sinonChai = require('sinon-chai'),
sinon = require('sinon');
var helper = require('../lib/helper'),
calc = require('../lib/calc');
chai.use(sinonChai);
// Tests for lib/helper: log() output format and parse() expression splitting.
describe('Helper',function(){
    describe('#log()',function(){
        // Spy on console.log so each test can inspect what was printed.
        beforeEach(function() {
            sinon.spy(console,'log');
        });
        afterEach(function() {
            console.log.restore();
        });
        it('should log timestamp and message', function () {
            var message = "Mocha test";
            helper.log(message);
            // datetime regex - http://stackoverflow.com/questions/3143070/javascript-regex-iso-datetime
            // Expected shape: "<ISO-8601 timestamp> - <message>".
            var regexTest = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d+([+-][0-2]\d:[0-5]\d|Z)+\ \- .*/.test(console.log.getCall(0).args[0]);
            expect(console.log).to.have.been.calledOnce;
            expect(regexTest).to.be.true;
        });
    });
    describe('#parse()',function(){
        // parse('1+2=') is expected to yield [operand, operator, operand]
        // with the operands kept as strings, or null for invalid input.
        it('should return an array with a length of 3',function(){
            expect(helper.parse('1+2=')).to.be.an('Array');
            expect(helper.parse('1+2=')).to.have.length(3);
        });
        it('should return an integer for first element in array',function(){
            expect(helper.parse('1+2=')[0]).to.equal('1');
        });
        it('should return a float for first element in array',function(){
            expect(helper.parse('1.001+2=')[0]).to.equal('1.001');
        });
        it('should return null if first element in expression is not a number',function(){
            expect(helper.parse('Q+2=')).to.be.null;
        });
        it('should return an operator for second element in array',function(){
            expect(helper.parse('1+2=')[1]).to.equal('+');
        });
        it('should return null if second element in expression is not an operator',function(){
            expect(helper.parse('1;2=')).to.be.null;
        });
        it('should return an integer for third element in array',function(){
            expect(helper.parse('1+2=')[2]).to.equal('2');
        });
        it('should return a float for third element in array',function(){
            expect(helper.parse('1+2.001=')[2]).to.equal('2.001');
        });
        it('should return null if third element in expression is not a number',function(){
            expect(helper.parse('1+P=')).to.be.null;
        });
    });
});
// Tests for lib/calc: the operator list, random number generation, and the
// four arithmetic operations including their error cases.
describe('Calc',function(){
    describe('#operators', function(){
        it('should contain +,-,/,*', function () {
            expect(calc.operators).to.include('+');
            expect(calc.operators).to.include('-');
            expect(calc.operators).to.include('/');
            expect(calc.operators).to.include('*');
        });
    });
    describe('#randomNumber()',function(){
        // randomNumber(min, max, isInt): the third argument selects integer
        // versus float results.
        it('should return an integer', function () {
            var result = calc.randomNumber(1,10,true);
            assert.isNumber(result);
            var isInt = result % 1 === 0;
            assert(isInt, 'not an integer: ' + result);
        });
        it('should return a float', function () {
            // NOTE(review): a random float can legitimately land on a whole
            // value, which would make this assertion flaky — confirm that
            // randomNumber(…, false) can never return a whole number.
            var result = calc.randomNumber(1,10,false);
            assert.isNumber(result);
            var isInt = result % 1 !== 0;
            assert(isInt, 'not a float: ' + result);
        });
    });
    describe('#calculate()',function(){
        // calculate(lhs, op, rhs) evaluates one binary expression.
        it('should return correct answer to addition expression', function() {
            var parsed = [2,'+',3];
            var result = calc.calculate(parsed[0],parsed[1],parsed[2]);
            expect(result).to.equal(5);
        });
        it('should return correct answer to subtraction expression', function() {
            var parsed = [2,'-',3];
            var result = calc.calculate(parsed[0],parsed[1],parsed[2]);
            expect(result).to.equal(-1);
        });
        it('should return correct answer to multiplication expression', function() {
            var parsed = [2,'*',3];
            var result = calc.calculate(parsed[0],parsed[1],parsed[2]);
            expect(result).to.equal(6);
        });
        it('should return correct answer to division expression', function() {
            var parsed = [6,'/',3];
            var result = calc.calculate(parsed[0],parsed[1],parsed[2]);
            expect(result).to.equal(2);
        });
        it('should return an error for division by zero', function(){
            var parsed = [2,'/',0];
            expect(function() { calc.calculate(parsed[0],parsed[1],parsed[2]); }).to.throw('Division by zero not allowed.');
        });
        it('should return an error for invalid operator', function(){
            var parsed = [2,'#',0];
            expect(function() { calc.calculate(parsed[0],parsed[1],parsed[2]); }).to.throw('Invalid operator.');
        });
    });
});
|
#!/bin/bash
# Copyright 2019 - 2020 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Configure pgMonitor support only when a password was supplied via the
# environment, and only during initial cluster initialization.
if [[ -v PGMONITOR_PASSWORD ]]
then
    if [[ ${PGHA_INIT?} == "true" ]]
    then
        echo_info "PGMONITOR_PASSWORD detected. Enabling pgMonitor support."
        source /opt/cpm/bin/common_lib.sh
        # Connect over the local Unix socket in /tmp.
        export PGHOST="/tmp"
        # Wait until the local postgres server accepts connections.
        # NOTE(review): this waits on PGHA_PG_PORT while every psql call below
        # uses PG_PRIMARY_PORT — confirm the two are intentionally different.
        test_server "postgres" "${PGHOST?}" "${PGHA_PG_PORT}" "postgres"
        # Numeric server version, e.g. 110005 for 11.5.
        VERSION=$(psql --port="${PG_PRIMARY_PORT}" -d postgres -qtAX -c "SELECT current_setting('server_version_num')")
        # Pick the pgMonitor setup SQL file matching the server's major version.
        if (( ${VERSION?} >= 90500 )) && (( ${VERSION?} < 90600 ))
        then
            function_file='/opt/cpm/bin/modules/pgexporter/setup_pg95.sql'
        elif (( ${VERSION?} >= 90600 )) && (( ${VERSION?} < 100000 ))
        then
            function_file='/opt/cpm/bin/modules/pgexporter/setup_pg96.sql'
        elif (( ${VERSION?} >= 100000 )) && (( ${VERSION?} < 110000 ))
        then
            function_file='/opt/cpm/bin/modules/pgexporter/setup_pg10.sql'
        elif (( ${VERSION?} >= 110000 ))
        then
            function_file='/opt/cpm/bin/modules/pgexporter/setup_pg11.sql'
        else
            echo_err "Unknown or unsupported version of PostgreSQL. Exiting.."
            exit 1
        fi
        echo_info "Using setup file '${function_file}' for pgMonitor"
        # Work on a copy so the shipped SQL stays pristine, and point the
        # pgbackrest info helper at this image's script location.
        cp "${function_file}" "/tmp/setup_pg.sql"
        sed -i "s/\/usr\/bin\/pgbackrest-info.sh/\/opt\/cpm\/bin\/pgbackrest_info.sh/g" "/tmp/setup_pg.sql"
        # TODO Add ON_ERROR_STOP and single transaction when
        # upstream pgmonitor changes setup SQL to check if the
        # role exists prior to creating it.
        psql -U postgres --port="${PG_PRIMARY_PORT}" -d postgres \
            < "/tmp/setup_pg.sql" > /tmp/pgmonitor-setup.stdout 2> /tmp/pgmonitor-setup.stderr
        #err_check "$?" "pgMonitor Setup" "Could not load pgMonitor functions: \n$(cat /tmp/pgmonitor.stderr)"
        # Set the monitoring role's password to the value from the environment.
        psql -U postgres --port="${PG_PRIMARY_PORT}" -d postgres \
            -c "ALTER ROLE ccp_monitoring PASSWORD '${PGMONITOR_PASSWORD?}'" \
            > /tmp/pgmonitor-alter-role.stdout 2> /tmp/pgmonitor-alter-role.stderr
        #err_check "$?" "pgMonitor User Setup" "Could not alter ccp_monitor user's password: \n$(cat /tmp/pgmonitor.stderr)"
    fi
fi
|
<gh_stars>1-10
/*
Created by <NAME> on 30/11/16.
*/
package main.scala.slaves
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.mutable.ListBuffer
import scala.util.control.Breaks._
/**
 * Runnable worker that repeatedly drains batches of values from a shared
 * blocking queue and sums each batch.
 *
 * @param queue the queue producers push readings onto
 */
class SummerClass(queue: LinkedBlockingQueue[Double]) extends Runnable {
  // Number of queue elements consumed per summation batch.
  val batch_size = 3200
  private val sharedQueue = queue

  def run() {
    while (true) {
      try {
        val input_data_list_buffer = new ListBuffer[Double]()
        // Bug fix: the original compared the unboxed Double returned by
        // take() against null (always false) and called `break` without an
        // enclosing `breakable`, which would have thrown a BreakControl that
        // escaped the InterruptedException-only catch and killed the thread.
        // take() blocks until an element is available and never yields null,
        // so the dead branch is removed entirely.
        for (i <- 1 to batch_size) {
          input_data_list_buffer += sharedQueue.take()
        }
        val resultFromProcessor = summer(input_data_list_buffer.toList)
      } catch {
        case ex: InterruptedException => println("Interrupted Exception")
      }
    }
  }

  /** Processor: fold the batch into its sum. */
  def summer(input_data_list: List[Double]): Double = {
    return input_data_list.foldLeft(0.00)(_ + _)
  }
}
def print_string(text):
    """Print ``text`` to stdout.

    Args:
        text: The string to print.

    Raises:
        ValueError: If ``text`` is empty. (ValueError is a subclass of
            Exception, so existing callers catching Exception still work;
            the generic ``Exception`` was replaced with the idiomatic type.)
    """
    if len(text) > 0:
        print(text)
    else:
        raise ValueError('The input string is empty')
<filename>acmicpc.net/source/9506.cpp
// 9506. 약수들의 합
// 2019.05.22
// 수학
#include<iostream>
#include<vector>
#include<algorithm>
using namespace std;
// Read integers until -1 (or end of input); for each n decide whether it is a
// perfect number (equal to the sum of its proper divisors) and print either
// the divisor decomposition ("6 = 1 + 2 + 3") or "<n> is NOT perfect.".
int main()
{
    int n;
    // Bug fix: the original `while (1) { cin >> n; ... }` never checked the
    // stream state, so EOF or malformed input left n unchanged/zeroed and the
    // loop spun forever. Testing the extraction terminates cleanly.
    while (cin >> n && n != -1)
    {
        // Collect every proper divisor of n: 1 is always one, and the rest
        // come in pairs (i, n / i) discovered up to sqrt(n).
        vector<int> v;
        v.push_back(1);
        for (int i = 2; i * i <= n; i++)
        {
            if (n % i == 0)
            {
                v.push_back(i);
                v.push_back(n / i);
            }
        }
        // Sort for the "a + b + c" output order.
        sort(v.begin(), v.end());
        int sum = 0;
        for (size_t i = 0; i < v.size(); i++)
        {
            sum += v[i];
        }
        if (sum == n) // perfect number
        {
            cout << n << " = ";
            for (size_t i = 0; i + 1 < v.size(); i++)
            {
                cout << v[i] << " + ";
            }
            cout << v[v.size() - 1] << endl;
        }
        else // not perfect
        {
            cout << n << " is NOT perfect." << endl;
        }
    }
    return 0;
}
|
#!/bin/bash
# CUDA
# Patch CUDA so that we can use clang 11
# Location of CUDA's host compiler version-check header, plus a pristine
# backup taken on first run so re-running always patches from the original.
FILE=/usr/local/cuda/include/crt/host_config.h
BAK_FILE=/usr/local/cuda/include/crt/host_config.h.bak
if [ ! -f "$BAK_FILE" ] ; then
  cp "$FILE" "$BAK_FILE"
fi
# Raise the clang major-version ceiling to 20 so clang 11 passes CUDA's check.
# (sed reads the backup directly — the original `cat | sed` was a useless use
# of cat — and all expansions are quoted against word splitting.)
sed 's,__clang_major__ >= [0-9]\+,__clang_major__ >= 20,' "$BAK_FILE" > "$FILE"
|
#!/bin/bash
# Retrieve the IP addresses of Docker containers
GUI_IP=$(/usr/bin/docker inspect --format='{{.NetworkSettings.IPAddress}}' son-gui)
BSS_IP=$(/usr/bin/docker inspect --format='{{.NetworkSettings.IPAddress}}' son-bss)
GTK_IP=$(/usr/bin/docker inspect --format='{{.NetworkSettings.IPAddress}}' son-gtkapi)
# Set up firewall rules for Docker containers
# Allow inbound eth0 traffic to each container's service port:
# GUI on 80, BSS on 25001, GTK API on 32001.
iptables -I PRE_DOCKER -i eth0 -p tcp -d $GUI_IP --dport 80 -j ACCEPT
iptables -I PRE_DOCKER -i eth0 -p tcp -d $BSS_IP --dport 25001 -j ACCEPT
iptables -I PRE_DOCKER -i eth0 -p tcp -d $GTK_IP --dport 32001 -j ACCEPT
# Insert the PRE_DOCKER table before the DOCKER table in the FORWARD chain
# so the ACCEPT rules above are evaluated ahead of Docker's own rules.
# NOTE(review): assumes the PRE_DOCKER chain already exists — confirm it is
# created elsewhere before this script runs.
iptables -I FORWARD -o docker0 -j PRE_DOCKER
echo "firewall.sh done."
/* Return the sum of the positive parts of a and b: a negative argument
 * contributes zero, so add(2, -5) == 2 and add(-1, -2) == 0. */
int add(int a, int b) {
    int positive_a = (a > 0) ? a : 0;
    int positive_b = (b > 0) ? b : 0;
    return positive_a + positive_b;
}
<filename>util/serve_files.js
const { join } = require('path');
const fs = require('fs');
const mime = require('mime/lite');
const { stat, readFile, readdir, mkdir, writeFile, rmdir, unlink, rename, copyFile } = fs.promises;
// Stream a regular file back as the response body.
async function sendFile(res, file, stats) {
    const headers = {
        'Content-Length': stats.size,
        // NOTE(review): Content-Type is hard-coded to text/plain although a
        // mime lookup is available (commented out) — confirm intent.
        'Content-Type': 'text/plain' //mime.getType(file)
    };
    res.writeHead(200, headers);
    // Bug fix: read the file as a Buffer instead of a decoded UTF-8 string.
    // Content-Length above is the size in BYTES; sending a decoded string
    // truncates the response for any file containing multi-byte characters,
    // since its string length is smaller than its byte length.
    res.end(await readFile(file));
}
// Respond with a JSON listing of a directory: hidden entries are skipped,
// only files and subdirectories are included, and directory names carry a
// trailing '/'.
async function sendDirectory(res, filePath) {
    const entries = await readdir(filePath, { withFileTypes: true });
    const names = [];
    for (const entry of entries) {
        if (entry.name.startsWith('.')) continue;
        if (entry.isDirectory()) names.push(entry.name + '/');
        else if (entry.isFile()) names.push(entry.name);
    }
    res.json(names);
}
// Recursively copy the directory tree at filePath into newPath.
// newPath must not already exist (mkdir throws EEXIST otherwise).
async function copyDirectory(filePath, newPath) {
    await mkdir(newPath);
    const items = await readdir(filePath, { withFileTypes: true });
    // Copy entries in parallel: subdirectories recurse, everything else goes
    // through copyFile.
    // NOTE(review): non-file, non-directory entries (symlinks, sockets) are
    // also handed to copyFile — confirm that is acceptable.
    return Promise.all(items
        .map(item => (item.isDirectory() ? copyDirectory : copyFile)(
            join(filePath, item.name),
            join(newPath, item.name)
        )));
}
// Create a single resource: a directory when filePath ends with '/', else a
// file holding `contents`. On ENOENT (missing parent directory) the parent is
// created recursively and the operation retried exactly once — `secondTry`
// guards against infinite recursion. EEXIST is swallowed so repeated creates
// are idempotent.
async function makeOne(filePath, contents, secondTry = false) {
    try {
        // TODO: set proper access rights mode
        if (filePath.endsWith('/')) await mkdir(filePath);
        else await writeFile(filePath, contents, 'utf8');
    }
    catch (e) {
        if (e.code === 'ENOENT') {
            if (secondTry) throw e;
            // Create the parent (path ends with '/' so this takes the mkdir
            // branch above), then retry the original creation once.
            await makeOne(join(filePath, '../'));
            return makeOne(filePath, contents, true);
        }
        if (e.code !== 'EEXIST') throw e;
    }
}
// Create multiple resources under filePath from [path, contents] pairs.
// Non-array entries are ignored, contents are coerced to strings, and any
// pair whose resolved path would escape filePath is silently dropped
// (path-traversal guard). Using a Map also deduplicates repeated paths.
function makeResources(filePath, files) {
    files = new Map(files
        .filter(Array.isArray)
        .map(([path, contents]) => [join(filePath, decodeURI(path)), contents && String(contents)])
        .filter(([path]) => path.startsWith(filePath))
    );
    return Promise.all([...files].map(pair => makeOne(...pair)));
}
// Build an Express-style request handler that serves the file tree rooted at
// `root`: GET reads files or directory listings, PUT creates files and
// directories, DELETE removes them, POST moves or copies them.
const serve = root => async (req, res) => {
    const filePath = join(root, decodeURI(req.path));
    console.log(req.method + ' ' + filePath);
    if (Object.keys(req.body).length) console.log(req.body);
    // Path-traversal guard: join() normalizes "..", so anything escaping the
    // root no longer starts with it.
    if (!filePath.startsWith(root)) {
        return res.error('Unauthorized', 401);
    }
    try {
        const stats = await stat(filePath);
        if (req.method === 'GET') {
            if (stats.isDirectory()) await sendDirectory(res, filePath);
            else if (stats.isFile()) await sendFile(res, filePath, stats);
            else return res.error('Unsupported resource type', 400);
        }
        else if (req.method === 'PUT') {
            // Body shape: { files: [[relativePath, contents], ...] }, created
            // beneath the target directory.
            const { files } = req.body;
            if (!stats.isDirectory()) return res.error(`${filePath} is not a directory`, 400);
            if (!Array.isArray(files)) return res.error('Bad files argument', 400);
            await makeResources(filePath, files);
        }
        else if (req.method === 'DELETE') {
            if (stats.isDirectory()) await rmdir(filePath, { recursive: true });
            else await unlink(filePath);
        }
        else if (req.method === 'POST') {
            // Body shape: { action: 'move'|'copy', destination } — the
            // destination gets the same root-escape check as the source.
            const { action, destination } = req.body;
            const newPath = join(root, decodeURI(destination));
            if (!newPath.startsWith(root)) return res.error('Unauthorized', 401);
            if (action === 'move') {
                await rename(filePath, newPath);
            }
            else if (action === 'copy') {
                if (stats.isDirectory()) await copyDirectory(filePath, newPath);
                else await copyFile(filePath, newPath);
            }
            else return res.error('Unrecognized action: ' + action, 400);
        }
        else return res.error('Unsupported method', 400);
        res.end('success');
    }
    catch (e) {
        // stat() on a missing path lands here as ENOENT; anything else maps
        // to a 400 with the error code as the message.
        if (e.code === 'ENOENT') res.error('Not found', 404);
        else res.error(e.code, 400);
    }
}
module.exports = serve;
|
<reponame>jeffrey-xiao/acm-notebook<filename>codebook/string/Z_Algorithm.cc
/*
* Produces an array Z where Z[i] is the length of the longest substring
* starting from S[i] which is also a prefix of S.
*
* Time: O(N) construction
* Memory: O(N)
*/
#include <bits/stdc++.h>
using namespace std;
// Computes the Z-array of s: z[i] is the length of the longest substring
// starting at s[i] that is also a prefix of s (z[0] is left at 0).
vector<int> compute(string s) {
    vector<int> z(s.size());
    // [l, r] is the rightmost window found so far that matches a prefix of s.
    int l = 0, r = 0;
    for (int i = 1; i < (int)s.size(); i++) {
        if (i > r) {
            // i lies outside the current window: match against the prefix
            // from scratch and record the new window.
            l = r = i;
            while (r < (int)s.size() && s[r] == s[r - l])
                r++;
            r--;
            z[i] = r - l + 1;
        } else {
            // i lies inside the window; j is the mirrored position within
            // the matching prefix.
            int j = i - l;
            if (z[j] < r - i + 1)
                // The mirrored match ends strictly inside the window: reuse it.
                z[i] = z[j];
            else {
                // Otherwise the match may extend past the window's right
                // edge: continue comparing from r onward.
                l = i;
                while (r < (int)s.size() && s[r] == s[r - l])
                    r++;
                r--;
                z[i] = r - l + 1;
            }
        }
    }
    return z;
}
|
<filename>XS_RS232_DataLogger.py
#!/usr/bin/env python2
'''
Created Sep - Nov 2019
DataLogger XS60002S
@author: <NAME> (CMAC, <EMAIL>, GitHub: https://github.com/frederik-d)
Setup:
Find Ard: ls -l /dev/ttyUSB*
Enable USB: sudo chmod 666 /dev/ttyUSB1
Path: /home/pi/B290_XS_RS232_Data/XS_RS232_DataLogger.py
Make shell excecutable:
sudo chmod +x /home/pi/Desktop/DataLogger_XS_RS232_autorun.sh
Minimum Working Example:
import os
import time
import datetime
import serial
DL_Port = '/dev/ttyUSB1'
DL_file_path = '/home/pi/B290_XS_RS232_Data'
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d_%H-%M-%S')
file_name ='Data_SprayDry_XS_RS232_' + st + '.txt'
file_Sensor_DataLog = open(os.path.join(DL_file_path,file_name),'a',0)
print 'SprayDry_XS_RS232_DataLogger\n' + 'StartTime: ' + st + ':\n'
while True:
ser = serial.Serial(DL_Port, 9600)
time.sleep(0.75)
value_string = ser.readline()
time.sleep(0.5)
value_string = ser.readline()
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d_%H-%M-%S')
file_Sensor_DataLog.write(st + ';' + value_string)
print st + '\t' + value_string
ser.close()
time.sleep(0.75)
'''
import os
import time
import serial
import datetime
import glob
import logging
import tkMessageBox
import multiprocessing
from multiprocessing import Process
class DataLogger(object):
    """Logs RS232 readings from the XS device to timestamped text files (Python 2)."""

    def __init__(self):
        # Serial device the datalogger is attached to.
        self.DL_Port = '/dev/ttyUSB1'
        # Directory that receives the log files.
        self.DL_file_path = '/home/pi/B290_XS_RS232_Data'

    def ensure_dir(self):
        # Create the output directory on first run.
        if not os.path.exists(self.DL_file_path):
            os.makedirs(self.DL_file_path)

    def createDataFile(self,file_path,file_name):
        # Open unbuffered (third arg 0, Python 2) in append mode so every
        # write hits disk immediately, then write the log header.
        self.file_ID = open(os.path.join(file_path,file_name),'a',0)
        self.file_ID.write('SprayDry XS_RS232 DataLogger\n')
        ts = time.time()
        st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d_%H-%M-%S')
        self.file_ID.write('StartTime: ' + st + ':\n')
        return self.file_ID

    def read_DataLogger(self,file_Sensor_DataLog):
        # Read one newline-terminated record byte-by-byte and append it with
        # a timestamp.
        # NOTE(review): `ser` is neither a parameter nor an attribute here —
        # this relies on a global serial handle and would raise NameError if
        # called as-is; confirm whether this method is still used.
        value = ser.read()
        value_string = ''
        while value.lower() != '\n':
            value_string += value
            value = ser.read()
        ts = time.time()
        st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d_%H-%M-%S')
        file_Sensor_DataLog.write(st + ';' + value_string + '\n')

    def run_DataLogger(self):
        # Main acquisition loop: (re)open the port each iteration, take two
        # readings, log the second with a timestamp, close the port, repeat.
        current_time = 0
        delay_time = 1
        self.ensure_dir()
        ts = time.time()
        st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d_%H-%M-%S')
        file_name ='Data_SprayDry_XS_RS232_' + st + '.txt'
        value_string = ''
        ser_Check = False
        print 'Initiate Logging'
        while True:
            # Create the log file (and header) on the first pass only.
            if not os.path.isfile(os.path.join(self.DL_file_path,file_name)):
                file_Sensor_DataLog = self.createDataFile(self.DL_file_path,file_name)
                print 'SprayDry_XS_RS232_DataLogger\n' + 'StartTime: ' + st + ':\n'
            ser = serial.Serial(self.DL_Port, 9600)
            time.sleep(0.75)
            # First readline is discarded — presumably a sync read to drop a
            # partial line; TODO confirm.
            value_string = ser.readline()
            time.sleep(0.5)
            value_string = ser.readline()
            ts = time.time()
            st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d_%H-%M-%S')
            file_Sensor_DataLog.write(st + ';' + value_string)
            print st + '\t' + value_string
            ser.close()
            time.sleep(0.75)
            current_time = current_time + delay_time
            # NOTE(review): ser_Check is never set to True, so this cleanup
            # branch is dead code — confirm intent.
            if ser_Check:
                file_Sensor_DataLog.close()
                ser.close()
def portIsUsable(portName):
    """Return True if the serial port can be opened (and closed) cleanly.

    Briefly opens the port as a probe; any failure to open counts as
    "not usable".
    """
    try:
        ser = serial.Serial(port=portName)
        ser.close()
        return True
    # Idiom fix: catch Exception instead of a bare `except:` so
    # KeyboardInterrupt and SystemExit still propagate.
    except Exception:
        return False
if __name__ == '__main__':
    # Route multiprocessing diagnostics to stderr at INFO level.
    multiprocessing.log_to_stderr()
    logger = multiprocessing.get_logger()
    logger.setLevel(logging.INFO)
    dl = DataLogger()
    # Bug fix: the original passed target=DataLogger().run_DataLogger(),
    # which CALLS run_DataLogger in the parent process (blocking forever in
    # its while-True loop) instead of handing the function to the child.
    # Pass the bound method itself, reusing the `dl` instance created above.
    p2 = Process(target=dl.run_DataLogger)
    p2.start()
|
import requests
import bs4
from urllib.parse import urljoin

url = 'https://example.com'
# Walk the site by following "next page" links until none remains.
while True:
    response = requests.get(url)
    # Fail fast on HTTP errors instead of silently parsing an error page.
    response.raise_for_status()
    soup = bs4.BeautifulSoup(response.text, 'html.parser')
    # Extract information here
    next_page_link = soup.find('a', {'class': 'next-page'})
    if next_page_link and next_page_link.get('href'):
        # Bug fix: 'href' is frequently site-relative (e.g. "/page/2");
        # resolve it against the current URL so requests.get() always
        # receives an absolute URL.
        url = urljoin(url, next_page_link['href'])
    else:
        break
# Wipe the ESP32's entire flash, then write the MicroPython v1.9.4 firmware
# image at offset 0x1000 on the board attached to /dev/ttyUSB0.
esptool.py --port /dev/ttyUSB0 erase_flash
esptool.py --port /dev/ttyUSB0 write_flash 0x1000 esp32-20180511-v1.9.4-2-g9630376d.bin
|
/*
*
* Copyright © ${year} ${name}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gwtproject.resources.rg;
import java.net.URL;
import javax.lang.model.element.ExecutableElement;
import org.gwtproject.resources.ext.*;
import org.gwtproject.resources.rg.util.SourceWriter;
import org.gwtproject.resources.rg.util.StringSourceWriter;
import org.gwtproject.resources.rg.util.Util;
/** @author <NAME> Created by treblereel 11/13/18 */
public final class TextResourceGenerator extends AbstractResourceGenerator {
  /**
   * Java compiler has a limit of 2^16 bytes for encoding string constants in a class file. Since
   * the max size of a character is 4 bytes, we'll limit the number of characters to (2^14 - 1) to
   * fit within one record.
   */
  private static final int MAX_STRING_CHUNK = 16383;

  /**
   * Generates the source of an anonymous TextResource subtype whose {@code getText()} returns the
   * content of the single resource bound to {@code method}.
   *
   * @throws UnableToCompleteException if the method does not resolve to exactly one resource
   */
  @Override
  public String createAssignment(
      TreeLogger logger, ResourceContext context, ExecutableElement method)
      throws UnableToCompleteException {
    ResourceOracle resourceOracle = context.getGeneratorContext().getResourcesOracle();
    URL[] resources = resourceOracle.findResources(logger, method);
    if (resources.length != 1) {
      logger.log(TreeLogger.ERROR, "Exactly one resource must be specified", null);
      throw new UnableToCompleteException();
    }
    URL resource = resources[0];
    SourceWriter sw = new StringSourceWriter();
    // Write the expression to create the subtype.
    sw.println("new " + method.getReturnType() + "() {");
    sw.indent();
    if (!AbstractResourceGenerator.STRIP_COMMENTS) {
      // Convenience when examining the generated code.
      sw.println("// " + resource.toExternalForm());
    }
    sw.println("public String getText() {");
    sw.indent();
    String toWrite = Util.readURLAsString(resource);
    if (toWrite.length() > MAX_STRING_CHUNK) {
      writeLongString(sw, toWrite);
    } else {
      sw.println("return \"" + Generator.escape(toWrite) + "\";");
    }
    sw.outdent();
    sw.println("}");
    sw.println("public String getName() {");
    sw.indent();
    sw.println("return \"" + method.getSimpleName() + "\";");
    sw.outdent();
    sw.println("}");
    sw.outdent();
    sw.println("}");
    return sw.toString();
  }

  /**
   * A single constant that is too long will crash the compiler with an out of memory error. Break
   * up the constant and generate code that appends using a buffer.
   */
  private void writeLongString(SourceWriter sw, String toWrite) {
    sw.println("StringBuilder builder = new StringBuilder();");
    int offset = 0;
    int length = toWrite.length();
    // Bug fix: the loop previously ran while (offset < length - 1), which
    // silently dropped the final character whenever a chunk boundary landed
    // exactly at length - 1 (e.g. length == MAX_STRING_CHUNK + 1 == 16384).
    while (offset < length) {
      int subLength = Math.min(MAX_STRING_CHUNK, length - offset);
      sw.print("builder.append(\"");
      sw.print(Generator.escape(toWrite.substring(offset, offset + subLength)));
      sw.println("\");");
      offset += subLength;
    }
    sw.println("return builder.toString();");
  }
}
|
# Prints this library's identification banner and records its metadata in globals.
ScriptInfo_backup_folder_archive_functions() {
SCRIPT_NAME="Backup Folder Archive"
SCRIPT_VERSION="3.0"
SCRIPT_DATE="2020-02-20"
SCRIPT_AUTHER="Ben Batschelet"
SCRIPT_AUTHER_CONTACT="ben.batschelet@gmail.com"
SCRIPT_DESCRIPTION="Used to backup a single folder"
SCRIPT_TITLE="Dependency: $SCRIPT_NAME - v$SCRIPT_VERSION - $SCRIPT_DATE - $SCRIPT_AUTHER ($SCRIPT_AUTHER_CONTACT) \n ∟ Description: $SCRIPT_DESCRIPTION"
echo -e " $(YEL "▶︎") $SCRIPT_TITLE";
}
# ----------------------------------------------------------------------------------------------------------------------
# General Usage: backupArchive "backup_task_name" "backup_source" "backup_destination_directory" "backup_destination_filename_prefix" "backup_note" "backup_destination_filename_suffix" "backup_retention_number"
# ======================================================================================================================
# Notes:
# Uses Color Functions Script (source /.../color_text_functions.sh)
# ======================================================================================================================
# Load guard: skip re-initialization if this library has already been sourced.
# Fixed: the expansion is now quoted — unquoted, `[ -z $var ]` misbehaves if the
# variable ever contains whitespace or glob characters.
if [ -z "$script_loaded_backup_folder_archive_functions" ]; then
# Output Script Title
ScriptInfo_backup_folder_archive_functions
# [# Global Static Variables #]
# "$0" is quoted so dependency resolution works from paths containing spaces.
SCRIPT_DEPENDENCIES_DIRECTORY="$(dirname "$0")/dependencies"
# [# Included Libraries & Scripts #] -------------------------------------------------------------------------------
source "$SCRIPT_DEPENDENCIES_DIRECTORY/files_and_folder_functions.sh"
source "$SCRIPT_DEPENDENCIES_DIRECTORY/color_text_functions.sh"
# [# Functions #] --------------------------------------------------------------------------------------------------
# ALL SOURCED & INCLUDED
# [# Main Function #] ----------------------------------------------------------------------------------------------
# Archives one folder to a timestamped .tar.gz and prunes old backups.
# Args: 1=task name, 2=source folder, 3=destination dir, 4=filename prefix,
#       5=note (embedded in the filename), 6=filename suffix,
#       7=number of backups to retain (default 64), 8=tar exclude file (optional).
# Relies on BRK/YEL/WHT/RED/GRN/BLU/NRM/BLK/WRN (color_text_functions.sh) and
# cleanupFile/checkFolder (files_and_folder_functions.sh).
backupArchive()
{
# [# Read Passed Variables #] -----------------------------------------------------------------------------------
BRK 2
# [# Passed Variables Defaults #] Load Default Variables
backup_task_name="Backup Folder to Archive"
backup_source=""
backup_destination_directory=""
backup_destination_filename_prefix="" # "prefix_"
backup_note="note"
backup_destination_filename_suffix="" # "_suffix"
backup_retention_number=64
backup_exclude_file=""
# Set Backup Task Name
if [ -z "$1" ]; then
YEL "Backup Task Name $(WHT "[") $(RED "NOT Passed") $(WHT "]")"
else
YEL "Backup Task Name $(WHT "[") $(GRN "Passed") $(WHT "]")"
backup_task_name="$1"
fi
BLU "∟ Using: $(WHT "$backup_task_name")"
# Set Backup Source Folder Path
if [ -z "$2" ]; then
YEL "Backup Source Folder Path $(WHT "[") $(RED "NOT Passed") $(WHT "]")"
else
YEL "Backup Source Folder Path $(WHT "[") $(GRN "Passed") $(WHT "]")"
backup_source="$2"
fi
BLU "∟ Using: $(WHT "$backup_source")"
# Set Backup Root Destination Directory
if [ -z "$3" ]; then
YEL "Backup Root Destination Directory $(WHT "[") $(RED "NOT Passed") $(WHT "]")"
else
YEL "Backup Root Destination Directory $(WHT "[") $(GRN "Passed") $(WHT "]")"
backup_destination_directory="$3"
fi
BLU "∟ Using: $(WHT "$backup_destination_directory")"
# Set Backup Destination Filename Prefix
if [ -z "$4" ]; then
YEL "Backup Destination Filename Prefix $(WHT "[") $(RED "NOT Passed") $(WHT "]")"
else
YEL "Backup Destination Filename Prefix $(WHT "[") $(GRN "Passed") $(WHT "]")"
backup_destination_filename_prefix="$4"
fi
BLU "∟ Using: $(WHT "$backup_destination_filename_prefix")"
# Set Backup Note ( Is put on Destination File)
if [ -z "$5" ]; then
YEL "Backup Note $(WHT "[") $(RED "NOT Passed") $(WHT "]")"
else
YEL "Backup Note $(WHT "[") $(GRN "Passed") $(WHT "]")"
backup_note="$5"
fi
BLU "∟ Using: $(WHT "$backup_note")"
# Set Backup Destination Filename Suffix
if [ -z "$6" ]; then
YEL "Backup Destination Filename Suffix $(WHT "[") $(RED "NOT Passed") $(WHT "]")"
else
YEL "Backup Destination Filename Suffix $(WHT "[") $(GRN "Passed") $(WHT "]")"
backup_destination_filename_suffix="$6"
fi
BLU "∟ Using: $(WHT "$backup_destination_filename_suffix")"
# Set Number of Backups to Keep (Of same Prefix & Suffix in Destination Directory)
if [ -z "$7" ]; then
YEL "Number of Backups to Keep $(WHT "[") $(RED "NOT Passed") $(WHT "]")"
else
YEL "Number of Backups to Keep $(WHT "[") $(GRN "Passed") $(WHT "]")"
backup_retention_number="$7"
fi
BLU "∟ Using: $(WHT "$backup_retention_number")"
# Set Source for paths to exclude
if [ -z "$8" ]; then
YEL "Exclude File Path $(WHT "[") $(RED "NOT Passed") $(WHT "]")"
else
YEL "Exclude File Path $(WHT "[") $(GRN "Passed") $(WHT "]")"
backup_exclude_file="$8"
fi
BLU "∟ Using: $(WHT "$backup_exclude_file")"
# [# Main #] ---------------------------------------------------------------------------------------------------
# [# Variables #]
# Filename shape: <prefix><timestamp>_(<note>)<suffix>.tar.gz
backup_destination_filename="$backup_destination_filename_prefix$(date +%Y-%m-%d_%H%M%S)_($backup_note)$backup_destination_filename_suffix.tar.gz"
backup_destination_path="$backup_destination_directory/$backup_destination_filename"
# [# Output Task #]
BRK 2
WHT "Backup Folder Archive Task: $backup_task_name ($backup_note)"
WHT "Source: $backup_source"
WHT "Destination: $backup_destination_path"
# Purge/Cleanup Old Backups (retain $backup_retention_number matching files)
# NOTE(review): cleanup runs before checkFolder ensures the directory exists —
# harmless on first run only if cleanupFile tolerates a missing directory; verify.
cleanupFile "$backup_destination_directory" $backup_retention_number ".*$backup_destination_filename_prefix.*$backup_destination_filename_suffix"
# Create Backup Folder for This Backup
checkFolder "$backup_destination_directory"
# Create Archive from Folder
NRM "Archiving: $(BLK "$backup_source") -> $(BLK "$backup_destination_path")"
# Check if Source & Destination Exist
if [ -d "$backup_source" ] && [ -d "$backup_destination_directory" ]; then
if [ -f "$backup_exclude_file" ]; then
# Perform backup with paths excluded
tar -zc -X "$backup_exclude_file" -f "$backup_destination_path" "$backup_source"
else
# Perform standard operations
tar -zcf "$backup_destination_path" "$backup_source"
fi
else
WRN "Backup Source or Destination NOT Valid"
fi
# Output
BRK 8
}
# Set Script Initialize Variable
# Mark the library as loaded so repeated `source` calls become no-ops.
script_loaded_backup_folder_archive_functions=true
fi
|
<gh_stars>0
import { Template } from 'meteor/templating';
import { ReactiveDict } from 'meteor/reactive-dict';
import { ReactiveVar } from 'meteor/reactive-var';
import { PollingData } from '../../api/imagedata/polingdata.js';
// ReactiveDict key toggling validation-error display in the template.
const displayErrorMessages = 'displayErrorMessages';

Template.polls.onCreated(function onCreated() {
  // Reactive flags controlling whether validation errors are rendered.
  this.messageFlags = new ReactiveDict();
  this.messageFlags.set(displayErrorMessages, false);
  this.subscribe('PollingData');
  // Dynamic list of extra attendee input rows. Fixed: ReactiveVar was used
  // without being imported (added to the import block) and the statement was
  // missing its terminating semicolon.
  this.inputs = new ReactiveVar([]);
  // Running head count accumulated by the viewersCount helper.
  this.totalCount = 0;
});
Template.polls.helpers({
// CSS class the template applies when validation errors should be shown.
errorClass() {
return Template.instance().messageFlags.get(displayErrorMessages) ? 'error' : '';
},
// All poll documents from the 'PollingData' subscription.
polls() {
return PollingData.find();
},
// Head count for one entry: the respondent plus their companions.
// NOTE(review): mutating totalCount inside a helper is a render side effect
// and it is never reset, so repeated re-renders inflate the total — verify.
viewersCount : function(viewer, attending){
if (attending === "Yes"){
Template.instance().totalCount = Template.instance().totalCount + viewer.length + 1;
}
return (viewer.length + 1 || 0);
},
// Running total accumulated by viewersCount during rendering.
viewTotal () {
return Template.instance().totalCount;
},
// The dynamic attendee input rows currently displayed.
inputs: function () {
return Template.instance().inputs.get();
}
});
Template.polls.events({
  // Adds one extra attendee input row and refocuses the form.
  'click #addNewValue' (event, template) {
    // Prevent default browser form submit
    event.preventDefault();
    var inputs = template.inputs.get();
    inputs.push("attendee");
    template.inputs.set(inputs);
    $('#name').focus();
    $('#attendees').focus();
  },
  // Removes the most recently added attendee input row.
  'click #removeNewValue' (event, template) {
    // Prevent default browser form submit
    event.preventDefault();
    var inputs = template.inputs.get();
    // Fixed: Array.prototype.pop takes no arguments; the previous
    // inputs.pop("attendee") silently ignored its argument.
    inputs.pop();
    template.inputs.set(inputs);
    $('#name').focus();
    $('#attendees').focus();
  },
  // Validates the form, inserts one PollingData document, then resets the form.
  'click #submitAll'(event, template) {
    // Prevent default browser form submit
    event.preventDefault();
    // True for null/undefined/whitespace-only strings.
    function isBlank(str) {
      return (!str || /^\s*$/.test(str));
    }
    // Get name
    const name = document.getElementById("name").value;
    if (isBlank(name)){
      alert("Your name is the minimum required to add to the Guest List");
      $('#name').focus();
      $('#attendees').focus();
    }
    else {
      // Get attending info
      // NOTE(review): this matches ANY checked input inside the template, not
      // just the yes/no control — confirm the selector is specific enough.
      const attending = template.$('input').is(":checked");
      var willAttend = "";
      if (attending) {
        willAttend = "Yes";
      } else {
        willAttend = "No";
      }
      // Get list of additional people (leading space kept for display).
      var attendeeListDom = document.getElementsByName("attendees");
      var attendeeList = [];
      var i;
      for (i = 0; i < attendeeListDom.length; i++) {
        attendeeList.push(" " + attendeeListDom[i].value);
      }
      // Insert a task into the collection
      PollingData.insert({
        name,
        attendeeList,
        willAttend,
        createdAt: new Date(), // current time
      });
      // Clear form
      document.getElementById("name").value = '';
      document.getElementById("yes").checked = '';
      template.inputs.set([]);
      $('#name').focus();
      $('#attendees').focus();
    }
  },
});
|
# boxes : [ quantity, boxtype, array of box names, box host] ; if box host is a string, all will be on it, if array, number must match quantity and says where each will be
# Spins up the demo topology: two CoreOS docker hosts under Vagrant, two Redis
# boxes placed one per docker host, and one Ubuntu trusty instance on EC2.
ansible-playbook -i inventory createdemo.yml --extra-vars='{"boxes":[{"quantity": 2, "role": "coreos", "title": ["docker-1", "docker-2"], "parent": "vagrant"}, {"quantity": 2, "role": "redis", "title": ["redis-1", "redis-2"], "parent": ["docker-1", "docker-2"]}, {"quantity": 1, "role": "ubuntu", "title": ["trusty"], "parent": "ec2"}], "awsregion": "us-east-1", "awskeypair": "datadog-demo" }'
# ansible-playbook -i inventory createdemo.yml --extra-vars='{"boxes": [1, 2, 3]}'
|
const withBundleAnalyzer = require("@next/bundle-analyzer")({
enabled: process.env.ANALYZE === "true",
});
const config = {
pageExtensions: ["js", "jsx", "mdx"],
eslint: {
// Warning: Dangerously allow production builds to successfully complete even if
// your project has ESLint errors.
ignoreDuringBuilds: true,
},
};
module.exports = withBundleAnalyzer(config);
|
<gh_stars>0
package cn.st.aop;
/**
* @description:
* @author: st
* @create: 2021-02-01 09:59
**/
public class Target implements TargetInterface{
@Override
public void save() {
// Uncomment the next line to simulate a failure (e.g. for after-throwing advice demos).
// int i = 1/0;
System.out.println("save running.....");
}
}
|
/*包含常用polyfill、常用工具函数(如ajax等)、AMD模块加载*/
/*此版本不包含DOM查询、DOM批量操作和DOM事件封装*/
// Calling Sky(...) dispatches to whichever implementation was registered via
// Sky.overload for the given argument types (see the overload IIFE below).
var Sky=function(){
return Sky.overload(arguments,this);
};
// Expose Sky as $ unless something else (e.g. jQuery) already claimed it.
this.$=this.$ || Sky;
(function(){
// Registered overload rules, kept sorted by ascending number of checks.
var rules=[];
// Applies the i-th type-check predicate to the i-th argument (`this` = args).
function ckeck(ckeckFunc,index){
return ckeckFunc(this[index]);
}
function compare(x, y){//comparison function keeping rules sorted by arity
return x.checks.length-y.checks.length;
}
// Dual-purpose. With `target`: registers an overload (checks = predicates,
// func = public entry point, target = implementation). Without `target`:
// dispatches — scans rules from most- to least-specific and invokes the first
// whose entry point matches args.callee and whose predicates all accept.
Sky.overload=function(checks,func,target){
if(target){
rules.push({
'checks':checks,
'func':func,
'target':target
});
rules.sort(compare);
}else{
var args=checks;
var thisVal=func;
var i=rules.length;
while(i--){
var rule=rules[i];
if(args.callee===rule.func){
if(rule.checks.length>=args.length){
if(rule.checks.every(ckeck,args)){
return rule.target.apply(thisVal,args);
}
}
}
}
// No rule matched: fall back to returning the Sky namespace itself.
return Sky;
}
};
})();
/* Type predicates attached to the Sky namespace. */

Sky.isArray=function(a){
  return Array.isArray(a);
};
Sky.isDate=function(obj){
  return Object.prototype.toString.call(obj)==='[object Date]';
};
Sky.isRegExp=function(obj){
  return Object.prototype.toString.call(obj)==='[object RegExp]';
};
Sky.isString=function(obj){
  return Object.prototype.toString.call(obj)==='[object String]';
};
Sky.isFunction=function(obj){
  return Object.prototype.toString.call(obj)==='[object Function]';
};
Sky.isNumber=function(obj){
  return Object.prototype.toString.call(obj)==='[object Number]';
};
// instanceof with primitive boxing, so Sky.is("a", String) is true.
Sky.is=function(obj,Clazz){
  obj=Object(obj);
  return obj instanceof Clazz;
};
// True for objects that are not boxed primitives or functions (arrays count).
Sky.isObject=function(obj){
  var type=typeof obj;
  if(type!=="object"){
    return false;
  }
  type=Object.prototype.toString.call(obj);
  switch(type){
    case '[object String]':
    case '[object Number]':
    case '[object Function]':
    case '[object Boolean]':
      return false;
  }
  return true;
};
Sky.isDefined=function(obj){
  return obj!==void 0;
};
// Duck-typed window check (safe across frames).
Sky.isWindow=function(obj){
  return obj && typeof obj === "object" && "setInterval" in obj;
};
// NOTE(review): intentionally returns true for null (kept for compatibility).
Sky.isPlainObject=function(obj){
  if(obj===null){
    return true;
  }
  if(typeof obj!=="object" || obj.nodeType || Sky.isWindow(obj)){
    return false;
  }
  return Object.getPrototypeOf(obj)===Object.prototype;
};
// True when obj has a non-negative integer `length` property.
Sky.isArrayLike=function(obj){
  var length=obj.length;
  if(typeof length !="number" || length<0 || isNaN(length) || Math.ceil(length)!=length){
    return false;
  }
  return true;
};
// True when obj parses to a number (leading-numeric strings included).
Sky.isNumeric=function(obj){
  var n=parseFloat(obj);
  return !isNaN(n);
};
if(this.HTMLElement){
  Sky.isElement=function(obj){
    return obj instanceof HTMLElement;
  };
}else{
  // Old IE: fall back to a nodeType probe.
  Sky.isElement=function(obj){
    return obj?obj.nodeType===1:false;
  };
}
// Empty if null/undefined, zero length/size, or an empty toArray() result.
Sky.isEmpty=function(obj){
  if(obj==null) return true;
  if(Sky.isNumber(obj.length)){
    return !obj.length;
  }
  if(Sky.isNumber(obj.size)){
    return !obj.size;
  }
  if(Sky.isFunction(obj.size)){
    return !obj.size();
  }
  if(Sky.isFunction(obj.toArray)){
    return !obj.toArray().length;
  }
  return false;
};
// Removed: byte-identical duplicate definitions of isArrayLike and isNumeric
// that immediately re-assigned the same functions (dead code).
Sky.isDocument=function(obj){
  return obj===document;
};
// Feature-support flags populated further below.
Sky.support={};
(function(){
// Browser sniffing: user-agent regexes plus capability probes for IE versions.
var userAgent = navigator.userAgent.toLowerCase();
Sky.browser={
version:(userAgent.match( /.+(?:rv|it|ra|ie)[\/: ]([\d.]+)/ ) || [])[1],
webkit:/webkit/.test( userAgent ),
opera:/opera/.test( userAgent ),
msie:/msie/.test( userAgent ) && !/opera/.test( userAgent ),
firefox:/firefox/.test( userAgent ),
safari:/safari/.test( userAgent ),
chrome:/chrome/.test( userAgent ),
android:/android/.test( userAgent ),
ios:/(iphone|ipad|ipod)/.test( userAgent ),
mobile:/mobile/.test( userAgent ),
quirks:(document.compatMode == 'BackCompat')
};
// IE detection by capability rather than UA string (robust to UA spoofing):
// each version flag pairs a feature that first appeared with one that was
// removed in the next release.
var ie="ActiveXObject" in window;
Sky.browser.ie5=ie&&!document.compatMode;//IE5 and below
Sky.browser.ie6=ie&&!!document.compatMode&&!window.XMLHttpRequest;
Sky.browser.ie7=ie&&!!window.XMLHttpRequest&&!document.querySelector;
Sky.browser.ie8=ie&&!!document.querySelector&&!document.addEventListener;
Sky.browser.ie9=ie&&!!document.addEventListener&&!window.atob;
Sky.browser.ie10=ie&&!!window.atob&&!!window.attachEvent;
Sky.browser.ie11=ie&&!!window.atob&&!window.attachEvent;
if(Sky.browser.ie11){
Sky.browser.ie=11;
}else if(ie){
Sky.browser.ie=parseInt(Sky.browser.version);
}
})();
// Shared no-op callback.
Sky.noop=function(){};
// Probe VBScript support (classic IE): redefine alert via execScript; if the
// engine ran the VBScript, `typeof alert` becomes "unknown".
// NOTE(review): this intentionally replaces window.alert with a VBScript
// msgbox on such browsers — confirm that is desired globally.
Sky.support.VBScript=false;
if(window.execScript){
try{
window.execScript([
'Function alert(msg)',
'msgbox msg',
'End Function' //msgbox is used to suppress the alert dialog's icon
].join('\n'), 'VBScript');
if(typeof alert=="unknown"){
Sky.support.VBScript=true;
}
}catch(e){}
}
// Array.from polyfill: copies an array-like into a real array, optionally
// mapping each element with mapFn/thisArg.
if(!Array.from){
  Array.from=function(arrayLike, mapFn, thisArg){
    var result;
    try{
      result=Array.prototype.slice.call(arrayLike);
    }catch(err){
      // Some host objects refuse generic slice(); copy element by element.
      result=new Array();
      for(var idx=0;idx<arrayLike.length;idx++){
        result.push(arrayLike[idx]);
      }
    }
    return mapFn ? result.map(mapFn, thisArg) : result;
  };
}
// Array.isArray polyfill via the [[Class]] tag.
if(!Array.isArray){
  Array.isArray=function(obj){
    return Object.prototype.toString.call(obj)==='[object Array]';
  };
}
// Polyfill: position of the first occurrence of an element in the array.
if(!Array.prototype.indexOf){
  Array.prototype.indexOf=function(target,fromIndex){
    var start=isNaN(fromIndex)?0:fromIndex;
    for(var pos=start;pos<this.length;pos++){
      if(this[pos]===target){
        return pos;
      }
    }
    return -1;
  };
}
// Polyfill: membership test built on indexOf.
if(!Array.prototype.includes){
  Array.prototype.includes=function(search,start){
    return this.indexOf(search, start)!==-1;
  };
}
// Polyfill: index of the LAST occurrence of `e` at or before fromIndex
// (defaults to the last element).
if(!Array.prototype.lastIndexOf){
  Array.prototype.lastIndexOf = function(e, fromIndex) {
    fromIndex=isNaN(fromIndex)?this.length-1:fromIndex;
    // Fixed: the loop condition was `i<this.length` while decrementing, which
    // never stops at index 0 and keeps reading negative indices forever; a
    // downward scan must terminate at i >= 0.
    for (var i=fromIndex,j; i>=0; i--) {
      j=this[i];
      if(j===e){return i;}
    }
    return -1;
  };
}
// Polyfill: index of the first element satisfying the predicate, else -1.
if(!Array.prototype.findIndex){
  Array.prototype.findIndex = function(callback, thisArg) {
    for(var idx=0;idx<this.length;idx++){
      var hit=callback.call(thisArg,this[idx],idx,this);
      if(hit){
        return idx;
      }
    }
    return -1;
  };
}
// Polyfill: first element satisfying the predicate, else undefined.
if(!Array.prototype.find){
  Array.prototype.find = function(callback, thisArg) {
    var idx=this.findIndex(callback, thisArg);
    return idx>=0 ? this[idx] : undefined;
  };
}
// Iterate over the array.
if(!Array.prototype.forEach){
Array.prototype.forEach =function(callback, thisArg){
// `len` snapshots the starting length, but `i<this.length` is re-checked so
// elements removed mid-iteration are not visited past the new end.
var len=this.length;
for(var i=0,j;i<len && i<this.length; i++){
j=this[i];
callback.call(thisArg,j,i,this);
}
};
}
// Map each element through fn, returning a new array.
if(!Array.prototype.map){
Array.prototype.map = function(fn, context) {
var arr = [];
for (var k = 0, length = this.length; k < length; k++) {
arr.push(fn.call(context, this[k], k, this));
}
return arr;
};
}
// Keep only elements for which fn returns a truthy value.
if(!Array.prototype.filter){
Array.prototype.filter = function(fn, context) {
var arr = [];
for (var k = 0, length = this.length; k < length; k++) {
fn.call(context, this[k], k, this) && arr.push(this[k]);
}
return arr;
};
}
// True if any element satisfies fn (stops after the first hit).
if(!Array.prototype.some){
Array.prototype.some = function(fn, context) {
var passed = false;
for (var k = 0, length = this.length; k < length; k++) {
if (passed === true) break;
passed = !!fn.call(context, this[k], k, this);
}
return passed;
};
}
// True only if every element satisfies fn (stops after the first miss).
if(!Array.prototype.every){
Array.prototype.every = function(fn, context) {
var passed = true;
for (var k = 0, length = this.length; k < length; k++) {
if (passed === false) break;
passed = !!fn.call(context, this[k], k, this);
}
return passed;
};
}
// Polyfill: fold the array left-to-right, skipping holes.
// Fixed: per spec, when initialValue is omitted the accumulator starts from
// the first present element (and an empty array throws); the previous version
// started from undefined, producing callback(undefined, this[0], ...).
if(!Array.prototype.reduce){
  Array.prototype.reduce=function(callback,initialValue){
    var i=0, value;
    if(arguments.length>=2){
      value=initialValue;
    }else{
      // Seed from the first non-hole element.
      while(i<this.length && !(i in this)) i++;
      if(i>=this.length){
        throw new TypeError('Reduce of empty array with no initial value');
      }
      value=this[i++];
    }
    for (;i<this.length;i++) {
      if (i in this) {
        value=callback(value,this[i],i,this);
      }
    }
    return value;
  };
}
(function(){//TODO
// Minimal iterator used by the entries() shim below.
function Iterator(arr){
this.array=arr;
this.i=0;
}
// Iterator-protocol-shaped next(): {done, value}.
Iterator.prototype.next=function(){
var result={};
result.done=this.array.length<=this.i;
result.value=this.array[this.i];
if(!result.done){
this.i++;
}
return result;
};
// NOTE(review): assigned unconditionally, so this SHADOWS the native
// Array.prototype.entries on modern engines, and it yields the element itself
// rather than the standard [index, value] pair. The Map shim below depends on
// these non-standard semantics — confirm before changing.
Array.prototype.entries=function(){
return new Iterator(this);
};
})();
// Polyfills: strip whitespace (incl. BOM and NBSP) from string edges.
if(!String.prototype.trim){
  String.prototype.trim=function() {
    var both=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g;
    return this.replace(both,'');
  };
}
if(!String.prototype.trimStart){
  String.prototype.trimStart=function() {
    var leading=/^[\s\uFEFF\xA0]+/g;
    return this.replace(leading,'');
  };
}
if(!String.prototype.trimEnd){
  String.prototype.trimEnd=function() {
    var trailing=/[\s\uFEFF\xA0]+$/g;
    return this.replace(trailing,'');
  };
}
// Polyfill: true if the substring beginning at `position` (default 0)
// equals `prefix`.
if(!String.prototype.startsWith){
  String.prototype.startsWith=function(prefix,position){
    if(prefix===null){ return false;}
    position=position?position:0;
    // Fixed: the slice end must be position + prefix.length; the previous
    // `slice(position, prefix.length)` compared the wrong span whenever a
    // non-zero position was supplied.
    return this.slice(position, position + prefix.length) === prefix;
  };
}
// Polyfill: true if the string, considered up to `position` (default: its full
// length), ends with `prefix`.
if(!String.prototype.endsWith){
  String.prototype.endsWith=function(prefix,position){
    var length=prefix.length;
    // Fixed: the previous `position<length?position:this.length` compared the
    // end position against the SEARCH string's length, so any position >=
    // prefix.length was discarded (e.g. "abcdef".endsWith("ab", 2) was false).
    if(position===undefined || position>this.length){
      position=this.length;
    }
    return this.slice(position-length, position) === prefix;
  };
}
// Polyfill: substring membership test starting at `start` (default 0).
if(!String.prototype.includes) {
  String.prototype.includes = function(search, start) {
    var from = typeof start === 'number' ? start : 0;
    // A match cannot fit past the end of the string.
    if (from + search.length > this.length) {
      return false;
    }
    return this.indexOf(search, from) !== -1;
  };
}
// Polyfill: the string concatenated `count` times.
if(!String.prototype.repeat){
  String.prototype.repeat=function(count){
    if(count<0){
      throw 'RangeError repeat count must be non-negative';
    }
    if(count==Number.POSITIVE_INFINITY){
      throw 'RangeError repeat count must be less than infinity';
    }
    // Joining count+1 empty slots with the string yields count copies.
    var slots=new Array(count+1);
    return slots.join(this);
  };
}
// Polyfill: left-pad with padString (default single space) to targetLength.
if(!String.prototype.padStart){
  String.prototype.padStart=function(targetLength,padString){
    var missing=targetLength-this.length;
    if(missing<0) return this+"";
    var pad=padString||" ";
    // Repeat enough copies of the pad, then trim to the exact deficit.
    return pad.repeat(Math.ceil(missing/pad.length)).substr(0,missing)+this;
  };
}
// Polyfill: right-pad with padString (default single space) to targetLength.
if(!String.prototype.padEnd){
  String.prototype.padEnd=function(targetLength,padString){
    var missing=targetLength-this.length;
    if(missing<0) return this+"";
    var pad=padString||" ";
    return this+pad.repeat(Math.ceil(missing/pad.length)).substr(0,missing);
  };
}
// Small Math/Number polyfills.
Math.log2 = Math.log2 || function(n){ return Math.log(n) / Math.log(2); };
// Unlike global isNaN, only true for actual NaN numbers (no coercion).
Number.isNaN = Number.isNaN || function(value){
  return typeof value === "number" && isNaN(value);
};
// True for finite numbers with no fractional part.
Number.isInteger = Number.isInteger || function(value){
  return typeof value === "number" && isFinite(value) && Math.floor(value) === value;
};
(function(){
  /** Date formatting polyfills; e.g. format="%Y-%m-%d %H:%M:%S". */
  // Left-pad a number to two digits.
  function pad2(number) {
    if(number<10){
      return '0'+number;
    }
    return number;
  }
  if (!Date.prototype.toLocaleFormat) {//natively supported by some browsers
    Date.prototype.toLocaleFormat = function(format) {
      var Y=this.getFullYear();
      var M=pad2(this.getMonth()+1);
      var D=pad2(this.getDate());
      var h=pad2(this.getHours());
      var m=pad2(this.getMinutes());
      var s=pad2(this.getSeconds());
      var o={
        "%x":Y+"/"+M+"/"+D,
        "%X":h+":"+m+":"+s,
        "%Y":Y,
        "%y":pad2(this.getYear()%100),
        "%m":M,
        "%e":this.getDate(),
        "%d":D,
        "%H":h,
        // Fixed: 12-hour clock shows 12 (not 00) at noon/midnight.
        // NOTE(review): %i is assumed to mean 12-hour hours here, as in the
        // original — confirm no caller expects minutes (MySQL-style %i).
        "%i":pad2(this.getHours()%12 || 12),
        "%M":m,
        "%S":s,
        // Fixed: AM covers hours 0-11, PM covers 12-23. The previous
        // `getHours()%12>1` mislabeled e.g. 03:00 as PM and 13:00 as AM.
        "%p":this.getHours()<12?"AM":"PM",
        "%%":"%"
      };
      o["%T"]=o["%X"];
      return format.replace(/%[xXTYymedHiMSp%]/g,function(word){
        for(var k in o){
          if(k==word){
            return o[k];
          }
        }
        return word;
      });
    };
  }
  if (!Date.prototype.toISOString){//natively supported by some browsers
    Date.prototype.toISOString = function() {
      // Always UTC, millisecond precision, 'Z' suffix.
      return this.getUTCFullYear()+
      '-'+pad2(this.getUTCMonth()+1)+
      '-'+pad2( this.getUTCDate() ) +
      'T'+pad2( this.getUTCHours() ) +
      ':'+pad2( this.getUTCMinutes() ) +
      ':'+pad2( this.getUTCSeconds() ) +
      '.'+new String(this.getUTCMilliseconds()).padStart(3,'0')+'Z';
    };
  }
})();
// Serialize dates in JSON as ISO-8601 (matches the modern default).
if(!Date.prototype.toJSON){
Date.prototype.toJSON=Date.prototype.toISOString;
}
// Chrome (and 360, which uses Chrome's engine) renders locale strings in
// English; force a fixed numeric format instead (locale is not considered).
// NOTE(review): this overrides the native toLocale* methods for EVERY consumer
// on the page whenever the default locale string contains Latin letters —
// confirm callers expect the fixed "%Y-%m-%d %H:%M:%S" form.
if(new Date().toLocaleString().match(/[a-z]/i)){
Date.prototype.toLocaleString = function() {
return this.toLocaleFormat("%Y-%m-%d %H:%M:%S");
};
Date.prototype.toLocaleDateString = function() {
return this.toLocaleFormat("%Y-%m-%d");
};
Date.prototype.toLocaleTimeString = function() {
return this.toLocaleFormat("%H:%M:%S");
};
}
// Date.now polyfill.
if(!Date.now){
Date.now=function(){
return new Date().getTime();
};
}
// Probe the IE<9 DontEnum bug: give Sky an own `toString` and check whether
// propertyIsEnumerable can see it. On buggy engines, for-in skips own
// properties that shadow Object.prototype members, so those names must be
// visited explicitly.
Sky.toString=null;
if(!Sky.propertyIsEnumerable('toString')){
  Sky.dontEnums=["toString","toLocaleString","valueOf","hasOwnProperty", "isPrototypeOf","propertyIsEnumerable"];
  // Iterates own + inherited enumerable properties; the callback may abort the
  // walk (and make forIn return false) by returning false.
  Sky.forIn=function(obj,fn,thisArg){
    thisArg=thisArg || window;
    for(var key in obj) {
      // Host/ActiveX objects (not instanceof Object) leak internal keys.
      if(!(obj instanceof Object)){
        if(key.startsWith("__") || key=="constructor"){
          continue ;
        }
      }
      if(fn.call(thisArg,obj[key],key)===false){
        return false;
      }
    }
    var nonEnumIdx=Sky.dontEnums.length;
    var proto=Object.getPrototypeOf(obj);
    // Visit shadowed DontEnum names that for-in missed.
    while(nonEnumIdx--){
      var prop=Sky.dontEnums[nonEnumIdx];
      if(prop in obj && obj[prop]!==proto[prop]){
        if(fn.call(thisArg,obj[prop],prop)===false){
          return false;
        }
      }
    }
    return true;
  };
  // Iterates own enumerable properties only; same early-abort contract.
  Sky.forOwn=function(obj,fn,thisArg){
    thisArg=thisArg || window;
    var type=typeof obj;
    // Fixed: IE reports ActiveX members as typeof "unknown"; the original
    // compared against the misspelled "unknow", so this guard never fired.
    if(type=="unknown"){
      return true;
    }
    if(type!="object"){
      obj=Object(obj);
    }
    for(var key in obj) {
      if(!(obj instanceof Object)){
        if(key.startsWith("__") || key=="constructor"){
          continue ;
        }
      }
      if(Sky.hasOwn(obj,key)){
        if(fn.call(thisArg,obj[key],key)===false){
          return false;
        }
      }
    }
    // Visit shadowed DontEnum names that for-in missed.
    for(var i=0;i<Sky.dontEnums.length;i++){
      var prop=Sky.dontEnums[i];
      if(Sky.hasOwn(obj,prop)){
        if(fn.call(thisArg,obj[prop],prop)===false){
          return false;
        }
      }
    }
    return true;
  };
  // hasOwnProperty that tolerates host objects lacking it.
  Sky.hasOwn=function(obj,key){
    if(!(key in obj)){
      return false;
    }
    var value=obj[key];
    if(typeof obj=="object" && !(obj instanceof Object)){
      if(Sky.isFunction(value)){
        return true;
      }
      return false;
    }
    return Object.prototype.hasOwnProperty.call(obj,key);
  };
}else{
  // Sane engines: plain for-in / hasOwnProperty are sufficient.
  Sky.forIn=function(obj,fn,thisArg){
    thisArg=thisArg || window;
    for(var key in obj) {
      if(fn.call(thisArg,obj[key],key)===false){
        return false;
      }
    }
    return true;
  };
  Sky.forOwn=function(obj,fn,thisArg){
    thisArg=thisArg || window;
    for(var key in obj) {
      if(Object.prototype.hasOwnProperty.call(obj,key)){
        if(fn.call(thisArg,obj[key],key)===false){
          return false;
        }
      }
    }
    return true;
  };
  Sky.hasOwn=function(obj,key){
    return Object.prototype.hasOwnProperty.call(obj,key);
  };
}
// Returns a copy of obj containing only the own properties named in `keys`.
Sky.pick=function(obj,keys){
  var picked={};
  if(obj){
    Sky.forOwn(obj, function(value,key){
      if(keys.indexOf(key)>=0){
        picked[key]=value;
      }
    });
  }
  return picked;
};
// Returns a copy of obj containing every own property NOT named in `keys`.
Sky.omit=function(obj,keys){
  var remaining={};
  if(obj){
    Sky.forOwn(obj, function(value,key){
      if(keys.indexOf(key)<0){
        remaining[key]=value;
      }
    });
  }
  return remaining;
};
// Own-property values, via Sky.forOwn so IE DontEnum names are included.
if(!Object.values){
Object.values=function(obj){
var result=[];
Sky.forOwn(obj,function(value,key){
result.push(obj[key]);
});
return result;
};
}
// Own-property names, via Sky.forOwn so IE DontEnum names are included.
if(!Object.keys){
Object.keys=function(obj){
var result=[];
Sky.forOwn(obj,function(value,key){
result.push(key);
});
return result;
};
}
// Copies own enumerable properties of each source onto target.
if(!Object.assign){
Object.assign=function(target, varArgs){
if(target==null){
throw 'Cannot convert undefined or null to object';
}
var to=Object(target);
for(var i=1;i<arguments.length;i++){
var obj=arguments[i];
if(obj!=null){
Sky.forOwn(obj,function(v,k){
to[k]=v;
});
}
}
// NOTE(review): returns `target` rather than the boxed `to`, unlike the
// spec — only observable when target is a primitive.
return target;
};
}
// Classical inheritance helper: copies static members, then wires the
// prototype chain so `clazz` extends `superClazz`.
Sky.inherits=function(clazz,superClazz){
  Object.assign(clazz,superClazz);
  clazz.prototype=Object.create(superClazz.prototype);
  // Recorded so other code (and getPrototypeOf fallback) can find the parent.
  clazz.superclass=superClazz;
  clazz.prototype.constructor=clazz;
};
// Object.create polyfill (prototype argument only; no property descriptors).
if(!Object.create){
Object.create=function(proto){
function F(){}
F.prototype = proto;
return new F();
};
}
// Object.is polyfill: SameValue (NaN equals NaN; +0 differs from -0).
if (!Object.is){
Object.is=function(x, y){
if(x===y){// Steps 1-5, 7-10
// Steps 6.b-6.e: +0 != -0
return x!==0 || 1/x===1/y;
}else{
// Step 6.a: NaN == NaN
return x!==x && y!==y;
}
};
}
// Object.getPrototypeOf polyfill: prefer __proto__, otherwise walk the
// constructor chain (relies on Sky.inherits having recorded `superclass`).
if(!Object.getPrototypeOf){
if('__proto__' in Sky){
Object.getPrototypeOf=function(object){
return object.__proto__;
};
}else{
Object.getPrototypeOf=function(object){
var constructor=object.constructor;
if(Sky.isFunction(constructor)){
if(object!=constructor.prototype){
return constructor.prototype;
}else if('superclass' in constructor){
return constructor.superclass.prototype;
}
}
console.warn("cannot find Prototype");
return Object.prototype;
};
}
}
// Fallback Object.defineProperty/defineProperties built on the legacy
// __defineGetter__/__defineSetter__ API (old Gecko/WebKit).
// Fixed: the guard previously read Sky.support.__defineSetter__, which is
// never assigned anywhere in this file, so the whole polyfill was dead code;
// probe the capability directly instead.
if(Object.prototype.__defineSetter__){
  if(!Object.defineProperty) {
    Object.defineProperty=function(obj, prop, descriptor){
      if(descriptor.get) obj.__defineGetter__(prop,descriptor.get);
      if(descriptor.set) obj.__defineSetter__(prop,descriptor.set);
      // Fixed: use an 'in' check so falsy values (0, "", false, null) are
      // still assigned; the old truthiness test silently dropped them.
      if('value' in descriptor) obj[prop]=descriptor.value;
    };
  }
  if(!Object.defineProperties){
    Object.defineProperties=function(obj,properties){
      for(var key in properties){
        var descriptor=properties[key];
        if(descriptor.get) obj.__defineGetter__(key,descriptor.get);
        if(descriptor.set) obj.__defineSetter__(key,descriptor.set);
        if('value' in descriptor) obj[key]=descriptor.value;
      }
    };
  }
}
// Function.prototype.bind polyfill: fixes `this` and pre-applies arguments.
// (Unlike the native version, constructing via `new` is not supported.)
if(!Function.prototype.bind){
  Function.prototype.bind=function(context){
    var target=this;
    var preset=Array.prototype.slice.call(arguments,1);
    return function(){
      var callArgs=preset.concat(Array.from(arguments));
      return target.apply(context,callArgs);
    };
  };
}
// Minimal Map polyfill backed by an array of [key, value] pairs (O(n) ops).
// NOTE(review): `Map = ...` is an implicit-global assignment; in a strict or
// CommonJS module scope this would misbehave — fine in the classic browser
// script context this file targets.
if(!this.Map){
Map=function(){
this.items=[];
this.size=0;
};
// Relies on the (non-standard) Array.prototype.entries shim defined above.
Map.prototype.entries=function(){
return this.items.entries();
};
Map.prototype.clear=function(){
this.items.splice(0,this.items.length);
this.size=0;
};
// Returns the removed [key, value] pair, or false when absent
// (non-standard: the native method returns a boolean).
Map.prototype["delete"]=function(key){
var i=this.items.findIndex(function(item){
return item[0]===key;
});
if(i>=0){
var r=this.items[i];
this.items.splice(i,1);
this.size=this.items.length;
return r;
}
return false;
};
// Spec-like callback order (value, key, ...); also passes the index.
Map.prototype.forEach=function(callbackfn,thisArg){
var len=this.size;
for(var i=0,j;i<len; i++){
j=this.items[i];
if(j){
callbackfn.call(thisArg,j[1],j[0],i,this);
}
}
};
Map.prototype.get=function(key){
var r=this.items.find(function(item){
return item[0]===key;
});
if(r){
return r[1];
}
};
Map.prototype.has=function(key){
return this.items.some(function(item){
return item[0]===key;
});
};
// Insert-or-update; returns this for chaining (like the native method).
Map.prototype.set=function(key,value){
var r=this.items.find(function(item){
return item[0]===key;
});
if(r){
r[1]=value;
}else{
this.items.push([key,value]);
}
this.size=this.items.length;
return this;
};
}
// Non-standard alias used elsewhere in this library.
if(!Map.prototype.remove){
Map.prototype.remove=Map.prototype['delete'];
}
// Minimal Set polyfill backed by an array (O(n) membership checks).
// NOTE(review): like the Map shim above, `Set = ...` is an implicit-global
// assignment aimed at classic browser script scope.
if(!this.Set){
Set=function(){
this.items=[];
this.size=0;
};
Set.prototype.has=function(value){
return this.items.indexOf(value)>=0;
};
// Adds only when absent; keeps `size` in sync with the backing array.
Set.prototype.add=function(value){
if(!this.has(value)){
this.items.push(value);
this.size=this.items.length;
}
};
Set.prototype['delete']=function(value){
var i=this.items.indexOf(value);
if(i>=0){
this.items.splice(i,1);
this.size=this.items.length;
return true;
}
return false;
};
Set.prototype.clear=function(){
this.items.splice(0,this.items.length);
this.size=0;
};
// Spec-style callback: (value, value, set).
Set.prototype.forEach=function(callback,thisArg){
for(var i=0,j;i<this.size; i++){
j=this.items[i];
callback.call(thisArg,j,j,this);
}
};
// Snapshot copy of the members.
Set.prototype.toArray=function(){
return this.items.slice(0);
};
}
// Non-standard alias used elsewhere in this library.
if(!Set.prototype.remove){
Set.prototype.remove=Set.prototype['delete'];
}
// toArray for native Sets (the polyfill above already defines its own).
if(!Set.prototype.toArray){
Set.prototype.toArray=function(){
var a=[];
this.forEach(function(item){
a.push(item);
});
return a;
};
}
// Bulk add from anything exposing forEach; returns this for chaining.
if(!Set.prototype.addAll){
Set.prototype.addAll=function(data){
if(data.forEach){
data.forEach(function(item){
this.add(item);
},this);
}
return this;
};
}
// Bulk remove; relies on the `remove` alias above.
if(!Set.prototype.removeAll){
Set.prototype.removeAll=function(data){
if(data.forEach){
data.forEach(function(item){
this.remove(item);
},this);
}
return this;
};
}
// Keeps only the members also present in `data` (a Set-like exposing .has, or
// an array-like exposing .indexOf); returns this for chaining.
if(!Set.prototype.retainAll){
  Set.prototype.retainAll=function(data){
    this.forEach(function(item){
      if(data.has){
        if(!data.has(item)) this.remove(item);
      }else if(data.indexOf){
        if(data.indexOf(item)<0) this.remove(item);
      }
    },this);
    return this;
  };
}
// Removed: a second `if(!Set.prototype.toArray)` guard that duplicated the
// one defined earlier — toArray always exists by this point (the Set polyfill
// and the first guard both define it), so the block was dead code.
// URLSearchParams polyfill (query-string parse/build). Internal storage is an
// array of [value, key] pairs — note the REVERSED order relative to the
// spec's [key, value]; all methods below index accordingly.
var URLSearchParams;
if(typeof this==="undefined" || !this.URLSearchParams){
  URLSearchParams=function(paramsString){
    this._data=new Array();
    if(paramsString){
      var i,pair;
      if(Array.isArray(paramsString)){
        i=this._data.length=paramsString.length;
        while(i-->0){
          pair=paramsString[i];
          // Fixed: the loop variable is `pair`; the original read `pairs[1]`/
          // `pairs[0]` (undeclared in this branch), so constructing from an
          // array of [key, value] pairs always threw.
          this._data[i]=new Array(pair[1],pair[0]);
        }
      }else{
        var pairs=paramsString.split("&");
        i=this._data.length=pairs.length;
        while(i-->0){
          pair=pairs[i];
          if(pair){
            var id=pair.indexOf("=");
            this._data[i]=new Array(decodeURIComponent(pair.substring(id+1,pair.length)),decodeURIComponent(pair.substring(0,id)));
          }
        }
      }
    }
  };
  // Appends without de-duplicating, like the native method.
  // Fixed: entries must be stored as [value, key] to match every accessor
  // below; the original pushed [key, value], so appended entries were
  // invisible to get/has and serialized reversed by toString.
  URLSearchParams.prototype.append=function(key,value){
    this._data.push([value,key]);
  };
  // First value for the key, or null when absent.
  URLSearchParams.prototype.get=function(key){
    var item=this._data.find(function(item){
      return item[1]==key;
    });
    if(item) return item[0];
    return null;
  };
  URLSearchParams.prototype.getAll=function(key){
    return this._data.filter(function(item){
      return item[1]==key;
    }).map(function(item){
      return item[0];
    });
  };
  // Updates the first entry for the key, or appends when absent.
  URLSearchParams.prototype.set=function(key,value){
    var item=this._data.find(function(item){
      return item[1]==key;
    });
    if(item){
      item[0]=value;
    }else{
      this.append(key,value);
    }
  };
  URLSearchParams.prototype['delete']=function(key){
    this._data=this._data.filter(function(item){
      return item[1]!=key;
    });
  };
  URLSearchParams.prototype.has=function(key){
    return this._data.some(function(item){
      return item[1]==key;
    });
  };
  // Serializes back to "key=value&..." with percent-encoding.
  URLSearchParams.prototype.toString=function(){
    return this._data.map(function(item){
      return encodeURIComponent(item[1])+"="+encodeURIComponent(item[0]);
    }).join("&");
  };
  // Fixed: boolean comparators give engine-dependent orderings; a sort
  // comparator must return negative/zero/positive.
  URLSearchParams.prototype.sort=function(){
    return this._data.sort(function(a,b){
      return a[1]<b[1]?-1:(a[1]>b[1]?1:0);
    });
  };
  // NOTE(review): delegates to Array#forEach, so the callback receives the
  // raw [value, key] pair — not (value, key) as in the spec; confirm callers.
  URLSearchParams.prototype.forEach=function(fn,thisArg){
    this._data.forEach.apply(this._data,arguments);
  };
}else{
  // In module scopes `var URLSearchParams` would shadow the global with
  // undefined; point the local binding at the native implementation.
  URLSearchParams=this.URLSearchParams;
}
var URL;
(function(window){
var SearchParams=function(url){
this._url=url;
};
SearchParams.prototype=Object.create(URLSearchParams.prototype);
["append","set","delete"].forEach(function(method){
SearchParams.prototype[method]=function(key,value){
var searchParams=new URLSearchParams(this._url.search.replace(/^\?/,""));
searchParams[method].apply(searchParams,arguments);
this._url.search="?"+searchParams.toString();
};
});
["getAll","get","has","toString"].forEach(function(method){
SearchParams.prototype[method]=function(key,value){
var searchParams=new URLSearchParams(this._url.search.replace(/^\?/,""));
return searchParams[method].apply(searchParams,arguments);
};
});
var url=null;
try{
url=new URL(location.href);
}catch(e){
}
if(!url || !('href' in url)){
URL=function(relativePath, absolutePath){
var path,arr,me=this;
if(!Object.defineProperties){
me=VBUrlFactory();
}
me.protocol=me.hostname=me.pathname=null;
me.port=me.search=me.hash=me.username=me.password="";
me.searchParams=new SearchParams(me);
var pattern=/^[a-zA-Z]+:/;
if(arr=relativePath.match(pattern)){
me.protocol=arr[0];
path=relativePath.replace(pattern,"");
pattern=/^\/*([^\/]+)/;
var host=path.match(pattern)[1];
path=path.replace(pattern,"");
arr=host.split("@");
if(arr.length>1){
me.host=arr[1];
arr=arr[0].split(":");
if(arr.length>1){
me.username=arr[0];
me.password=arr[1];
}else{
me.username=arr[0];
}
}else{
me.host=host;
}
}else if(absolutePath){
var absInfo=absolutePath.indexOf?new URL(absolutePath):absolutePath;
me.protocol=absInfo.protocol;
me.hostname=absInfo.hostname;
me.port=absInfo.port;
if(absInfo.username) me.username=absInfo.username;
if(absInfo.password) me.password=absInfo.password;
me.pathname=absInfo.pathname;
if(relativePath.startsWith("#")){
me.search=absInfo.search;
me.hash=relativePath;
return me;
}else if(relativePath.startsWith("?")){
var a=relativePath.indexOf("#");
if(a<0){
me.search=relativePath;
me.hash="";
}else{
me.search=relativePath.substr(0,a);
me.hash=relativePath.substring(a,relativePath.length);
}
return me;
}else if(relativePath.startsWith("/")){
path=relativePath;
}else if(relativePath.startsWith("../")){
path=absInfo.pathname.replace(/\/[^\/]*$/,"/")+relativePath;
pattern=/[^\/]+\/\.\.\//;
while(pattern.test(path)){
path=path.replace(pattern,"");
}
path=path.replace(/^(\/\.\.)+/,"");
}else{
path=absInfo.pathname.replace(/[^\/]*$/,"")+relativePath.replace(/^\.\//,"");
}
}else{alert(arr);
throw "SYNTAX_ERROR";
}
pattern=/^[^#]*/;
me.hash=path.replace(pattern,"");
arr=path.match(pattern);
path=arr[0];
pattern=/^[^\?]*/;
me.search=path.replace(pattern,"");
arr=path.match(pattern);
me.pathname=arr[0];
return me;
};
}
URL.properties={
host:{
enumerable:true,
get:function(){
if(this.port){
return this.hostname+":"+this.port;
}
return this.hostname;
},
set:function(value){
var pattern=/(.*):(\d+)$/;
var arr=value.match(pattern);
this.port="";
if(arr){
this.hostname=arr[1];
this.port=arr[2];
}else{
this.hostname=value;
}
}
},
origin:{
enumerable:true,
get:function(){
return this.protocol+"//"+this.host;
}
},
href:{
enumerable:true,
get:function(){
var user=this.username;
if(user){
if(this.password){
user+=":"+this.password;
}
user+="@";
}
return this.protocol+"//"+user+this.host+this.pathname+this.search+this.hash;
},
set:function(value){
var url=new URL(value);
this.protocol=url.protocol;
this.hostname=url.hostname;
this.pathname=url.pathname;
this.port=url.port;
this.search=url.search;
this.hash=url.hash;
this.username=url.username;
this.password=<PASSWORD>;
}
}
};
if(Object.defineProperties){
if(!url || !('href' in url)){
Object.defineProperties(URL.prototype,URL.properties);
}else{
if(!('origin' in url)){
Object.defineProperty(URL.prototype,"origin",URL.properties.origin);
}
if(!('searchParams' in url)){
Object.defineProperty(URL.prototype,"searchParams",{
enumerable:true,
get:function(){
var searchParams=new SearchParams(this);
Object.defineProperty(this,"searchParams",{
enumerable:true,
value:searchParams
});
return searchParams;
}
});
}
}
}else{
window.execScript([
'Class VBURL',
' Public [protocol]',
' Public [hostname]',
' Public [pathname]',
' Public [port]',
' Public [search]',
' Public [searchParams]',
' Public [hash]',
' Public [username]',
' Public [password]',
' Public Property Let [host](var)',
' Call URL.properties.host.set.call(Me,var)',
' End Property',
' Public Property Get [host]',
' [host]=URL.properties.host.get.call(Me)',
' End Property',
' Public Property Let [origin](var)',
' End Property',
' Public Property Get [origin]',
' [origin]=URL.properties.origin.get.call(Me)',
' End Property',
' Public Property Let [href](var)',
' Call URL.properties.href.set.call(Me,var)',
' End Property',
' Public Property Get [href]',
' [href]=URL.properties.href.get.call(Me)',
' End Property',
'End Class',
'Function VBUrlFactory()',
' Dim o',
' Set o = New VBURL',
' Set VBUrlFactory = o',
'End Function'
].join('\n'), 'VBScript');
}
})(this);
// Escape a string for safe embedding in a double-quoted JSON/JS literal
// (adapted from lodash/json2): well-known characters get their short
// escapes, everything else in the risky ranges becomes \uXXXX.
Sky.escapeString=function(str) {
	var rx_escapable = /[\\\"\u0000-\u001f\u007f-\u009f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g;
	rx_escapable.lastIndex = 0;
	if (!rx_escapable.test(str)) {
		return str;
	}
	var shortEscapes = {
		"\b":"\\b","\t":"\\t","\n":"\\n","\f":"\\f","\r": "\\r", "\"": "\\\"","\\": "\\\\"
	};
	return str.replace(rx_escapable, function(ch) {
		var esc = shortEscapes[ch];
		if (typeof esc === "string") {
			return esc;
		}
		return "\\u" + ("0000" + ch.charCodeAt(0).toString(16)).slice(-4);
	});
};
Sky.support.JSON=true;
if(!this.JSON){
	// JSON polyfill for engines with no native implementation.
	Sky.support.JSON=false;
	JSON={
		// Serialize a value to a JSON string. Unlike native JSON.stringify
		// this supports no replacer/space arguments, and functions /
		// undefined properties are simply skipped.
		'stringify':function(obj){
			switch(obj){
				case null:
					return "null";
				case false:
				case true:
					// Fixed: previously returned the boolean itself; a JSON
					// serializer must return the string "true"/"false".
					// (Also removed an unreachable `break` after the return.)
					return obj?"true":"false";
				default:
					var type=Object.prototype.toString.call(obj);
					switch(type){
						case '[object String]':
							return '"'+Sky.escapeString(obj)+'"';
						case '[object Number]':
							return isNaN(obj)?"null":obj.toString();
						case '[object Array]':
							return "["+obj.map(JSON.stringify).join(",")+"]";
						default:
							if(Sky.isFunction(obj.toJSON)){
								return JSON.stringify(obj.toJSON());
							}
							var items=[];
							// Fixed: the object being serialized was never passed
							// to forOwn, and the key variable was misspelled "k"
							// (a ReferenceError at runtime).
							Sky.forOwn(obj,function(value,key){
								if(value!==void 0){
									if(!Sky.isFunction(value)){
										items.push('"'+Sky.escapeString(key)+'":'+JSON.stringify(value));
									}
								}
							});
							return "{"+items.join(",")+"}";
					}
			}
		},
		// Parse via eval — only safe for trusted input; there is no native
		// JSON on these engines to fall back to.
		'parse':function(str){
			return eval('('+str+')');
		}
	};
}
// setImmediate polyfill — callbacks run before setTimeout callbacks.
if(!this.setImmediate){
(function(global){
	var index=0;
	var handles=new Map();
	if(this.Promise){
		// Microtask path: schedule through a resolved Promise; extra
		// arguments (after fn) are stored and replayed to fn.
		global.setImmediate=function(fn){
			index++;
			var args=Array.from(arguments);
			args.shift();
			var p=Promise.resolve(index);
			handles.set(index,args);
			p.then(function(id){
				var args=handles.get(id);
				if(args){
					fn.apply(global,args);
					clearImmediate(id);
				}
			});
			return index;
		};
	}else{
		// Fallback path: queue ids and drain them from a 0ms setTimeout;
		// setTimeout itself is wrapped so the immediate queue is flushed
		// before any timer callback runs.
		var ticks=null;
		global.setImmediate=function(fn){
			index++;
			if(!ticks){
				ticks=new Array();
				setTimeout(nextTick);
			}
			ticks.push(index);
			handles.set(index,arguments);
			return index;
		};
		function nextTick(){
			if(ticks && ticks.length){
				for(var i=0;i<ticks.length;i++){
					var id=ticks[i];
					var args=handles.get(id);
					if(args){
						var fn=args[0];
						args=Array.from(args);
						args.shift();
						try{
							fn.apply(global,args);
						}catch(e){
							console.error(e);
						}
					}
				}
				ticks=null;
				handles.clear();
			}
		}
		setImmediate.nextTick=nextTick;
		var setTimeoutN=setImmediate.setTimeout=setTimeout;
		if(document.addEventListener){
			global.setTimeout=function(fn,d){
				setTimeoutN(function(){
					setImmediate.nextTick();
					fn();
				},d)
			};
		}else{
			window.execScript("function setTimeout(fn,d){setImmediate.setTimeout(function(){setImmediate.nextTick();fn();},d)}");
		}
	}
	// Cancel a pending immediate by dropping its stored arguments.
	global.clearImmediate=function(id){
		handles['delete'](id);
	};
})(this);
}
(function(global){
function Deferred(){
this._resolveds=[];
this._rejecteds=[];
this._state="pending";//resolved | rejected
}
Deferred.prototype.state=function(){
return this._state;
};
Deferred.prototype.done=function(fn){
if(this._state=="resolved"){
fn.call(this,this.data);
}else if(this._state=="pending"){
this._resolveds.push(fn);
}
return this;
};
Deferred.prototype.fail=function(fn){
if(this._state=="rejected"){
fn.call(this,this.data);
}else if(this._state=="pending"){
this._rejecteds.push(fn);
}
return this;
};
Deferred.prototype.always=function(fn){
if(this._state=="pending"){
this._resolveds.push(fn);
this._rejecteds.push(fn);
}else{
fn.call(this,this.data);
}
};
Deferred.prototype.resolve=function(d){
if(this._state=="pending"){
this.data=d;
this._state="resolved";
this._resolveds.forEach(callAll,this);
this._resolveds=null;
}
return this;
};
Deferred.prototype.reject=function(d){
if(this._state=="pending"){
this.data=d;
this._state="rejected";
this._rejecteds.forEach(callAll,this);
this._rejecteds=null;
}
return this;
};
function callAll(fn){
fn.call(this,this.data);
}
	if(!this.Promise){
		// Minimal Promise built on Deferred; settlement is deferred to the
		// next tick via setImmediate.
		function Promise(executor){
			Deferred.call(this);
			var me=this;
			function resolve(value) {
				setImmediate(function(){
					me.resolve(value);
				});
			}
			function reject(reason) {
				setImmediate(function(){
					me.reject(reason);
				});
			}
			try{
				executor(resolve, reject);
			}catch(e){
				reject(e);
			}
		}
		Promise.prototype=Object.create(Deferred.prototype);
		Promise.prototype.constructor=Promise;
		// Wrap a then-callback: run `before`, adopt a returned thenable,
		// otherwise settle the chained promise via `after`.
		function nextPromise(before,after,resolve,reject){
			return function(value){
				try{
					var x=before(value);
					if(typeof x.then==="function"){
						x.then(resolve, reject);
					}else{
						after(x);
					}
				}catch(r){
					reject(r);
				}
			};
		}
		Promise.prototype.then=function(onResolved, onRejected){
			var me=this;
			// NOTE(review): defaulting missing handlers to Sky.noop swallows
			// the settled value instead of passing it through as the Promise
			// spec requires — confirm before relying on chained values.
			onResolved=onResolved || Sky.noop;
			onRejected=onRejected || Sky.noop;
			return new Promise(function(resolve,reject){
				switch(me.state()){
					case "resolved":
						setImmediate(nextPromise(onResolved,resolve,resolve,reject),me.data);
						break ;
					case "rejected":
						setImmediate(nextPromise(onRejected,reject,resolve,reject),me.data);
						break ;
					default:
						me._resolveds.push(nextPromise(onResolved,resolve,resolve,reject));
						me._rejecteds.push(nextPromise(onRejected,reject,resolve,reject));
				}
			});
		};
		Promise.prototype['catch']=function(onRejected){
			return this.then(undefined,onRejected);
		};
		// Resolve with an array of results once every entry settles; reject
		// on the first rejection. Non-Promise entries pass straight through.
		Promise.all=function(promises){
			if (!Sky.isArray(promises)) {
				throw new TypeError('You must pass an array to all.');
			}
			return new Promise(function(resolve,reject){
				if(promises.length==0) return resolve(new Array());
				var result=new Array(promises.length);
				var c=0;
				promises.forEach(function(one,index){
					if(one instanceof Promise){
						one.then(function(data){
							c++;
							result[index]=data;
							if(c>=promises.length){
								resolve(result);
							}
						},function(data){
							reject(data);
						});
					}else{
						c++;
						result[index]=one;
						if(c>=promises.length){
							resolve(result);
						}
					}
				});
			});
		};
		// Settle with the first promise to settle. NOTE(review): the
		// winner's value/reason is not forwarded to resolve/reject.
		Promise.race=function(promises){
			if (!Array.isArray(promises)) {
				throw new TypeError('You must pass an array to all.');
			}
			return new Promise(function(resolve,reject){
				promises.forEach(function(one){
					one.then(function(){
						resolve();
					},function(){
						reject();
					});
				});
			});
		};
		Promise.resolve=function(arg){
			return new Promise(function(resolve,reject){
				resolve(arg)
			});
		};
Promise.reject=function(arg){
return Promise(function(resolve,reject){
reject(arg)
});
};
		// Publish the polyfilled constructors globally.
		global.Promise=Promise;
		global.Deferred=Deferred;
	}
	// Factory for the jQuery-style Deferred defined above.
	Sky.Deferred=function(){
		return new Deferred();
	};
})(this);
// Wait for several Deferred-like objects; returns a Deferred that resolves
// once every argument has resolved. A single argument is returned
// unchanged (jQuery.when-style shortcut). Rejections are not propagated.
Sky.when=function(subordinate){
	if(arguments.length==1){
		return arguments[0];
	}
	var resolveValues=Array.from(arguments);
	var dfd=Sky.Deferred();
	// Fixed: with zero arguments the returned Deferred previously never
	// resolved; resolve immediately when there is nothing to wait for.
	if(resolveValues.length==0){
		return dfd.resolve();
	}
	var i=0;
	resolveValues.forEach(function(item){
		item.done(function(){
			i++;
			if(i==resolveValues.length){
				dfd.resolve();
			}
		});
	});
	return dfd;
};
Sky.support.XMLHttpRequest=true;
if(!this.XMLHttpRequest){
Sky.support.XMLHttpRequest=false;
XMLHttpRequest=function(){
if(XMLHttpRequest.progid){
return new ActiveXObject(XMLHttpRequest.progid);
}
var versions=["Microsoft.XMLHTTP","MSXML2.XMLHTTP","Msxml2.XMLHTTP.5.0"];
var i=versions.length;
while(i--){
try{
var progid=versions[i];
var request=new ActiveXObject(progid);
if(request){
XMLHttpRequest.progid=progid;
return request;
}
}catch(e){}
}
};
}
if(!('head' in document)) document.head=document.getElementsByTagName("head")[0];
location.origin=location.origin || location.protocol+"//"+location.host;
/** 判断一个节点后代是否包含另一个节点 **/
if(this.Node && Node.prototype && !Node.prototype.contains){
Node.prototype.contains=function(arg){
return !!(this.compareDocumentPosition(arg) & 16);
}
}
// Fallback for engines where the document object itself lacks contains():
// linear scan of document.all for reference identity.
if(!document.contains){
	document.contains=function(ele){
		var all=document.all;
		var total=all.length;
		for(var idx=0;idx<total;idx++){
			if(all[idx]===ele){
				return true;
			}
		}
		return false;
	};
}
if(this.HTMLElement) {
	// Polyfill Element#children: element-type (nodeType 1) child nodes only.
	if(!document.head.children){
		HTMLElement.prototype.__defineGetter__("children", function() {
			var a=[];
			for(var i=0; i<this.childNodes.length; i++){
				var n=this.childNodes[i];
				if(n.nodeType==1){
					a.push(n);
				}
			}
			return a;
		});
	}
	// Polyfill innerText: recursively concatenate text, mapping <br> to a
	// newline and skipping script/style/template subtrees. Whitespace-only
	// text nodes between block-level siblings collapse to a newline.
	if(!('innerText' in document.head)){
		(function(){
			HTMLElement.prototype.__defineGetter__( "innerText", function(){
				var anyString = "";
				var childS = this.childNodes;
				for(var i=0; i<childS.length; i++){
					var node=childS[i];
					if(node.nodeType==1){
						switch(node.tagName){
							case "BR":
								anyString+='\n';
								break ;
							case "SCRIPT":
							case "STYLE":
							case "TEMPLATE":
								break ;
							default :
								anyString+=node.innerText;
						}
					}else if(node.nodeType==3){
						// NOTE(review): trimLeft/trimRight are non-standard
						// aliases of trimStart/trimEnd — confirm target engines.
						var nodeValue=node.nodeValue;
						if(i==0)
							nodeValue=nodeValue.trimLeft();
						if(i==childS.length-1)
							nodeValue=nodeValue.trimRight();
						if(i>0 && i<childS.length-1){
							if(nodeValue.match(/^\s+$/)){
								if(checkBlock(childS[i-1]) || checkBlock(childS[i+1])){
									nodeValue="\n";
								}
							}
						}
						anyString+=nodeValue;
					}
				}
				return anyString.trim();
			});
			// Heuristic: treat everything except these inline tags as block.
			function checkBlock(node){
				switch(node.tagName){
					case "BR":
					case "SPAN":
					case "I":
					case "U":
					case "B":
					case "FONT":
						return false;
				}
				return true;
			}
		})();
		HTMLElement.prototype.__defineSetter__( "innerText", function(sText){
			this.textContent=sText;
		});
	}
}
(function(){
	// Determine how to identify the currently-executing <script>, from best
	// (native document.currentScript) to worst (last script in the DOM).
	var nodes=document.getElementsByTagName('SCRIPT');
	var currentScript=nodes[nodes.length-1];
	Sky.support.getCurrentScript=true;
	if(document.currentScript!==void 0){//modern browsers: native support
	}else{
		if("readyState" in currentScript){
			// IE11-: the executing script is the one in "interactive" state.
			Sky.getCurrentScript=function(){//IE11-
				var nodes=document.getElementsByTagName('SCRIPT');
				var i=nodes.length;
				while(i--){
					var node=nodes[i];
					if(node.readyState==="interactive"){
						return node;
					}
				}
				return null;
			};
			if(Object.defineProperty){
				Object.defineProperty(document,"currentScript",{
					enumerable:!!Object.defineProperties,//IE8 does not support enumerable
					get:function(){
						return Sky.getCurrentScript();
					}
				});
			}
		}else{
			// Last resort: mark scripts complete on capture-phase load, then
			// match by URL via the error-stack based getCurrentPath below.
			document.addEventListener('load',function(e){
				if(e.target.tagName==="SCRIPT"){
					e.target.readyState="complete";
				}
			},true);
			Sky.support.getCurrentScript=false;
			Object.defineProperty(document,"currentScript",{
				enumerable:true,
				get:function(){
					if(Sky.support.getCurrentPath){
						var path=Sky.getCurrentPath();
						var nodes=document.getElementsByTagName('SCRIPT');
						if(path){
							for(var i=0;i<nodes.length;i++){
								var node=nodes[i];
								if(path===new URL(node.src,location).href){
									if(node.readyState!=="complete") {
										return node;
									}
								}
							}
							return null;
						}
						if(Sky.isReady){
							return null;
						}
					}
					nodes=document.getElementsByTagName('SCRIPT');
					return nodes[nodes.length-1];
				}
			});
		}
	}
	if(!Sky.getCurrentScript){//modern browsers
		Sky.getCurrentScript=function(){
			return document.currentScript;
		};
	}
	// getCurrentPath: derive the executing script's URL from an Error stack.
	Sky.support.getCurrentPath=true;
	try{
		throw new Error('get stack');
	}catch(e){
		// Per-property stack formats, matched once up front.
		var stackHandler={
			'stack':[
				/^@(.*):\d+$/,// Firefox
				/^\s+at (.*):\d+:\d+$/,//Chrome
				/^\s+at [^\(]*\((.*):\d+:\d+\)$/ //IE11
			],
			'stacktrace':[
				/\(\) in\s+(.*?\:\/\/\S+)/m//opera
			]
		};
		var stackResult=handleStack(e,stackHandler);
		if(stackResult){
			Sky.getCurrentPath=function(){
				try{
					throw new Error('get stack');
				}catch(e){
					var arr=getLastStack(e[stackResult.name]).match(stackResult.pattern);
					if(arr){
						// Ignore frames that point at the page itself.
						if(arr[1]!=location.href && arr[1]!=location.origin+location.pathname+location.search){
							return arr[1];
						}
					}
				}
			};
		}
	}
	if(!Sky.getCurrentPath){
		Sky.support.getCurrentPath=false;
		Sky.getCurrentPath=function(){
			var currentScript=Sky.getCurrentScript();
			return new URL(currentScript.src,location).href;
		};
	}
	// Bottom-most frame of a stack string.
	function getLastStack(stack){
		var stacks=stack.trim().split("\n");;
		return stacks[stacks.length-1];
	}
	// Find which stack property/pattern pair this engine produces.
	function handleStack(e,stackHandler){
		for(var name in stackHandler){
			var stacks=e[name];
			if(stacks){
				var patterns=stackHandler[name];
				var stack=getLastStack(stacks);
				var i=patterns.length;
				while(i--){
					var pattern=patterns[i];
					if(pattern.test(stack)){
						return {'name':name,'pattern':pattern};
					}
				}
			}
		}
	}
})();
// Console shim: route output to the script debugger when attached,
// otherwise to the window status bar.
if(!this.console){
	console={};
	if(this.Debug){
		console.log=console.info=console.error=console.warn=function(data){
			Debug.writeln(data);
		};
	}else{
		console.log=console.info=console.error=console.warn=function(data){
			window.status=data;
		};
		console.clear=function(){
			window.status='';
		};
	}
}
// Load a script tag asynchronously; `func` (optional) runs after the
// script executes. charset defaults to UTF-8. Returns the script element.
Sky.getScript=function(src,func,charset){
	var script=document.createElement('script');
	if(!charset){charset="UTF-8"};
	script.charset=charset;
	script.src=src;
	script.async=true;
	if(func){
		var event='onreadystatechange';
		if(event in script){
			// Old IE: append only once "loaded" so execution order is
			// controlled; fire func on "complete" and detach the handler.
			script.attachEvent(event,function(){
				if(script.readyState==='loaded'){
					document.head.appendChild(script);
				}else if(script.readyState==='complete'){
					script.detachEvent(event,arguments.callee);
					var evt=window.event;
					//evt.target=evt.currentTarget=evt.srcElement;
					func.call(script,evt);
				}
			});
		}else{
			// Prefer onafterscriptexecute (fires after execution) where
			// available; otherwise plain onload.
			if('onafterscriptexecute' in script){
				script.onafterscriptexecute=func;
			}else{
				script.onload=func;
			}
			document.head.appendChild(script);
		}
	}else{
		document.head.appendChild(script);
	}
	return script;
};
(function(){
	// DOM-ready promise with three strategies: DOMContentLoaded, the IE
	// doScroll polling trick (top window only), and onreadystatechange.
	Sky.isReady=false;
	var p=new Promise(function(resolve, reject){
		if(document.addEventListener){
			document.addEventListener("DOMContentLoaded",function(){
				Sky.isReady=true;
				resolve();
			},false);
		}else if(window==window.top){
			// doScroll throws until the document is usable; poll until it stops.
			(function() {
				try{
					document.documentElement.doScroll('left');
					Sky.isReady=true;
					resolve();
				}catch(e){
					setTimeout(arguments.callee, 0);
				}
			})();
		}else{
			document.attachEvent("onreadystatechange",function(){
				if(document.readyState === "complete") {
					document.detachEvent("onreadystatechange", arguments.callee);
					Sky.isReady=true;
					resolve();
				}
			});
		}
	});
	// Run callback after DOM ready (immediately-chained when already ready).
	Sky.ready=function(callback){
		if(callback && !Sky.isReady){
			return p.then(callback);
		}
		return p;
	};
	Sky.then=function(callback){
		return p.then(callback);
	};
})();
// Invoke `iteratee` n times (lodash-style), collecting the results.
// iteratee receives the current index; thisArg, when given, is used as its
// `this` context. Returns [] for n < 1.
Sky.times=function(n,iteratee,thisArg){
	if(n<1){
		return [];
	}
	var index = -1,
		result = Array(n);
	while (++index < n) {
		// Fixed: `iteratee.apply(this,thisArg)` treated thisArg as an
		// argument array (throwing for non-array values) and never passed
		// the index; call with thisArg as context and the index as argument.
		result[index] = iteratee.call(thisArg,index);
	}
	return result;
};
// Index of the first element whose `key` property strict-equals `value`,
// or -1 when nothing matches.
Sky.findIndex=function(arr,key,value){
	var idx=-1;
	while(++idx<arr.length){
		if(arr[idx][key]===value){
			return idx;
		}
	}
	return -1;
};
// Index of the last element whose `key` property strict-equals `value`,
// or -1 when nothing matches.
Sky.findLastIndex=function(arr,key,value){
	var idx=arr.length;
	while(idx-->0){
		if(arr[idx][key]===value){
			return idx;
		}
	}
	return -1;
};
// First element whose `key` property strict-equals `value`
// (undefined when nothing matches).
Sky.find=function(arr,key,value){
	var len=arr.length;
	for(var idx=0; idx<len; idx++){
		var item=arr[idx];
		if(item[key]===value){
			return item;
		}
	}
};
// Last element whose `key` property strict-equals `value`
// (undefined when nothing matches).
Sky.findLast=function(arr,key,value){
	for(var i=arr.length-1; i>=0; i--){
		// Fixed: previously returned the searched `value` instead of the
		// matching element, unlike the symmetric Sky.find which returns arr[i].
		if(arr[i][key]===value){return arr[i];}
	}
};
// Return a shuffled copy of arr (Fisher–Yates).
Sky.shuffle=function(arr){
	var copyArr=arr.slice();
	var ubound=arr.length-1;
	for(var i=0; i<ubound; i++){
		// Fixed: drawing r from the full range [0, ubound] at every step
		// produces a biased shuffle; classic Fisher–Yates draws from
		// [i, ubound]. (Assumes Sky.random(min,max) is inclusive of both
		// bounds, as the original call implied — TODO confirm.)
		var r=Sky.random(i,ubound);
		var tmp=copyArr[r];
		copyArr[r]=copyArr[i];
		copyArr[i]=tmp;
	}
	return copyArr;
};
// Sort arr in place by the given property and return it.
// Fixed: the comparator returned a boolean; Array#sort requires a
// negative/zero/positive number, so the boolean form is engine-dependent.
Sky.sortBy=function(arr,key){
	return arr.sort(function(a,b){
		if(a[key]<b[key]) return -1;
		if(a[key]>b[key]) return 1;
		return 0;
	});
};
// Collect the `key` property of every element into a new array.
Sky.pluck=function(arr,key){
	var values=[];
	arr.forEach(function(item){
		values.push(item[key]);
	});
	return values;
};
// Lowest index at which `value` could be inserted into the sorted array
// while keeping it sorted (linear scan).
Sky.sortedIndex=function(arr,value){
	var len=arr.length;
	var pos=0;
	while(pos<len){
		if(arr[pos]>=value){
			return pos;
		}
		pos++;
	}
	return len;
};
// Highest index at which `value` could be inserted into the sorted array
// while keeping it sorted (linear scan from the end).
Sky.sortedLastIndex=function(arr,value){
	for(var i=arr.length-1; i>=0; i--){
		if(arr[i]<=value){
			return i+1;
		}
	}
	// Fixed: previously fell through and returned undefined when `value`
	// sorts before every element (including the empty-array case).
	return 0;
};
// Left-pad `value` to `width`: numbers are padded with "0", other values
// with `chars` (default: space). Relies on String#padStart.
Sky.pad=function(value,width,chars){
	if(!chars){chars=" ";}
	if(Sky.isNumber(value)){
		chars="0";
	}
	value+='';
	return value.padStart(width,chars);
};
// Escape the HTML special characters &, < and > into character entities.
// Fixed: the replacement strings had been un-escaped into the very
// characters they were meant to encode, making every replace a no-op.
Sky.escapeHtml=function(str) {
	return str.replace(/&/g,'&amp;')
		.replace(/</g,'&lt;')
		.replace(/>/g,'&gt;');
};
// Escape a string for use inside an HTML attribute delimited by `quot`
// (defaults to double quotes).
Sky.escapeAttribute=function(str,quot){
	var esc=Sky.escapeHtml(str);
	if(!quot || quot=='"'){
		// Fixed: the replacement had collapsed to a literal '"', making the
		// call a no-op; encode double quotes as &quot;.
		return esc.replace(/"/g,'&quot;');
	}else{
		// NOTE(review): relies on String#replaceAll — possibly meant
		// Sky.replaceAll for older engines; confirm.
		return esc.replaceAll(quot.charAt(0),'&#'+quot.charCodeAt(0)+";");
	}
};
(function(){
	var div=document.createElement('div');
	// Entity map for Sky.escape. Fixed: the entity strings had been
	// un-escaped into the raw characters, which made escaping a no-op.
	var htmlEscapes={
		'&': '&amp;',
		'<': '&lt;',
		'>': '&gt;',
		'"': '&quot;',
		"'": '&#39;',
		'`': '&#96;'
	};
	// Escape the characters dangerous in HTML text/attribute contexts.
	Sky.escape=function(text){
		return text.replace(/[&<>"'`]/g,function(i){
			return htmlEscapes[i];
		});
	};
	// Decode entities by round-tripping through an element's innerHTML.
	Sky.unescape=function(html){
		div.innerHTML=html;
		return div.innerText || div.textContent ;
	};
})();
// Escape a string for literal use inside a RegExp (from lodash). Falsy
// input returns the always-matching empty group "(?:)".
Sky.escapeRegExp=function(str){//from lodash
	if(str){
		var reRegExpChars = /^[:!,]|[\\^$.*+?()[\]{}|\/]|(^[0-9a-fA-Fnrtuvx])|([\n\r\u2028\u2029])/g;
		reRegExpChars.lastIndex = 0;
		return (reRegExpChars.test(str))
			? str.replace(reRegExpChars, function(chr, leadingChar, whitespaceChar) {
				if (leadingChar) {
					// Leading hex-digit-like chars become \xNN so the result
					// cannot be misread as a backreference/flag sequence.
					var regexpEscapes = {
						'0': 'x30', '1': 'x31', '2': 'x32', '3': 'x33', '4': 'x34',
						'5': 'x35', '6': 'x36', '7': 'x37', '8': 'x38', '9': 'x39',
						'A': 'x41', 'B': 'x42', 'C': 'x43', 'D': 'x44', 'E': 'x45', 'F': 'x46',
						'a': 'x61', 'b': 'x62', 'c': 'x63', 'd': 'x64', 'e': 'x65', 'f': 'x66',
						'n': 'x6e', 'r': 'x72', 't': 'x74', 'u': 'x75', 'v': 'x76', 'x': 'x78'
					};
					chr = regexpEscapes[chr];
				} else if (whitespaceChar) {
					// Line terminators become their escape-sequence names.
					var stringEscapes = {
						'\\': '\\',
						"'": "'",
						'\n': 'n',
						'\r': 'r',
						'\u2028': 'u2028',
						'\u2029': 'u2029'
					};
					chr = stringEscapes[chr];
				}
				return '\\' + chr;
			})
			: str;
	}
	return "(?:)";
};
// Replace every occurrence of the literal substring `reallyDo` in `str`
// with `replaceWith`; `ignoreCase` makes the match case-insensitive.
Sky.replaceAll=function(str, reallyDo, replaceWith, ignoreCase) {
	return str.replace(new RegExp(Sky.escapeRegExp(reallyDo), (ignoreCase ? "gi": "g")), replaceWith);
};
// Display width of a string: code units outside 0..127 count as two cells.
Sky.strlen=function(str){
	var len=0;
	for(var i = 0; i < str.length; i++){
		if (str.charCodeAt(i) > 127 || str.charCodeAt(i) < 0){
			len+=2;
		}else{
			len++;
		}
	}
	return len;
};
// Truncate `str` to a display width of `len` cells (wide chars count 2),
// appending `replaceStr` (e.g. "..."); when the suffix alone exceeds the
// budget, a right-trimmed portion of the suffix is returned instead.
Sky.trunc=function(str,len,replaceStr){
	var relen=Sky.strlen(replaceStr);
	if(relen>len){
		// Fixed: this branch iterated over `relen` (a number) — calling
		// relen.length / relen.charCodeAt, a runtime TypeError — instead of
		// iterating over `replaceStr` itself.
		for (var i = replaceStr.length-1; i >= 0; i--){
			if (replaceStr.charCodeAt(i) > 127 || replaceStr.charCodeAt(i) < 0){
				len-=2;
			}else{
				len--;
			}
			if(len<0){
				i++;
				return replaceStr.substr(i,replaceStr.length-i);
			}
		}
	}else{
		len-=relen;
		var p=0;
		for (var i = 0; i < str.length; i++){
			if (str.charCodeAt(i) > 127 || str.charCodeAt(i) < 0){
				p+=2;
			}else{
				p++;
			}
			if(p>len){
				return str.substring(0,i)+replaceStr;
			}
		}
		return str;
	}
};
// NOTE(review): this region duplicates Sky.pad / Sky.escapeHtml /
// Sky.escapeAttribute / Sky.escape / Sky.escapeRegExp / Sky.replaceAll /
// Sky.strlen / Sky.trunc defined earlier in this file; these later
// definitions win at runtime, so the same fixes are applied here too.
// Consider deleting one of the two copies.
// Left-pad: numbers padded with "0", other values with `chars` (default space).
Sky.pad=function(value,width,chars){
	if(!chars){chars=" ";}
	if(Sky.isNumber(value)){
		chars="0";
	}
	value+='';
	return value.padStart(width,chars);
};
// Escape &, < and > into character entities.
// Fixed: the replacement strings had been un-escaped into the characters
// they were meant to encode, making every replace a no-op.
Sky.escapeHtml=function(str) {
	return str.replace(/&/g,'&amp;')
		.replace(/</g,'&lt;')
		.replace(/>/g,'&gt;');
};
// Escape a string for use inside an HTML attribute delimited by `quot`.
Sky.escapeAttribute=function(str,quot){
	var esc=Sky.escapeHtml(str);
	if(!quot || quot=='"'){
		// Fixed: encode double quotes as &quot; (was a no-op replacement).
		return esc.replace(/"/g,'&quot;');
	}else{
		// NOTE(review): relies on String#replaceAll — possibly meant
		// Sky.replaceAll for older engines; confirm.
		return esc.replaceAll(quot.charAt(0),'&#'+quot.charCodeAt(0)+";");
	}
};
(function(){
	var div=document.createElement('div');
	// Fixed: entity strings restored (they had been un-escaped to raw chars).
	var htmlEscapes={
		'&': '&amp;',
		'<': '&lt;',
		'>': '&gt;',
		'"': '&quot;',
		"'": '&#39;',
		'`': '&#96;'
	};
	Sky.escape=function(text){
		return text.replace(/[&<>"'`]/g,function(i){
			return htmlEscapes[i];
		});
	};
	// Decode entities by round-tripping through an element's innerHTML.
	Sky.unescape=function(html){
		div.innerHTML=html;
		return div.innerText || div.textContent ;
	};
})();
// Escape a string for literal use inside a RegExp (from lodash).
Sky.escapeRegExp=function(str){//from lodash
	if(str){
		var reRegExpChars = /^[:!,]|[\\^$.*+?()[\]{}|\/]|(^[0-9a-fA-Fnrtuvx])|([\n\r\u2028\u2029])/g;
		reRegExpChars.lastIndex = 0;
		return (reRegExpChars.test(str))
			? str.replace(reRegExpChars, function(chr, leadingChar, whitespaceChar) {
				if (leadingChar) {
					var regexpEscapes = {
						'0': 'x30', '1': 'x31', '2': 'x32', '3': 'x33', '4': 'x34',
						'5': 'x35', '6': 'x36', '7': 'x37', '8': 'x38', '9': 'x39',
						'A': 'x41', 'B': 'x42', 'C': 'x43', 'D': 'x44', 'E': 'x45', 'F': 'x46',
						'a': 'x61', 'b': 'x62', 'c': 'x63', 'd': 'x64', 'e': 'x65', 'f': 'x66',
						'n': 'x6e', 'r': 'x72', 't': 'x74', 'u': 'x75', 'v': 'x76', 'x': 'x78'
					};
					chr = regexpEscapes[chr];
				} else if (whitespaceChar) {
					var stringEscapes = {
						'\\': '\\',
						"'": "'",
						'\n': 'n',
						'\r': 'r',
						'\u2028': 'u2028',
						'\u2029': 'u2029'
					};
					chr = stringEscapes[chr];
				}
				return '\\' + chr;
			})
			: str;
	}
	return "(?:)";
};
// Replace every literal occurrence of `reallyDo` with `replaceWith`.
Sky.replaceAll=function(str, reallyDo, replaceWith, ignoreCase) {
	return str.replace(new RegExp(Sky.escapeRegExp(reallyDo), (ignoreCase ? "gi": "g")), replaceWith);
};
// Display width of a string: code units outside 0..127 count as two cells.
Sky.strlen=function(str){
	var len=0;
	for(var i = 0; i < str.length; i++){
		if (str.charCodeAt(i) > 127 || str.charCodeAt(i) < 0){
			len+=2;
		}else{
			len++;
		}
	}
	return len;
};
// Truncate `str` to `len` display cells, appending `replaceStr`.
Sky.trunc=function(str,len,replaceStr){
	var relen=Sky.strlen(replaceStr);
	if(relen>len){
		// Fixed: iterated over `relen` (a number) instead of `replaceStr`.
		for (var i = replaceStr.length-1; i >= 0; i--){
			if (replaceStr.charCodeAt(i) > 127 || replaceStr.charCodeAt(i) < 0){
				len-=2;
			}else{
				len--;
			}
			if(len<0){
				i++;
				return replaceStr.substr(i,replaceStr.length-i);
			}
		}
	}else{
		len-=relen;
		var p=0;
		for (var i = 0; i < str.length; i++){
			if (str.charCodeAt(i) > 127 || str.charCodeAt(i) < 0){
				p+=2;
			}else{
				p++;
			}
			if(p>len){
				return str.substring(0,i)+replaceStr;
			}
		}
		return str;
	}
};
// Merge source objects into a target (jQuery.extend-style). A leading
// `true` selects deep merge: plain-object values are merged recursively.
Sky.extend=function(){//extend an object
	var args=arguments;
	if(args.length==0) return;
	if(args.length==1) return args[0];
	var temp=args[0]==true?args[1]:args[0]; //deep merge: skip the flag, use args[1] as target
	for (var n=args[0]==true?2:1;n<args.length;n++){
		for(var i in args[n]){
			if(Sky.hasOwn(args[n],i)){
				if(args[n][i]!=null && args[0]==true && Sky.isObject(args[n][i]) && Sky.isObject(temp[i])){
					temp[i]=Sky.extend(true,temp[i],args[n][i]);
					//temp[i] = args[n][i];
				}else{
					temp[i] = args[n][i];
				}
			}
		}
	}
	return temp;
};
// Shallow-copy every enumerable property of config onto obj.
Sky.apply=function(obj,config){
	console.warn("Deprecated. use Object.assign");
	Sky.forIn(config,function(v,k){
		obj[k]=v;
	});
	return obj;
};
// Copy onto obj every property of config that obj does not already have
// (presence is tested with `in`, so inherited keys also block the copy).
Sky.applyIf=function(obj,config){
	Sky.forIn(config,function(value,key){
		if(!(key in obj)){
			obj[key]=value;
		}
	});
	return obj;
};
// Collect a form's named inputs into a plain object. Checkbox names may
// use "name[]" (collected into an array) or "name[key]" (collected into an
// object); radios contribute only when checked.
// form: a <form> element or its document.forms name.
// Throws when the resolved element is not a form.
Sky.getFormData=function(form){
	if(Sky.isString(form)){
		form=document.forms[form];
	}
	if(form.tagName.toUpperCase()!="FORM"){
		// Fixed typo in the error message ("form is not exit").
		throw "form does not exist";
	}
	var o={};
	for(var i=0; i<form.length; i++){
		var input=form[i];
		if(input.name){
			var arr,name,value;
			switch (input.type) {
				case "checkbox":
					if(input.checked){
						if(arr=input.name.match(/(.*)\[\]$/)){
							// "name[]" convention: accumulate values in an array;
							// a checkbox without a value contributes "on".
							name=arr[1];
							value=o[name];
							if(!value){
								o[name]=value=[];
							}
							if(input.value){
								value.push(input.value);
							}else{
								value.push("on");
							}
						}else if(arr=input.name.match(/(.*)\[([^\]]+)\]$/)){
							// "name[key]" convention: accumulate into an object.
							name=arr[1];
							var key=arr[2];
							value=o[name];
							if(!value){
								o[name]=value={};
							}
							if(input.value){
								value[key]=input.value;
							}else{
								value[key]="on";
							}
						}else{
							o[input.name]=input.value;
						}
					}
					break;
				case "radio":
					if(input.checked){
						o[input.name]=input.value;
					}
					break;
				default:
					o[input.name]=input.value;
			}
		}
	}
	return o;
};
// Populate a form's named inputs from a data object (inverse of
// Sky.getFormData, honoring the "name[]" / "name[key]" conventions for
// checkboxes). A falsy `data` clears checked states / values.
// Throws when the resolved element is not a form.
Sky.setFormData=function(form,data){
	if(Sky.isString(form)){
		form=document.forms[form];
	}
	if(form.tagName.toUpperCase()!="FORM"){
		// Fixed typo in the error message ("form is not exit").
		throw "form does not exist";
	}
	for(var i=0; i<form.length; i++){
		var input=form[i];
		if(input.name){
			var arr,name,value;
			switch (input.type) {
				case "checkbox":
					if(data){
						if(arr=input.name.match(/(.*)\[\]$/)){
							// "name[]": check when the value list contains input.value
							// (a comma-separated string is split first).
							name=arr[1];
							if(name in data){
								value=data[name];
								if(value.split) value=value.split(",");
								if(value.indexOf && value.indexOf(input.value)>=0){
									input.checked=true;
								}else{
									input.checked=false;
								}
							}
						}else if(arr=input.name.match(/(.*)\[([^\]]+)\]$/)){
							// "name[key]": set value and check when data[name][key] is truthy.
							name=arr[1];
							if(name in data){
								var key=arr[2];
								value=data[name];
								if(value && value[key]){
									input.value=value[key];
									input.checked=true;
								}else{
									input.checked=false;
								}
							}
						}else{
							if(input.name in data){
								value=data[input.name];
								if(value){
									input.value=value;
									input.checked=true;
								}else{
									input.checked=false;
								}
							}
						}
					}else{
						input.checked=false
					}
					break;
				case "radio":
					if(data){
						if(input.name in data){
							input.checked=data[input.name]==input.value;
						}
					}else{
						input.checked=false
					}
					break;
				default:
					if(data){
						if(input.name in data){
							input.value=data[input.name];
						}
					}else{
						input.value="";
					}
			}
		}
	}
};
// Reset a form: uncheck all checkboxes/radios and blank every other input.
// form: a <form> element or its document.forms name; throws otherwise.
Sky.clearFormData=function(form){
	if(Sky.isString(form)){
		form=document.forms[form];
	}
	if(form.tagName.toUpperCase()!="FORM"){
		// Fixed typo in the error message ("form is not exit").
		throw "form does not exist";
	}
	for(var i=0; i<form.length; i++){
		var input=form[i];
		switch (input.type) {
			case "checkbox":
			case "radio":
				input.checked=false;
				break;
			default:
				input.value="";
		}
	}
};
var define,require;
(function(window){
Sky.Module=Module;
var commentRegExp=/\/\*[\s\S]*?\*\/|([^:"'=]|^)\/\/.*$/mg;
var cjsRequireRegExp=/[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g;
var STATUS={
INITED:0,//初始化
LOADING:1,//正在加载script
DEFINED:2,//已定义
DEPENDING:3,//正在加载依赖
COMPLETE:4//完成
};
var libs=new Map();
var cache=new Map();
var config=new Map();
var paths=new Map();
var map=new Map();
var baseUrl=location.href;
var urlArgs="";
var pkgs=[];
var rules=[];
var hooks=[];
var shim={};
function Module(name){
this.status=STATUS.INITED;
this.name=name;
var me=this;
this.promise=new Promise(function(resolve, reject){
var delay=null;
me.resolve=function(exports){
if(exports!==void 0){
me.exports=exports;
}
var pluginResolve=function(exports){
me.status=STATUS.COMPLETE;
resolve(exports);
};
var i=hooks.length;
while(i-->0){
var hook=hooks[i];
var r=hook.call(this,pluginResolve,reject);
if(r===false){
return false;
}
}
if(delay){
delay(pluginResolve, reject);
}else{
me.status=STATUS.COMPLETE;
resolve(me.exports);
}
};
me.reject=reject;
me.delay=function(fn){
delay=fn;
};
});
}
/*
全局变量中的require
*/
require=function(deps,callback,onerror){
var from=this;
if(from==window){
from=new Module(null);
from.script=Sky.getCurrentScript();
}
if(!from.dependencies){
from.dependencies=new Array();
}
if(Array.isArray(deps)){
var modules=new Array();//需要加载JS文件的模块
var promises=new Array(deps.length);
for(var i=0;i<deps.length;i++){
var dep=deps[i];
switch(dep){
case 'require':
promises[i]=Promise.resolve(require.bind(from));
break;
case 'exports':
promises[i]=Promise.resolve(from.exports=new Object());
break;
case 'module':
promises[i]=Promise.resolve(from);
break;
default:
var module;
var arr=dep.split("!");
if(arr.length==2){
module=nameToModule(arr[0],from);
promises[i]=module.promise.then(function(plugin){
return new Promise(function(resolve, reject){
plugin.load(arr[0], require.bind(module), resolve);
});
});
}else{
module=nameToModule(dep,from);
promises[i]=module.promise;
}
if(module.status<=STATUS.LOADING){
modules.push(module);
}else if(module.status==STATUS.DEFINED){
module.load();//加载依赖
}
from.dependencies.push(module);
}
}
Promise.all(promises).then(function(data){
callback && callback.apply(from,data);
},function(e){
onerror && onerror.call(from,e);
});
loadModelesScript(modules);
checkCircular(from);//检测循环依赖
return from;
}else{
var name=deps;
var module=nameToModule(name,from);
if(module.status===STATUS.COMPLETE){
return module.exports;
}else if(module.status===STATUS.DEFINED){
return module.loadSync();
}
throw new Error("module("+name+") must loaded before");
}
};
/**
* 根据字符串查找模块
* */
function nameToModule(name,from){
var module,url;
if(name.startsWith("//") || name.match(/^\w+:/) ){//模块名称是绝对路径
url=new URL(name,baseUrl);
}else{
if(name.startsWith(".")){//模块名称是相对路径
name=new URL(name,"http://localhost/"+from.name).pathname.replace("/","");
}
if(from){//优先查询同脚本模块
if(from.script){
if(from.script.modules){
module=from.script.modules.find(findName,name);
if(module){
return module;
}
}
}else{
debugger ;
}
}
//查询全局声明的模块
module=cache.get(name);
if(module){
return module;
}
var pkg=checkPkgs(name);
if(pkg){
url=new URL(pkg,baseUrl);
}else{
//根据配置获取
url=nameToURL(name,from);
if(!url){
url=new URL(name,baseUrl);
}
}
}
//TODO 非js模块
//js模块
if(!url.search){
if(!url.pathname.endsWith(".js")){
url.pathname+=".js";
}
if(urlArgs){
url.search+="?"+urlArgs;
}
}else{
if(urlArgs){
url.search+="&"+urlArgs;
}
}
var path=url.href;
var script=libs.get(path);
if(script){
var lib=script.modules;
if(lib.length==1){//匿名模块文件
return lib[0];
}
module=lib.find(findName,name);
if(module){
cache.set(name,module);
return module;
}else{
var requires=script.requires;
if(requires){
module=requires.find(findName,name);
if(module){
return module;
}
module=lib.find(findNoName,name);
if(module){
return module;
}
module=new Module(name);
cache.set(name,module);
module.src=path;
module.script=script;
module.status=STATUS.LOADING;
requires.push(module);
return module;
}
console.warn("module ["+name+"] not in js \""+path+"\"");
}
}else{
module=new Module(name);
cache.set(name,module);
module.src=path;
return module;
}
}
function checkPkgs(name){
var i=pkgs.length;
while(i-->0){
var pkg=pkgs[i];
if(pkg==name){
return pkg;
}
if(name.startsWith(pkg+"/")){
return pkg;
}
}
return false;
}
function nameToURL(name,from){
var i=rules.length;
while(i--){
var rule=rules[i];
var url=rule(name,from);
if(url){
return url;
}
}
var path=paths.get(name);
if(path){
return new URL(path,baseUrl);
}
var fromPaths=map.get(from.name);
if(fromPaths){
path=fromPaths.get(name);
if(path){
return new URL(path,baseUrl);
}
}
return null;
}
function findName(mod){
return mod.name==this;
}
function findNoName(mod){
return mod.name==null;
}
/**加载script */
function loadModelesScript(modules){
var libs=new Map();
var i=modules.length;
while(i-->0){
var mod=modules[i];
if(mod.status==STATUS.INITED){
var lib=libs.get(mod.src);
if(!lib){
lib=new Array();
libs.set(mod.src,lib);
}
lib.push(mod);
}
}
libs.forEach(loadModelesScriptPath);
}
function loadModelesScriptPath(modules,src){
var script=Sky.getScript(src,handleLast);
libs.set(src,script);
script.requires=modules;
script.modules=[];
script.onerror=handleError;
var i=modules.length;
while(i-->0){
var mod=modules[i];
mod.status=STATUS.LOADING;
mod.script=script;
}
}
function handleError(message,url,line){
var requires=this.requires;
requires.forEach(function(module){
module.reject({'message':message,'url':url,'line':line});
});
}
function handleLast(){
var requires=this.requires;
this.requires=null;
var i=requires.length;
while(i-->0){
var module=requires[i];
if(module.status<=STATUS.LOADING){
useShim.call(this,module);
}else if(module.status==STATUS.DEFINED){
module.load();
}
}
}
/**
 * Resolves `module` from its configured shim global on `window`; warns when
 * the shim table has no entry for it (`this` is the loading script).
 */
function useShim(module) {
    var name = module.name;
    if (Object.prototype.hasOwnProperty.call(shim, name)) {
        module.resolve(window[shim[name]]);
    } else {
        console.warn("No module found in script:" + this.src);
    }
}
/**
 * Records a define() call for this module. Modules belonging to a registered
 * package are cached under their name. A function initor defers evaluation
 * (status DEFINED); any other value resolves the module immediately.
 */
Module.prototype.define = function (deps, initor) {
    if (this.name && checkPkgs(this.name)) {
        cache.set(this.name, this);
    }
    this.script.modules.push(this);
    if (!Sky.isFunction(initor)) {
        this.resolve(initor);
        return;
    }
    this.initor = initor;
    this.deps = deps;
    this.status = STATUS.DEFINED;
};
/** Returns the require.config() entry registered for this module's name, if any. */
Module.prototype.config=function(){
return config.get(this.name);
};
/*
Loads this module's dependencies (if any), then runs the initor with the
resolved dependencies and resolves the module with its return value.
Dependency failures reject the module.
*/
Module.prototype.load=function(){
if(this.deps && this.deps.length){
this.status=STATUS.DEPENDING;// waiting on dependencies
require.call(this,this.deps,function(){
this.resolve(this.initor.apply(this,arguments));
},function(e){
this.reject(e);
});
}else{
this.resolve(this.initor());
}
};
/**
 * Synchronous variant of load(): resolves each dependency with the
 * synchronous require() form, runs the initor, and returns the exports.
 */
Module.prototype.loadSync=function(){
var result;
// Plugins cannot be used before the module has finished loading.
this.plugin=function(fn){
throw "the module ["+this.name+"] has not been loaded yet";
};
if(this.deps && this.deps.length){
var deps=this.deps.map(function(dep){
return require.call(this,dep);
},this);
result=this.initor.apply(this,deps);
}else{
result=this.initor();
}
this.resolve(result);
this.status=STATUS.COMPLETE;
return this.exports;
};
/**
 * Internal entry point for define(): associates the call with a module still
 * waiting on the currently executing script, or creates a fresh module entry.
 */
Module.define=function(name,deps,initor){
var module;
var script=Sky.getCurrentScript();
if(script.modules){
// Script already tracked: (re-)register it under its resolved URL.
var path=new URL(script.src,location).href;
libs.set(path,script);
}else{
script.modules=new Array();
}
if(script.requires){
// Try to match this define() against a module pending on this script.
var i=script.requires.length;
while(i-->0){
module=script.requires[i];
if(module.status<=STATUS.LOADING){
if(name==null || module.name==name){
module.define(deps,initor);
return ;
}
}
}
}
// No pending request matched: create a standalone module.
module=new Module(name);
cache.set(name,module);
module.script=script;
module.define(deps,initor);
};
/*
define(data);
define(initor);
define(deps,initor);
define(name,deps,initor);
*/
/**
 * Global AMD-style define(). Dispatches on arity (see the signature list
 * above). The single-function form infers dependencies CommonJS-style from
 * the function's arity and its source text.
 */
define=function(arg1,arg2,arg3){
switch(arguments.length){
case 1:
if(Sky.isFunction(arg1)){
var deps=new Array();
// Intentional fall-through: an initor taking N args implicitly
// depends on require/exports/module, in that order.
switch(arg1.length){
case 3:
deps.unshift('module');
case 2:
deps.unshift('exports');
case 1:
deps.unshift('require');
break ;
}
// Scan the function source for require("...") calls (comments stripped first).
arg1.toString().replace(commentRegExp,commentReplace).replace(cjsRequireRegExp,function(match,dep){
deps.push(dep);//CMD
});
Module.define(null,deps,arg1);
}else{
// Plain data module: resolve immediately with the value.
Module.define(null,null,arg1);
}
break;
case 2:
Module.define(null,arg1,arg2);
break;
case 3:
Module.define(arg1,arg2,arg3);
}
};
/**
 * Entry point for circular-dependency detection: starts a depth-first walk
 * from `module` with a fresh visit stack (seeded with the module itself,
 * matching the original call sequence).
 */
function checkCircular(module) {
    if (module.dependencies.length) {
        return checkCircularSub(module, [module]);
    }
}
/**
 * Depth-first walk of `module`'s dependency graph; `stack` is the current
 * visit path. On detecting a cycle, resolves the first module on the path
 * that already exposes `exports` (the CommonJS-style way to break cycles);
 * otherwise reports an unresolvable circular dependency.
 */
function checkCircularSub(module, stack) {
    var i = module.dependencies.length;
    stack.push(module);
    while (i-- > 0) {
        var mod = module.dependencies[i];
        if (stack.includes(mod)) {
            var j = stack.length;
            while (j-- > 0) {
                var m = stack[j];// bug fix: `m` was an accidental global
                if ('exports' in m) {
                    m.resolve(m.exports);
                    m.status = STATUS.COMPLETE;
                    return;
                }
            }
            console.error("circular dependency found,should use exports");
            return;
        }
        // Bug fix: was `mod.STATUS`, which is never set (modules use the
        // lowercase `status` property), so completed modules were re-walked.
        if (mod.dependencies && mod.status != STATUS.COMPLETE) {
            stack.push(mod);
            checkCircularSub(mod, stack);
            stack.pop();
        }
    }
}
// String.replace callback for the comment-stripping regex: keeps the captured
// single-line-comment prefix, replaces block comments with nothing.
function commentReplace(match, singlePrefix) {
    return singlePrefix ? singlePrefix : '';
}
/** Registers a custom name-to-URL resolution rule (checked before paths/map). */
require.path=function(rule){
rules.push(rule);
};
/** Registers a hook to run when module loading completes. */
require.complete=function(hook){
hooks.push(hook);
};
/**
 * Applies a require.config() options object: paths, bundles, per-module
 * maps, module config values, baseUrl, urlArgs, and registered packages.
 */
require.config=function(options){
// Direct name -> path entries.
Sky.forOwn(options.paths,function(value,key){
paths.set(key,value);
});
// Bundles: every listed module name resolves to the bundle's path.
Sky.forOwn(options.bundles,function(names,path){
if(names.forEach){
names.forEach(function(name){
paths.set(name,path);
});
}
});
// Per-requesting-module name remapping.
Sky.forOwn(options.map,function(paths,formPath){
var pathMap=map.get(formPath);
if(!pathMap){
pathMap=new Map();
map.set(formPath,pathMap);
}
paths.forEach(function(path,name){
pathMap.set(name,path);
});
});
// Arbitrary config values exposed via module.config().
Sky.forOwn(options.config,function(value,key){
config.set(key,value);
});
if(options.baseUrl){
baseUrl=options.baseUrl;
}
if(options.urlArgs){
urlArgs=options.urlArgs;
}
// Register packages, skipping duplicates.
if(options.pkgs){
var i=options.pkgs.length;
while(i-->0){
var pkg=options.pkgs[i];
if(!pkgs.includes(pkg)){
pkgs.push(pkg);
}
}
}
};
define.amd=true;
})(this); |
#!/bin/bash
# A Bash script, by Daniar
# chmod 755 myscript.sh
# Resolves the IP address of each node-N.ucare.edu host in a user-supplied
# range and appends the results to list_ip.txt.
echo ================================================
echo GET NODE\'s IP
echo ================================================
echo Start from node?
read counter
echo Finished on node?
read maxNodes
# -f: don't fail (or print an error) when the file doesn't exist yet
rm -f list_ip.txt
while [ "$counter" -le "$maxNodes" ]
do
    host=node-$counter.ucare.edu
    # Single ping, numeric output; sed extracts the dotted quad between parentheses
    ip=$(ping -c1 -n "$host" | head -n1 | sed "s/.*(\([0-9]*\.[0-9]*\.[0-9]*\.[0-9]*\)).*/\1/g")
    echo "IP of $host = $ip"
    echo "$ip" >> list_ip.txt
    ((counter++))
done
def char_total(string):
    """Return the sum of the Unicode code points of every character in *string*.

    Args:
        string: Any string (empty strings yield 0).

    Returns:
        int: Sum of ``ord(c)`` over all characters.
    """
    # sum/map replaces the manual accumulator loop (same result, idiomatic).
    return sum(map(ord, string))
print(char_total('Hello World!'))
# Output: 1085
use std::io;
use std::collections::HashMap;
/// One ballot: who voted, for which party, and in which state.
struct ElectionResult {
    voter_name: String,
    voted_party: String,
    state: String,
}
fn visualize_results(input_data: Vec<ElectionResult>) {
let mut states_voted = HashMap::new();
for result in &input_data {
// get the state name and save it in the map
let state = &result.state;
if !states_voted.contains_key(state){
states_voted.insert(state, vec![]);
}
states_voted.get_mut(state).unwrap().push(result.voted_party);
}
// generate the visualization
for (state, parties_voted) in &states_voted {
//TODO: generate visualization
}
} |
<reponame>Goytai/NasaAPI
import { Field, ID, ObjectType } from 'type-graphql';
import { ObjectID } from 'typeorm';
/** GraphQL object type describing a station record returned by the API. */
@ObjectType()
export class StationsResponse {
/** Database ObjectID of the station document (typeorm's Mongo-style ID). */
@Field(() => ID)
id: ObjectID;
/** Name of the planet the station belongs to. */
@Field()
planetName: string;
/** When the station record was created. */
@Field()
createdAt: Date;
}
|
#!/bin/bash
# Trains an SSD-512 detector, then evaluates the resulting checkpoint,
# reporting total wall-clock time at the end.
start=$(date +%s)
# train
CUDA_VISIBLE_DEVICES=4,5,6,7 python train.py \
--save_folder=renew_512_set2 --deploy --batch_size=32 --ssd_dim=512 --max_iter=100000 \
--prior_config=v2_512 --lr=1e-3 --schedule=60000,80000,90000 --gamma=0.5
# test
CUDA_VISIBLE_DEVICES=1 python eval.py --experiment_name=renew_512_set2 \
--trained_model=final_v2.pth \
--ssd_dim=512 --conf_thresh=0.005 --top_k=300 --nms_thresh=0.45 \
--prior_config=v2_512
# $(...) replaces legacy backticks for command substitution
end=$(date +%s)
runtime=$((end-start))
echo "Completed in" $runtime "seconds"
|
// 17. 电话号码的字母组合
// https://leetcode-cn.com/problems/letter-combinations-of-a-phone-number/
package question17
// m maps a phone keypad digit to the letters printed on that key.
var m = map[uint8]string{
'2': "abc",
'3': "def",
'4': "ghi",
'5': "jkl",
'6': "mno",
'7': "pqrs",
'8': "tuv",
'9': "wxyz",
}
// Strategy: recurse digit by digit, combining each key's letters with the
// combinations of the remaining digits.
func LetterCombinations(digits string) []string {
if len(digits) == 0 {
return []string{}
}
return f(digits)
}
// f returns every letter combination for the non-empty digit string,
// prefixing each letter of the first key to the combinations of the rest.
// Digits with no keypad entry are skipped.
func f(digits string) []string {
	if len(digits) == 1 {
		var res []string
		for _, c := range m[digits[0]] {
			res = append(res, string(c))
		}
		return res
	}
	rest := f(digits[1:])
	letters, ok := m[digits[0]]
	if !ok {
		return rest
	}
	var res []string
	for _, c := range letters {
		for _, suffix := range rest {
			res = append(res, string(c)+suffix)
		}
	}
	return res
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.rel;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlKind;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.math.expr.ExprType;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.aggregation.post.ExpressionPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.PostAggregatorVisitor;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.table.RowSignature;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
/**
* Used to represent projections (Calcite "Project"). These are embedded in {@link Sorting} and {@link Grouping} to
* store post-sorting and post-grouping projections, as well as directly in {@link DruidQuery} to store potential
* post-selection projections. They may be built using either virtual columns (pre-aggregation) or post-aggregators.
*
* It is expected that callers will create and use Projection instances in the same context (pre- or post-aggregation).
* If this isn't done properly (i.e. a caller creates a pre-aggregation Projection but then calls
* {@link #getPostAggregators()} then an exception will be thrown.
*/
public class Projection
{
@Nullable
private final List<PostAggregator> postAggregators;
@Nullable
private final List<VirtualColumn> virtualColumns;
private final RowSignature outputRowSignature;
/**
 * Exactly one of {@code postAggregators} (post-aggregation mode) or
 * {@code virtualColumns} (pre-aggregation mode) must be non-null; the
 * constructor enforces this invariant.
 */
private Projection(
@Nullable final List<PostAggregator> postAggregators,
@Nullable final List<VirtualColumn> virtualColumns,
final RowSignature outputRowSignature
)
{
if (postAggregators == null && virtualColumns == null) {
throw new IAE("postAggregators and virtualColumns cannot both be null");
} else if (postAggregators != null && virtualColumns != null) {
throw new IAE("postAggregators and virtualColumns cannot both be nonnull");
}
this.postAggregators = postAggregators;
this.virtualColumns = virtualColumns;
this.outputRowSignature = outputRowSignature;
}
/**
 * Handles an INPUT_REF or LITERAL projection expression: converts it to a
 * {@link DruidExpression} and delegates to
 * {@link #handlePostAggregatorExpression}. Throws
 * {@link CannotBuildQueryException} when the RexNode cannot be translated.
 */
private static void postAggregationHandleInputRefOrLiteral(
final Project project,
final PlannerContext plannerContext,
final RowSignature inputRowSignature,
final RexNode postAggregatorRexNode,
final List<String> rowOrder,
final PostAggregatorVisitor postAggregatorVisitor
)
{
// Attempt to convert to PostAggregator.
final DruidExpression postAggregatorExpression = Expressions.toDruidExpression(
plannerContext,
inputRowSignature,
postAggregatorRexNode
);
if (postAggregatorExpression == null) {
throw new CannotBuildQueryException(project, postAggregatorRexNode);
}
handlePostAggregatorExpression(
plannerContext,
inputRowSignature,
postAggregatorRexNode,
rowOrder,
postAggregatorVisitor,
postAggregatorExpression
);
}
/**
 * Handles any projection expression that is not an INPUT_REF or LITERAL:
 * first tries a direct PostAggregator conversion, then falls back to a
 * Druid expression (with post-agg operands) wrapped as a post-aggregator.
 */
private static void postAggregationHandleOtherKinds(
final Project project,
final PlannerContext plannerContext,
final RowSignature inputRowSignature,
final RexNode postAggregatorRexNode,
final List<String> rowOrder,
final PostAggregatorVisitor postAggregatorVisitor
)
{
PostAggregator pagg = OperatorConversions.toPostAggregator(
plannerContext,
inputRowSignature,
postAggregatorRexNode,
postAggregatorVisitor
);
if (pagg != null) {
postAggregatorVisitor.addPostAgg(pagg);
rowOrder.add(pagg.getName());
} else {
final DruidExpression postAggregatorExpression = Expressions.toDruidExpressionWithPostAggOperands(
plannerContext,
inputRowSignature,
postAggregatorRexNode,
postAggregatorVisitor
);
if (postAggregatorExpression == null) {
throw new CannotBuildQueryException(project, postAggregatorRexNode);
}
handlePostAggregatorExpression(
plannerContext,
inputRowSignature,
postAggregatorRexNode,
rowOrder,
postAggregatorVisitor,
postAggregatorExpression
);
}
}
/**
 * Turns a translated post-aggregation expression into a rowOrder entry:
 * a field-access post-agg for complex direct columns, the bare column name
 * for cast-free direct access, or an expression post-agg otherwise.
 */
private static void handlePostAggregatorExpression(
final PlannerContext plannerContext,
final RowSignature inputRowSignature,
final RexNode postAggregatorRexNode,
final List<String> rowOrder,
final PostAggregatorVisitor postAggregatorVisitor,
final DruidExpression postAggregatorExpression
)
{
if (postAggregatorComplexDirectColumnIsOk(inputRowSignature, postAggregatorExpression, postAggregatorRexNode)) {
// Direct column access on a COMPLEX column, expressions cannot operate on complex columns, only postaggs
// Wrap the column access in a field access postagg so that other postaggs can use it
final PostAggregator postAggregator = new FieldAccessPostAggregator(
postAggregatorVisitor.getOutputNamePrefix() + postAggregatorVisitor.getAndIncrementCounter(),
postAggregatorExpression.getDirectColumn()
);
postAggregatorVisitor.addPostAgg(postAggregator);
rowOrder.add(postAggregator.getName());
} else if (postAggregatorDirectColumnIsOk(inputRowSignature, postAggregatorExpression, postAggregatorRexNode)) {
// Direct column access, without any type cast as far as Druid's runtime is concerned.
// (There might be a SQL-level type cast that we don't care about)
rowOrder.add(postAggregatorExpression.getDirectColumn());
} else {
final PostAggregator postAggregator = new ExpressionPostAggregator(
postAggregatorVisitor.getOutputNamePrefix() + postAggregatorVisitor.getAndIncrementCounter(),
postAggregatorExpression.getExpression(),
null,
plannerContext.getExprMacroTable()
);
postAggregatorVisitor.addPostAgg(postAggregator);
rowOrder.add(postAggregator.getName());
}
}
/**
 * Builds a post-aggregation Projection: each project expression becomes
 * either a direct column reference or a post-aggregator named with an
 * unused prefix derived from {@code basePrefix}.
 */
public static Projection postAggregation(
final Project project,
final PlannerContext plannerContext,
final RowSignature inputRowSignature,
final String basePrefix
)
{
final List<String> rowOrder = new ArrayList<>();
final String outputNamePrefix = Calcites.findUnusedPrefix(
basePrefix,
new TreeSet<>(inputRowSignature.getRowOrder())
);
final PostAggregatorVisitor postAggVisitor = new PostAggregatorVisitor(outputNamePrefix);
for (final RexNode postAggregatorRexNode : project.getChildExps()) {
if (postAggregatorRexNode.getKind() == SqlKind.INPUT_REF || postAggregatorRexNode.getKind() == SqlKind.LITERAL) {
postAggregationHandleInputRefOrLiteral(
project,
plannerContext,
inputRowSignature,
postAggregatorRexNode,
rowOrder,
postAggVisitor
);
} else {
postAggregationHandleOtherKinds(
project,
plannerContext,
inputRowSignature,
postAggregatorRexNode,
rowOrder,
postAggVisitor
);
}
}
return new Projection(postAggVisitor.getPostAggs(), null, RowSignature.from(rowOrder, project.getRowType()));
}
/**
 * Builds a pre-aggregation Projection: each project expression becomes
 * either a direct column reference or a virtual column obtained from the
 * registry. Throws {@link CannotBuildQueryException} on untranslatable
 * expressions.
 */
public static Projection preAggregation(
final Project project,
final PlannerContext plannerContext,
final RowSignature inputRowSignature,
final VirtualColumnRegistry virtualColumnRegistry
)
{
final List<DruidExpression> expressions = new ArrayList<>();
for (final RexNode rexNode : project.getChildExps()) {
final DruidExpression expression = Expressions.toDruidExpression(
plannerContext,
inputRowSignature,
rexNode
);
if (expression == null) {
throw new CannotBuildQueryException(project, rexNode);
} else {
expressions.add(expression);
}
}
final Set<VirtualColumn> virtualColumns = new HashSet<>();
final List<String> rowOrder = new ArrayList<>();
for (int i = 0; i < expressions.size(); i++) {
final DruidExpression expression = expressions.get(i);
if (expression.isDirectColumnAccess()) {
rowOrder.add(expression.getDirectColumn());
} else {
final VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
expression,
project.getChildExps().get(i).getType().getSqlTypeName()
);
virtualColumns.add(virtualColumn);
rowOrder.add(virtualColumn.getOutputName());
}
}
return new Projection(
null,
ImmutableList.copyOf(virtualColumns),
RowSignature.from(rowOrder, project.getRowType())
);
}
/**
 * Returns true if a post-aggregation "expression" can be realized as a direct field access. This is true if it's
 * a direct column access that doesn't require an implicit cast.
 *
 * @param aggregateRowSignature signature of the aggregation
 * @param expression post-aggregation expression
 * @param rexNode RexNode for the post-aggregation expression
 *
 * @return yes or no
 */
private static boolean postAggregatorDirectColumnIsOk(
final RowSignature aggregateRowSignature,
final DruidExpression expression,
final RexNode rexNode
)
{
if (!expression.isDirectColumnAccess()) {
return false;
}
// We don't really have a way to cast complex type. So might as well not do anything and return.
final ValueType columnValueType = aggregateRowSignature.getColumnType(expression.getDirectColumn());
if (columnValueType == ValueType.COMPLEX) {
return true;
}
// Check if a cast is necessary.
final ExprType toExprType = Expressions.exprTypeForValueType(columnValueType);
final ExprType fromExprType = Expressions.exprTypeForValueType(
Calcites.getValueTypeForSqlTypeName(rexNode.getType().getSqlTypeName())
);
return toExprType.equals(fromExprType);
}
/**
 * Returns true if a post-aggregation "expression" can be realized as a direct field access. This is true if it's
 * a direct column access that doesn't require an implicit cast.
 *
 * @param aggregateRowSignature signature of the aggregation
 * @param expression post-aggregation expression
 * @param rexNode RexNode for the post-aggregation expression
 *
 * @return yes or no
 */
private static boolean postAggregatorComplexDirectColumnIsOk(
final RowSignature aggregateRowSignature,
final DruidExpression expression,
final RexNode rexNode
)
{
if (!expression.isDirectColumnAccess()) {
return false;
}
// Check if a cast is necessary.
final ValueType toValueType = aggregateRowSignature.getColumnType(expression.getDirectColumn());
final ValueType fromValueType = Calcites.getValueTypeForSqlTypeName(rexNode.getType().getSqlTypeName());
return toValueType == ValueType.COMPLEX && fromValueType == ValueType.COMPLEX;
}
public List<PostAggregator> getPostAggregators()
{
// If you ever see this error, it probably means a Projection was created in pre-aggregation mode, but then
// used in a post-aggregation context. This is likely a bug somewhere in DruidQuery. See class-level Javadocs.
return Preconditions.checkNotNull(postAggregators, "postAggregators");
}
public List<VirtualColumn> getVirtualColumns()
{
// If you ever see this error, it probably means a Projection was created in post-aggregation mode, but then
// used in a pre-aggregation context. This is likely a bug somewhere in DruidQuery. See class-level Javadocs.
return Preconditions.checkNotNull(virtualColumns, "virtualColumns");
}
public RowSignature getOutputRowSignature()
{
return outputRowSignature;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Projection that = (Projection) o;
return Objects.equals(postAggregators, that.postAggregators) &&
Objects.equals(virtualColumns, that.virtualColumns) &&
Objects.equals(outputRowSignature, that.outputRowSignature);
}
@Override
public int hashCode()
{
return Objects.hash(postAggregators, virtualColumns, outputRowSignature);
}
@Override
public String toString()
{
// NOTE(review): says "PostSortingExpressions" but the class is Projection —
// looks like a stale name from a rename; confirm before changing the string.
return "PostSortingExpressions{" +
"postAggregators=" + postAggregators +
", virtualColumns=" + virtualColumns +
", outputRowSignature=" + outputRowSignature +
'}';
}
}
|
<gh_stars>1-10
/*
* Copyright 2021 YugaByte, Inc. and Contributors
*
* Licensed under the Polyform Free Trial License 1.0.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://github.com/YugaByte/yugabyte-db/blob/master/licenses/POLYFORM-FREE-TRIAL-LICENSE-1.0.0.txt
*/
package com.yugabyte.yw.models.filters;
import com.yugabyte.yw.common.AlertDefinitionTemplate;
import com.yugabyte.yw.models.AlertDefinitionGroup;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
@Value
@Builder
/**
 * Immutable filter describing which alert definition groups to select.
 * All fields are optional; null means "do not filter on this attribute".
 */
public class AlertDefinitionGroupFilter {
Set<UUID> uuids;
UUID customerUuid;
String name;
Boolean active;
AlertDefinitionGroup.TargetType targetType;
AlertDefinitionTemplate template;
UUID targetUuid;
UUID routeUuid;
// Can't use @Builder(toBuilder = true) as it sets null fields as well, which breaks non null
// checks.
public AlertDefinitionGroupFilterBuilder toBuilder() {
AlertDefinitionGroupFilterBuilder result = AlertDefinitionGroupFilter.builder();
if (uuids != null) {
result.uuids(uuids);
}
if (customerUuid != null) {
result.customerUuid(customerUuid);
}
if (name != null) {
result.name(name);
}
if (active != null) {
result.active(active);
}
if (targetType != null) {
result.targetType(targetType);
}
if (template != null) {
result.template(template);
}
if (targetUuid != null) {
result.targetUuid(targetUuid);
}
if (routeUuid != null) {
result.routeUuid(routeUuid);
}
return result;
}
/**
 * Hand-written pieces of the lombok-generated builder: every setter rejects
 * null, and uuid()/uuids() accumulate into one set rather than replacing it.
 */
public static class AlertDefinitionGroupFilterBuilder {
Set<UUID> uuids = new HashSet<>();
public AlertDefinitionGroupFilterBuilder uuid(@NonNull UUID uuid) {
this.uuids.add(uuid);
return this;
}
public AlertDefinitionGroupFilterBuilder uuids(@NonNull Collection<UUID> uuids) {
this.uuids.addAll(uuids);
return this;
}
public AlertDefinitionGroupFilterBuilder customerUuid(@NonNull UUID customerUuid) {
this.customerUuid = customerUuid;
return this;
}
public AlertDefinitionGroupFilterBuilder name(@NonNull String name) {
this.name = name;
return this;
}
public AlertDefinitionGroupFilterBuilder active(@NonNull Boolean active) {
this.active = active;
return this;
}
public AlertDefinitionGroupFilterBuilder targetType(
@NonNull AlertDefinitionGroup.TargetType targetType) {
this.targetType = targetType;
return this;
}
public AlertDefinitionGroupFilterBuilder template(@NonNull AlertDefinitionTemplate template) {
this.template = template;
return this;
}
public AlertDefinitionGroupFilterBuilder targetUuid(@NonNull UUID targetUuid) {
this.targetUuid = targetUuid;
return this;
}
public AlertDefinitionGroupFilterBuilder routeUuid(@NonNull UUID routeUuid) {
this.routeUuid = routeUuid;
return this;
}
}
}
|
#!/usr/bin/env bash
# Lint, complexity-check, test (with coverage), bundle, and minify the router sources.
jshint --reporter node_modules/jshint-stylish/stylish.js src/*.js
# Complexity report: max length 130, cyclomatic complexity 3, max depth 8.
cr -M 130 -C 3 -D 8 -f minimal src/*.js
# Build the browser test bundle, then run the suite under testling with coverage.
browserify test/*.js > test/build/bundle.js
browserify -t coverify test/*.js | testling | coverify
# Standalone UMD build plus a minified copy (keeping the public Router names).
browserify -s Router src/index.js > dist/router.js
uglifyjs dist/router.js -o dist/router.min.js -m -c unused=false -r Router,RouterResponse,RouterEvent
|
import logging
import os
import shutil
def copy_files_with_logging(source_dir, dest_dir):
    """
    Copy all files from the source directory to the destination directory, preserving the directory structure.
    Log the details of the file copying process using the Python logging module.
    Args:
        source_dir (str): The path of the source directory.
        dest_dir (str): The path of the destination directory.
    Returns:
        None
    """
    # Configure the logging
    logging.basicConfig(filename='file_copy.log', level=logging.INFO, format='%(asctime)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
    # Walk through the source directory and copy files to the destination directory
    for root, dirs, files in os.walk(source_dir):
        for file in files:
            source_file_path = os.path.join(root, file)
            dest_file_path = os.path.join(dest_dir, os.path.relpath(source_file_path, source_dir))
            try:
                # Bug fix: shutil.copy2 cannot create intermediate directories,
                # so any file in a subdirectory (or a missing dest_dir) used to
                # fail and only be logged. Mirror the directory tree first.
                os.makedirs(os.path.dirname(dest_file_path), exist_ok=True)
                shutil.copy2(source_file_path, dest_file_path)
                logging.info(f'Copied file: {source_file_path} to {dest_file_path}')
            except Exception as e:
                logging.error(f'Error copying file: {source_file_path} - {e}')
# Example usage — guarded so merely importing this module does not trigger
# a filesystem walk and log-file creation as a side effect.
if __name__ == '__main__':
    source_directory = '/path/to/source'
    destination_directory = '/path/to/destination'
    copy_files_with_logging(source_directory, destination_directory)
def calculate_depth(file_path: str) -> int:
    """Return the number of path components in *file_path*.

    Empty fragments produced by leading, trailing, or doubled slashes are
    not counted.
    """
    return sum(1 for part in file_path.split('/') if part)
package com.jensen.draculadaybyday.sql_lite;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import android.util.Log;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
/**
 * Parcelable builder for a SQL ORDER BY clause: columns are accumulated as
 * SortValue entries (re-adding a column moves it to the end) and joined into
 * a single sort-order string.
 */
public class SqlSortFactory implements Parcelable {
private final UniqueValueList<SortValue> sortingOrder;
public SqlSortFactory() {
sortingOrder = new UniqueValueList<>();
}
/** Reconstructs the factory from a Parcel written by writeToParcel(). */
private SqlSortFactory(Parcel in) {
sortingOrder = new UniqueValueList<>();
try {
List<SortValue> list = new LinkedList<>();
in.readList(list, SortValue.class.getClassLoader());
sortingOrder.addAll(list);
} catch (Exception e) {
Log.d("Parcel prob", e.getMessage());
}
}
/** Returns a copy of the current sort entries, in order. */
public List<SortValue> getSortingOrderList() {
List<SortValue> returnValue = new ArrayList<>();
returnValue.addAll(sortingOrder);
return returnValue;
}
public void bookOrder(boolean asc) {
sortingOrder.add(new SortValue(FragmentEntryDatabaseHandler.ENTRY_SEQ_NUM, asc, 0));
}
public void chapterOrder(boolean asc) {
sortingOrder.add(new SortValue(FragmentEntryDatabaseHandler.CHAPTER, asc, 1));
}
public void personOrder(boolean asc) {
sortingOrder.add(new SortValue(FragmentEntryDatabaseHandler.PERSON, asc, 2));
}
public void dateOrder(boolean asc) {
sortingOrder.add(new SortValue(FragmentEntryDatabaseHandler.DATE, asc, 3));
}
public void entryType(boolean asc) {
sortingOrder.add(new SortValue(FragmentEntryDatabaseHandler.TYPE, asc, 4));
}
public void readType(boolean asc) {
sortingOrder.add(new SortValue(FragmentEntryDatabaseHandler.UNREAD, asc, 5));
}
/** Joins the entries into an ORDER BY-ready string, e.g. "col1 ASC, col2 DESC". */
public String getSortOrder() {
return TextUtils.join(", ", sortingOrder);
}
public void writeToParcel(Parcel out, int flags) {
LinkedList<SortValue> list = new LinkedList<>();
list.addAll(sortingOrder);
out.writeList(list);
}
public int describeContents () {
return 0;
}
public static final Parcelable.Creator<SqlSortFactory> CREATOR
= new Parcelable.Creator<SqlSortFactory>() {
public SqlSortFactory createFromParcel(Parcel in) {
return new SqlSortFactory(in);
}
public SqlSortFactory[] newArray(int size) {
return new SqlSortFactory[size];
}
};
/**
 * LinkedList that keeps at most one copy of each element: re-adding an
 * existing element removes the old occurrence first, so the newest add
 * always sits at the tail. add() returns false when a duplicate was
 * displaced — a deliberate deviation from the Collection contract.
 * NOTE(review): relies on SortValue's equals/hashCode (defined elsewhere);
 * confirm they compare by column only.
 */
private class UniqueValueList<E> extends LinkedList<E> {
final HashMap<E, Integer> recallValue;
public UniqueValueList() {
super();
recallValue = new HashMap<>();
}
@Override
public boolean add(E item) {
boolean addCorrectly = true;
if (recallValue.containsKey(item)) {
// remove the order object
super.remove(item);
recallValue.remove(item);
addCorrectly = false;
}
// Add the item to the hashmap
recallValue.put(item, 1);
boolean addResult = super.add(item);
return addResult && addCorrectly;
}
@Override
public boolean addAll(Collection<? extends E> items) {
boolean addCorrectly = true;
for (E item : items) {
addCorrectly &= add(item);
}
return addCorrectly;
}
}
}
|
#ifndef __SSU_FILE_H__
#define __SSU_FILE_H__
#include <filesys/inode.h>
/* Maximum number of simultaneously open file descriptors. */
#define NR_FILEDES 5
/* file_open() access/behavior flags (open(2)-style bit values). */
#define O_RDONLY 0
#define O_WRONLY 1
#define O_RDWR 2
#define O_APPEND 4
#define O_TRUNC 8
/* fcntl-style command codes. */
#define F_DUPFD 1
#define F_GETFL 2
#define F_SETFL 4
/* file_seek() whence values.
 * NOTE(review): SEEK_END is -1 here, not the POSIX value 2 — presumably a
 * sentinel used by file_seek(); confirm against its implementation. */
#define SEEK_SET 0
#define SEEK_CUR 1
#define SEEK_END -1
#define MAXLEN 40
/* Per-descriptor open-file state. */
struct ssufile
{
struct inode *inode; // inode structure backing this open file
uint16_t pos; // current file offset
uint8_t flags; // open-mode flags (O_* above)
uint8_t unused;
};
/* Repositions the offset of descriptor fd. */
int file_seek(uint32_t fd, uint16_t pos);
/* Opens inode with the given flags/mode; returns a descriptor or an error. */
int file_open(struct inode *inode,int flags, int mode);
/* Releases descriptor fd. */
int file_close(uint32_t fd);
/* Reads/writes len bytes at offset; return values follow read/write conventions. */
int file_read(struct inode *file, size_t offset, void *buf, size_t len);
int file_write(struct inode *file, size_t offset, void *buf, size_t len);
#endif
|
#!/bin/bash
# This script contains code that should set up our scenario.
# Writes a marker file so the lesson can show the script ran.
# (Fixed typo in the emitted text: "exectued" -> "executed".)
cat << EOT > intro.log
The courseBase.sh script for the intro has executed in the background
and this text should now appear in the file "intro.log".
EOT
from django.db import models
from django.conf import settings
class Title(models.Model):
    """Model representing a book title; concrete fields still to be defined."""
    # Define fields for the book title
    # ...
    # Bug fix: the docstring gives the class a real body — a class whose body
    # is only comments is a SyntaxError.
class Review(models.Model):
    """A user's scored review of a book title."""
    score = models.IntegerField(default=0) # Field for the review's score
    pub_date = models.DateTimeField(auto_now_add=True) # Field for the publication date
    author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE) # Reference to the author model
    title = models.ForeignKey(Title, related_name='reviews', on_delete=models.CASCADE) # Reference to the book title model
-- Returns the single customer with the most recent dob value
-- (i.e. the youngest customer, assuming dob is a date of birth).
SELECT * FROM Customers ORDER BY dob DESC LIMIT 1;
project_name=php
bug_id=2a6968e43a
# Target layout: <arg1>/manybugs/php/<bug_id>/src
dir_name=$1/manybugs/$project_name/$bug_id
download_url=https://repairbenchmarks.cs.umass.edu/ManyBugs/scenarios/php-bug-2011-02-21-2a6968e43a-ecb9d8019c.tar.gz
current_dir=$PWD
# Download and unpack the ManyBugs scenario for this PHP bug.
mkdir -p $dir_name
cd $dir_name
wget $download_url
tar xfz php-bug-2011-02-21-2a6968e43a-ecb9d8019c.tar.gz
mv php-bug-2011-02-21-2a6968e43a-ecb9d8019c src
cd src/php
# Build PHP's CLI with wllvm (whole-program LLVM) wrappers, most extensions disabled.
make clean
CC=wllvm CXX=wllvm++ ./configure --enable-cli --disable-dom --disable-xml --disable-pear --disable-simplexml --disable-phar --disable-inline-optimization --disable-fileinfo
CC=wllvm CXX=wllvm++ make -j32
CC=wllvm CXX=wllvm++ make sapi/cli/php -j32
import { COIN_MAP } from './coinmap';
import { COIN_TYPE } from './cointype';
import { STATUS_CODE } from './statuscode'
export {
COIN_MAP,COIN_TYPE,STATUS_CODE
} |
pub fn calculate_average_without_outliers(data: &[f32], precision: f32) -> f32 {
let mut sum = 0.0;
let mut count = 0;
for &value in data {
let mut is_outlier = false;
for &other_value in data {
if value != other_value && !close(value, other_value, precision) {
is_outlier = true;
break;
}
}
if !is_outlier {
sum += value;
count += 1;
}
}
if count == 0 {
return 0.0; // No non-outlier values
}
sum / count as f32
} |
<reponame>theutz/newinfinland.com
import facepaint from 'facepaint'
// Shared design tokens for the site theme.
const font = `Tajawal, sans-serif`
// Brand palette.
const color = {
  primary: `rgb(0, 46, 142)`,
  black: `#333333`,
  white: `#ffffff`,
}
// Min-width breakpoints (px) fed to facepaint for responsive styling.
const breakpoints = [769, 1024, 1216, 1408]
const mq = facepaint(breakpoints.map(width => `@media (min-width: ${width}px)`))
export { font, color, breakpoints, mq }
|
#!/bin/bash
# We need to install dependencies only for Docker
# (on a non-Docker runner there is nothing to install, so exit successfully).
[[ ! -e /.dockerenv ]] && exit 0
# -x: trace each command; -e: abort on the first failure.
set -xe
# Install git (the php image doesn't have it) which is required by composer
apt-get update -yqq
apt-get install git -yqq
# Install phpunit, the tool that we will use for testing
curl --location --output /usr/local/bin/phpunit https://phar.phpunit.de/phpunit.phar
chmod +x /usr/local/bin/phpunit
<reponame>smagill/opensphere-desktop
package io.opensphere.shapefile;
import io.opensphere.core.Toolbox;
import io.opensphere.core.datafilter.DataFilterRegistry;
import io.opensphere.mantle.data.impl.DefaultDataTypeInfo;
import io.opensphere.shapefile.config.v1.ShapeFileSource;
/**
* The Class ShapeFileDataTypeInfo.
*/
public class ShapeFileDataTypeInfo extends DefaultDataTypeInfo
{
/** The my file source. */
private final ShapeFileSource myFileSource;
/**
 * Constructor with key, name, and display name.
 *
 * @param tb - the {@link Toolbox}
 * @param fileSource the file source
 * @param sourcePrefix the source prefix
 * @param typeKey - the type key
 * @param typeName - the type name
 * @param displayName - the display name.
 * @param providerFiltersMetaData - true if the provider of this DataType is
 * capable of and takes responsibility for filtering metadata
 * using the {@link DataFilterRegistry} in the core
 * {@link Toolbox}. If false the Mantle layer will provide the
 * filtering for data at insert time.
 */
public ShapeFileDataTypeInfo(Toolbox tb, ShapeFileSource fileSource, String sourcePrefix, String typeKey, String typeName,
String displayName, boolean providerFiltersMetaData)
{
super(tb, sourcePrefix, typeKey, typeName, displayName, providerFiltersMetaData);
myFileSource = fileSource;
}
// Identity is defined entirely by the superclass; the override exists only
// to document that this is intentional (hence the PMD suppression). Note
// that myFileSource deliberately does not participate in equality.
@Override
@SuppressWarnings("PMD.OverrideMerelyCallsSuper")
public boolean equals(Object obj)
{
return super.equals(obj);
}
/**
 * Gets the file source.
 *
 * @return the file source
 */
public ShapeFileSource getFileSource()
{
return myFileSource;
}
// Kept in lockstep with equals() above (same superclass-only contract).
@Override
@SuppressWarnings("PMD.OverrideMerelyCallsSuper")
public int hashCode()
{
return super.hashCode();
}
}
|
<reponame>magma/fbc-js-core
/**
* Copyright 2020 The Magma Authors.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @flow strict-local
* @format
*/
import {useEffect, useRef} from 'react';
type TFunction = () => void | Promise<void>;
// from https://overreacted.io/making-setinterval-declarative-with-react-hooks/
export default function useInterval(callback: TFunction, delay: number) {
const savedCallback = useRef<?TFunction>();
// Remember the latest callback.
useEffect(() => {
savedCallback.current = callback;
}, [callback]);
// Set up the interval.
useEffect(() => {
function tick() {
savedCallback.current && savedCallback.current();
}
if (delay !== null) {
const id = setInterval(tick, delay);
return () => clearInterval(id);
}
}, [delay]);
}
|
<filename>admin/src/store/mutations.js<gh_stars>1-10
import storage from '@/utils/storage'
export default {
  // Toggle the sidebar collapsed state.
  toggleCollapse(state, isCollapse) {
    state.isCollapse = isCollapse
  },
  // Record whether the user is currently logged in.
  hasLogin(state, hasLogin) {
    state.hasLogin = hasLogin
  },
  // Initialize state from persistent storage.
  // NOTE(review): the values (possibly null token) are written straight back
  // to storage after being read -- presumably to normalize missing keys; confirm.
  initState(state) {
    state.access_token = storage.getItem('access_token')
    state.adminInfo = storage.getItem('adminInfo') || { username: 'admin', role: 'admin' }
    storage.setItem('access_token', state.access_token)
    storage.setItem('adminInfo', state.adminInfo)
  },
  // Login succeeded: persist the token and admin profile from the payload.
  login(state, data) {
    // Reset the relevant state fields from the server response.
    state.access_token = data.token
    state.hasLogin = true
    state.adminInfo = data.admin
    storage.setItem('access_token', state.access_token)
    storage.setItem('adminInfo', state.adminInfo)
  },
  // Log out: clear the token and restore the default admin profile.
  logout(state) {
    // Reset the relevant state fields to their defaults.
    state.access_token = ''
    state.hasLogin = false
    state.adminInfo = {
      username: 'admin',
      role: 'admin'
    }
    storage.setItem('access_token', state.access_token)
    storage.setItem('adminInfo', state.adminInfo)
  }
}
|
<filename>src/models/users/response.ts
import { BackGroundColorType } from "../../components/editProfile/constant";
import { SchoolType, UserType } from "../../interface/Common/user";
// Response payload for the user's recent search keyword history.
export interface UserSearchHistoryResponseType {
    keywords: Array<{
        history_id: number;
        keyword: string;
    }>
}
// Response payload for a user search query.
export interface UserSearchResponseType {
    user_list: Array<{
        user_id: number;
        name: string;
        profile_image_url: string;
        user_scope: UserType;
        school: SchoolType;
    }>
}
// Response payload for a single user's profile page.
export interface UserProfileResponseType {
    // Optional -- presumably absent when viewing one's own profile; confirm against the API.
    id?: number;
    name: string;
    school: SchoolType;
    user_scope: UserType;
    profile_image_url: string;
    profile_background_color: BackGroundColorType;
    introduce?: string;
    link_url?: string[];
    feed_count: number;
    follower_count: number;
    following_count: number;
}
<gh_stars>100-1000
//----------------------------------------------------------------------------//
// MODEL : OpenSOAP
// GROUP : Use SAX
// MODULE : CalcClientRequest.java
// ABSTRACT : CalcClient for Body SOAP Message Proc.
// [ ASYNC. Version ]
// DATE : 2002.02.01
// DESIGNED : Sunbit System k.Kuwa
//----------------------------------------------------------------------------//
// UpDate
// No. Registration Of Alteration Date User
//----------------------------------------------------------------------------//
//..+....1....+....2....+....3....+....4....+....5....+....6....+....7....+....8
import java.io.*;
import java.awt.*;
import org.xml.sax.*;
import org.xml.sax.helpers.*;
// SAX handler that writes the asynchronous calc request and extracts the
// returned message id from the response.
public class CalcClientRequest extends OpenSoapRequest {
    // Response buffers; r_message_id accumulates <message_id> text.
    // NOTE(review): r_anser (sic) is declared but never written in this class.
    protected StringBuffer r_anser = null,
           r_message_id = null;
    // Request operands/operator plus response tag bookkeeping.
    protected String val1 = null,
           val2 = null,
           operator = null,
           responseTag = null,
           responseNamespaceURI = null,
           resultTag = null,
           resultNamespaceURI = null;
    // Parser states. Only NONE, RESPONSE and HEADER are used in this class;
    // the rest appear to be shared conventions with sibling handlers.
    protected static final int NONE = 0,
           ENVELOPE = 1,
           HEADER = 2,
           BODY = 3,
           FAULT = 4,
           FAULT_CODE = 5,
           FAULT_STRING = 6,
           MESSAGEID = 7,
           ANSER = 8,
           RESPONSE = 9;
    protected int status = NONE;
    // UI labels updated when the message id arrives.
    protected Label message;
    protected Label pno;
    /**
     * Creates a request bound to the calc service URI.
     *
     * @param message label used for status text (cleared on response)
     * @param pno label that receives the returned message id
     */
    public CalcClientRequest(Label message, Label pno) {
        super(OpenSoapConstants.CALC_URI);
        this.message = message;
        this.pno = pno;
    }
    /** Resets all parser state at the start of each response document. */
    public void startDocument() throws SAXException {
        status = NONE;
        r_message_id = null;
        val1 = null;
        val2 = null;
        operator = null;
        responseTag = null;
        resultTag = null;
    }
    // The first element seen becomes the response root; a subsequent
    // <message_id> switches to HEADER so characters() collects its text.
    public void startElement(String namespaceURI, String localName, String rawName, Attributes atts)
        throws SAXException {
        if(NONE == status) {
            status = RESPONSE;
            responseNamespaceURI = namespaceURI;
            responseTag = localName;
        } else if(RESPONSE == status && localName.equals("message_id") && null == r_message_id) {
            status = HEADER;
            r_message_id = new StringBuffer();
        }
    }
    // When </message_id> closes, publish the collected id to the pno label.
    public void endElement(String namespaceURI, String localName, String rawName)
        throws SAXException {
        if ( localName.equals("message_id")) {
            status = HEADER;
            message.setText("");
            pno.setText(r_message_id.toString());
        }
    }
    /** Accumulates character data only while inside <message_id>. */
    public void characters(char[] ch,int start,int len) throws SAXException {
        if(HEADER == status) r_message_id.append(ch,start,len);
    }
    /** Sets the first operand (serialized as element <A>). */
    public void SetVal1(String val1) {
        this.val1 = val1;
    }
    /** Sets the second operand (serialized as element <B>). */
    public void SetVal2(String val2) {
        this.val2 = val2;
    }
    /** Sets the operation element name used as the body's root child. */
    public void SetOperator(String operator) {
        this.operator = operator;
    }
    // Writes the OpenSOAP header requesting asynchronous processing with an
    // 80 second time-to-live.
    public void writeRequestHead(XMLWriter writer) throws IOException {
        writer.write("<SOAP-ENV:Header>\n");
        writer.write(" <opensoap-header:opensoap-header-block xmlns:opensoap-header='" +
                 OpenSoapConstants.SOAP_HEADER_URI +
                 "'>\n");
        writer.write("  <opensoap-header:ttl opensoap-header:type='second'>80</opensoap-header:ttl>\n");
        writer.write("  <opensoap-header:async>true</opensoap-header:async>\n");
        writer.write(" </opensoap-header:opensoap-header-block>\n");
        writer.write("</SOAP-ENV:Header>\n");
    }
    // Writes the body: <ns1:operator><A>val1</A><B>val2</B></ns1:operator>,
    // escaping the user-supplied operator and operand strings.
    public void writeRequestBody(XMLWriter writer) throws IOException {
        writer.write("<SOAP-ENV:Body>\n");
        writer.write("<ns1:");
        writer.escape(operator);
        writer.write(" xmlns:ns1='");
        writer.write(OpenSoapConstants.CALC_URI);
        writer.write("' SOAP-ENV:encodingStyle='");
        writer.write(OpenSoapConstants.SOAPENCODING_URI);
        writer.write("'>\n <A>");
        writer.escape(val1);
        writer.write("</A>\n <B>");
        writer.escape(val2);
        writer.write("</B>\n</ns1:");
        writer.escape(operator);
        writer.write(">\n");
        writer.write("</SOAP-ENV:Body>\n");
    }
}
//go:generate fyne bundle -o data.go Icon.png
// Package main launches the calculator app
package main
import "fyne.io/fyne/v2/app"
// main wires up the calculator UI and runs the Fyne event loop.
// See the readme for installation instructions.
func main() {
	// Fix: the local was previously named "app", shadowing the imported
	// fyne "app" package.
	calcApp := app.New()
	calcApp.SetIcon(resourceIconPng)
	c := newCalculator()
	c.loadUI(calcApp)
	calcApp.Run()
}
|
import { notification } from 'antd';
import { Toast } from 'antd-mobile';
import { isMobileDevice } from '..';
/**
 * Shared request error handler.
 * To distinguish request timeouts, see
 * https://github.com/umijs/umi-request/issues/14
 * @param error ResponseError thrown by the request layer
 * @param showErrorNotification whether to surface the error in the UI
 */
const errorHandler = ({
  error,
  showErrorNotification = true,
}: {
  error: any;
  showErrorNotification?: boolean;
}) => {
  const { status, url, message } = error;
  // Fix: the log label previously read "statue" instead of "status".
  console.error(`request error - status:${status}, message:${message}, url:${url}`);
  if (showErrorNotification) {
    // Mobile clients get a lightweight toast instead of a desktop notification.
    if (isMobileDevice()) {
      Toast.info(message);
      return;
    }
    notification.error({
      message: `请求错误 ${status}: ${url}`,
      description: message,
    });
  }
};
export default errorHandler;
|
# Convert $temp_celsius to Fahrenheit with 4 decimal digits via bc.
temp_fahrenheit=$(echo "scale = 4; $temp_celsius * 1.8000 + 32.00" | bc)
# Fix: quote the expansion so the value is printed verbatim (no word splitting).
echo "$temp_fahrenheit"
def findAnagrams(word):
    """Enumerate index-permutations of ``word`` and keep those spelling ``word``.

    By construction every collected string equals ``word`` itself, so the
    result is the input repeated once per index-permutation that reproduces
    it (e.g. a word with repeated letters yields duplicates).
    """
    n = len(word)
    matches = []
    # Tracks which character positions are already used in the prefix.
    used = [False] * n

    def _extend(prefix):
        # A full-length prefix is kept only when it spells the original word.
        if len(prefix) == n:
            if prefix == word:
                matches.append(prefix)
            return
        for idx in range(n):
            if used[idx]:
                continue
            used[idx] = True
            _extend(prefix + word[idx])
            # Un-mark so the position can be reused on other branches.
            used[idx] = False

    _extend("")
    return matches
import random
import string
def generate_password():
    """Return a random 12-character password of letters, digits, and punctuation.

    Note: uses the non-cryptographic ``random`` module; for security-sensitive
    passwords, ``secrets.choice`` would be preferable.
    """
    chars = string.ascii_letters + string.digits + string.punctuation
    return ''.join(random.choice(chars) for _ in range(12))


# Fix: the guard previously read `if name == 'main':`, which raises NameError;
# the correct idiom is the __name__/'__main__' comparison below.
if __name__ == '__main__':
    print(generate_password())
<reponame>schinmayee/nimbus<filename>applications/physbam/physbam-lib/Public_Library/PhysBAM_Fluids/PhysBAM_Compressible/Euler_Equations/EULER_CAVITATION_UNIFORM.cpp<gh_stars>10-100
//#####################################################################
// Copyright 2010, <NAME>, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_CELL.h>
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_FACE.h>
#include <PhysBAM_Tools/Grids_Uniform_Arrays/ARRAYS_UTILITIES.h>
#include <PhysBAM_Tools/Log/DEBUG_SUBSTEPS.h>
#include <PhysBAM_Geometry/Collisions/COLLISION_GEOMETRY_ID.h>
#include <PhysBAM_Geometry/Grids_Uniform_Collisions/GRID_BASED_COLLISION_GEOMETRY_UNIFORM.h>
#include <PhysBAM_Geometry/Grids_Uniform_PDE_Linear/LAPLACE_COLLIDABLE_UNIFORM.h>
#include <PhysBAM_Fluids/PhysBAM_Compressible/Euler_Equations/EULER_CAVITATION_UNIFORM.h>
#include <PhysBAM_Fluids/PhysBAM_Compressible/Euler_Equations/EULER_LAPLACE.h>
#include <PhysBAM_Fluids/PhysBAM_Compressible/Euler_Equations/EULER_UNIFORM.h>
#include <PhysBAM_Fluids/PhysBAM_Compressible/Euler_Equations/INCOMPRESSIBLE_COMPRESSIBLE_COUPLING_CALLBACKS.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
// clamp_density selects the clamped quantity: density when true, internal
// energy when false; epsilon is the floor the quantity is clamped against.
template<class TV> EULER_CAVITATION_UNIFORM<TV>::
EULER_CAVITATION_UNIFORM(EULER_UNIFORM<T_GRID>& euler_input, const bool clamp_density_input, const T epsilon_input)
    :euler(euler_input),epsilon(epsilon_input),clamp_density(clamp_density_input)
{
    // The Laplace solver writes its solution directly into p_cavitation.
    elliptic_solver=new LAPLACE_COLLIDABLE_UNIFORM<T_GRID>(euler.grid,p_cavitation,false,false,true);
    Initialize_Grid();
}
//#####################################################################
// Destructor
//#####################################################################
template<class TV> EULER_CAVITATION_UNIFORM<TV>::
~EULER_CAVITATION_UNIFORM()
{
    delete elliptic_solver;
}
//#####################################################################
// Initialize_Grid
//#####################################################################
// Sizes the solver and cell arrays to the current Euler grid (p_cavitation
// carries one ring of ghost cells, the divergence arrays none).
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Initialize_Grid()
{
    elliptic_solver->Initialize_Grid(euler.grid);
    p_cavitation.Resize(euler.grid.Domain_Indices(1));
    clamped_momentum_divergence.Resize(euler.grid.Domain_Indices(0));
    clamped_internal_energy_divergence.Resize(euler.grid.Domain_Indices(0));
}
//#####################################################################
// Fill_Ghost_Pressures_Along_Neumann_Boundaries
//#####################################################################
// Copies the cavitation pressure across Neumann faces so the pressure
// gradient through those faces vanishes.
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Fill_Ghost_Pressures_Along_Neumann_Boundaries()
{
    // iterate over boundary faces, and if neumann face, copy from second_cell to first_cell
    for(FACE_ITERATOR iterator(euler.grid);iterator.Valid();iterator.Next()){
        int axis=iterator.Axis();TV_INT face_index=iterator.Face_Index();
        if(elliptic_solver->psi_N.Component(axis)(face_index)){
            TV_INT first_cell_index=iterator.First_Cell_Index(),second_cell_index=iterator.Second_Cell_Index();
            bool first_cell_inside_object=elliptic_solver->psi_D(first_cell_index);
            bool second_cell_inside_object=elliptic_solver->psi_D(second_cell_index);
            bool first_cell_inside_domain=euler.grid.Domain_Indices().Lazy_Inside(first_cell_index);
            bool second_cell_inside_domain=euler.grid.Domain_Indices().Lazy_Inside(second_cell_index);
            // Copy the valid cell's pressure into its invalid (object/ghost) neighbor.
            if((first_cell_inside_object&&(!second_cell_inside_object))||(second_cell_inside_domain&&(!first_cell_inside_domain)))
                p_cavitation(first_cell_index)=p_cavitation(second_cell_index);
            else if((second_cell_inside_object&&(!first_cell_inside_object))||(first_cell_inside_domain&&(!second_cell_inside_domain)))
                p_cavitation(second_cell_index)=p_cavitation(first_cell_index);
            else PHYSBAM_FATAL_ERROR("Error while copying pressures across Neumann boundaries!");}}
}
//#####################################################################
// Compute_Clamped_Momentum_Divergence
//#####################################################################
// Builds the per-cell source term that redistributes mass: cells with
// density below epsilon receive mass, paid for proportionally by donor
// cells whose density exceeds 5*epsilon.
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Compute_Clamped_Momentum_Divergence(const T dt)
{
    T one_over_dt = (T)1/dt;
    // Compute indices of those cells where density is greater than 5*epsilon
    // (the donor cells).
    T_ARRAYS_BOOL sufficient_density_cells(euler.grid.Domain_Indices(0));
    T total_deficient_density=(T)0;
    T total_donor_density=(T)0;
    for(CELL_ITERATOR iterator(euler.grid,0);iterator.Valid();iterator.Next()){
        TV_INT cell_index=iterator.Cell_Index();
        clamped_momentum_divergence(cell_index)=0;
        if(elliptic_solver->psi_D(cell_index)) continue;
        sufficient_density_cells(cell_index)=false;
        if(euler.U(cell_index)(1)>5*epsilon){
            sufficient_density_cells(cell_index)=true;
            total_donor_density+=(euler.U(cell_index)(1)-5*epsilon);}
        else if(euler.U(cell_index)(1)<epsilon){
            clamped_momentum_divergence(cell_index)=min((T)0,(euler.U(cell_index)(1) - epsilon)*one_over_dt);
            std::stringstream ss;ss<<"clamping density at cell_index="<<cell_index<<", density="<<euler.U(cell_index)(1)<<", epsilon="<<epsilon<<std::endl;LOG::filecout(ss.str());
            total_deficient_density+=(epsilon-euler.U(cell_index)(1));}}
    // NOTE(review): if no cell exceeds 5*epsilon, total_donor_density is zero
    // and this division is undefined -- presumably prevented upstream; confirm.
    T fractional_contribution_from_each_cell=total_deficient_density/total_donor_density;
    std::stringstream ss;ss<<"Fractional contribution from each cell:"<<fractional_contribution_from_each_cell<<std::endl;LOG::filecout(ss.str());
    if(fractional_contribution_from_each_cell >= 1) PHYSBAM_FATAL_ERROR();
    for(CELL_ITERATOR iterator(euler.grid,0);iterator.Valid();iterator.Next()){
        TV_INT cell_index=iterator.Cell_Index();
        if(sufficient_density_cells(cell_index))
            clamped_momentum_divergence(cell_index)=fractional_contribution_from_each_cell*(euler.U(cell_index)(1)-5*epsilon)*one_over_dt;}
}
//#####################################################################
// Compute_Clamped_Internal_Energy_Divergence
//#####################################################################
// Same redistribution as the momentum variant, but on rho*e: cells whose
// internal energy density falls below epsilon receive energy donated by
// cells above 5*epsilon.
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Compute_Clamped_Internal_Energy_Divergence(const T dt)
{
    T one_over_dt = (T)1/dt;
    // Compute indices of those cells where rho*e is greater than 5*epsilon
    // (the donor cells).
    T_ARRAYS_BOOL sufficient_internal_energy_cells(euler.grid.Domain_Indices(0));
    T total_deficient_energy=(T)0;
    T total_donor_energy=(T)0;
    for(CELL_ITERATOR iterator(euler.grid,0);iterator.Valid();iterator.Next()){
        TV_INT cell_index=iterator.Cell_Index();
        clamped_internal_energy_divergence(cell_index)=0;
        if(elliptic_solver->psi_D(cell_index)) continue;
        sufficient_internal_energy_cells(cell_index)=false;
        if(euler.U(cell_index)(1)*EULER<T_GRID>::e(euler.U,cell_index)>5*epsilon){
            sufficient_internal_energy_cells(cell_index)=true;
            total_donor_energy+=(euler.U(cell_index)(1)*EULER<T_GRID>::e(euler.U,cell_index)-5*epsilon);}
        else if(euler.U(cell_index)(1)*EULER<T_GRID>::e(euler.U,cell_index)<epsilon){
            clamped_internal_energy_divergence(cell_index)=min((T)0,(euler.U(cell_index)(1)*EULER<T_GRID>::e(euler.U,cell_index)-epsilon)*one_over_dt);
            std::stringstream ss;ss<<"clamping energy at cell_index="<<cell_index<<", internal energy="<<euler.U(cell_index)(1)*EULER<T_GRID>::e(euler.U,cell_index)<<", epsilon="<<epsilon<<std::endl;LOG::filecout(ss.str());
            total_deficient_energy+=(epsilon-euler.U(cell_index)(1)*EULER<T_GRID>::e(euler.U,cell_index));}}
    // NOTE(review): division by zero when there are no donor cells -- see
    // Compute_Clamped_Momentum_Divergence; confirm this is prevented upstream.
    T fractional_contribution_from_each_cell=total_deficient_energy/total_donor_energy;
    std::stringstream ss;ss<<"Fractional contribution from each cell:"<<fractional_contribution_from_each_cell<<std::endl;LOG::filecout(ss.str());
    if(fractional_contribution_from_each_cell >= 1) PHYSBAM_FATAL_ERROR();
    for(CELL_ITERATOR iterator(euler.grid,0);iterator.Valid();iterator.Next()){
        TV_INT cell_index=iterator.Cell_Index();
        if(sufficient_internal_energy_cells(cell_index))
            clamped_internal_energy_divergence(cell_index)=fractional_contribution_from_each_cell*(euler.U(cell_index)(1)*EULER<T_GRID>::e(euler.U,cell_index)-5*epsilon)*one_over_dt;}
}
//#####################################################################
// Compute_Right_Hand_Side
//#####################################################################
// Loads the elliptic solver's RHS with the negated clamped divergence of
// whichever quantity (density or internal energy) this instance clamps.
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Compute_Right_Hand_Side(const T dt)
{
    if(clamp_density){
        Compute_Clamped_Momentum_Divergence(dt);
        for(CELL_ITERATOR iterator(euler.grid);iterator.Valid();iterator.Next()){
            TV_INT cell_index=iterator.Cell_Index();
            elliptic_solver->f(cell_index) = -clamped_momentum_divergence(cell_index);}}
    else{
        Compute_Clamped_Internal_Energy_Divergence(dt);
        for(CELL_ITERATOR iterator(euler.grid);iterator.Valid();iterator.Next()){
            TV_INT cell_index=iterator.Cell_Index();
            elliptic_solver->f(cell_index) = -clamped_internal_energy_divergence(cell_index);}}
}
//#####################################################################
// Compute_Pressure
//#####################################################################
// Solves the Poisson problem for the cavitation correction pressure, then
// fills ghost pressures so Neumann faces see a zero normal gradient.
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Compute_Pressure(const T dt,const T time)
{
    Compute_Right_Hand_Side(dt);
    elliptic_solver->Find_Solution_Regions(); // flood fill
    elliptic_solver->Solve(time,true); // solve all regions
    Fill_Ghost_Pressures_Along_Neumann_Boundaries();
}
//#####################################################################
// Apply_Pressure_To_Density
//#####################################################################
// Adds dt * laplacian(p_cavitation) to the density component U(1).
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Apply_Pressure_To_Density(const T dt)
{
    // Laplacian is formed as divergence of the face-sampled gradient.
    T_FACE_ARRAYS_SCALAR grad_p_cavitation_face(euler.grid);
    ARRAYS_UTILITIES<T_GRID,T>::Compute_Gradient_At_Faces_From_Cell_Data(euler.grid,grad_p_cavitation_face,p_cavitation);
    T_ARRAYS_SCALAR laplacian_p_cavitation_cell(euler.grid.Domain_Indices(0));
    ARRAYS_UTILITIES<T_GRID,T>::Compute_Divergence_At_Cells_From_Face_Data(euler.grid,laplacian_p_cavitation_cell,grad_p_cavitation_face);
    for(CELL_ITERATOR iterator(euler.grid);iterator.Valid();iterator.Next()){
        TV_INT cell_index=iterator.Cell_Index();
        euler.U(cell_index)(1) += laplacian_p_cavitation_cell(cell_index)*dt;}
    euler.Invalidate_Ghost_Cells();
}
//#####################################################################
// Is_Density_Clamped
//#####################################################################
// True when this instance clamps density rather than internal energy.
template<class TV> bool EULER_CAVITATION_UNIFORM<TV>::
Is_Density_Clamped()
{
    return clamp_density;
}
//#####################################################################
// Apply_Pressure_To_Internal_Energy
//#####################################################################
// Adds dt * laplacian(p_cavitation) to the total energy component
// U(dimension+2).
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Apply_Pressure_To_Internal_Energy(const T dt)
{
    T_FACE_ARRAYS_SCALAR grad_p_cavitation_face(euler.grid);
    ARRAYS_UTILITIES<T_GRID,T>::Compute_Gradient_At_Faces_From_Cell_Data(euler.grid,grad_p_cavitation_face,p_cavitation);
    T_ARRAYS_SCALAR laplacian_p_cavitation_cell(euler.grid.Domain_Indices(0));
    ARRAYS_UTILITIES<T_GRID,T>::Compute_Divergence_At_Cells_From_Face_Data(euler.grid,laplacian_p_cavitation_cell,grad_p_cavitation_face);
    for(CELL_ITERATOR iterator(euler.grid);iterator.Valid();iterator.Next()){
        TV_INT cell_index=iterator.Cell_Index();
        euler.U(cell_index)(T_GRID::dimension+2) += laplacian_p_cavitation_cell(cell_index)*dt;}
    euler.Invalidate_Ghost_Cells();
}
// Averages the two adjacent cell pressures onto each face of face_grid.
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Compute_Face_Pressure_From_Cell_Pressures(const T_GRID& face_grid,T_FACE_ARRAYS_SCALAR& p_face,const T_ARRAYS_SCALAR& p_cell)
{
    TV_INT first_cell_index,second_cell_index;int axis;
    for(FACE_ITERATOR iterator(face_grid);iterator.Valid();iterator.Next()){
        first_cell_index=iterator.First_Cell_Index();second_cell_index=iterator.Second_Cell_Index();axis=iterator.Axis();
        p_face.Component(axis)(iterator.Face_Index())=(p_cell(first_cell_index)+p_cell(second_cell_index))*(T).5;}
}
//#####################################################################
// Apply_Pressure
//#####################################################################
// Density mode: updates density, refreshes ghost cells, corrects face
// velocities for the mass redistribution, then applies the standard
// pressure projection. Energy mode: only the energy update is needed.
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Apply_Pressure(const T dt,const T time, T_FACE_ARRAYS_SCALAR& face_velocities)
{
    if(clamp_density)
    {
        // Store time star density values
        T_ARRAYS_SCALAR rho_star(euler.grid.Domain_Indices(0));
        for(CELL_ITERATOR iterator(euler.grid,0);iterator.Valid();iterator.Next()){TV_INT cell_index=iterator.Cell_Index();
            rho_star(cell_index)=euler.U_ghost(cell_index)(1);}
        Apply_Pressure_To_Density(dt);
        euler.Fill_Ghost_Cells(dt,time,1);
        T_FACE_ARRAYS_SCALAR p_face;
        p_face.Resize(euler.grid);
        Compute_Face_Pressure_From_Cell_Pressures(euler.grid,p_face,p_cavitation);
        T_FACE_ARRAYS_SCALAR grad_p_cavitation_face(euler.grid);
        ARRAYS_UTILITIES<T_GRID,T>::Compute_Gradient_At_Faces_From_Cell_Data(euler.grid,grad_p_cavitation_face,p_cavitation);
        // Conserve face momentum: rho_new*u_new = rho_star*u_star - dt*grad(p).
        for(FACE_ITERATOR iterator(euler.grid);iterator.Valid();iterator.Next()){FACE_INDEX<TV::dimension> face_index=iterator.Full_Index();
            TV_INT first_cell_index=iterator.First_Cell_Index(), second_cell_index=iterator.Second_Cell_Index();
            T rho_star_face=(T).5*(rho_star(first_cell_index)+rho_star(second_cell_index));
            T rho_np1_face=(T).5*(euler.U_ghost(first_cell_index)(1)+euler.U_ghost(second_cell_index)(1));
            face_velocities(face_index)=(rho_star_face*face_velocities(face_index) - dt*grad_p_cavitation_face(face_index))/rho_np1_face;}
        //T_FACE_ARRAYS_SCALAR face_velocities;
        //face_velocities.Resize(euler.grid);
        //EULER_PROJECTION_UNIFORM<T_GRID>::Compute_Density_Weighted_Face_Velocities(euler.grid,face_velocities,euler.U_ghost,elliptic_solver->psi_N);
        T_ARRAYS_SCALAR density_scaling(euler.grid.Domain_Indices(1));density_scaling.Fill((T)1);
        EULER_PROJECTION_UNIFORM<T_GRID>::Apply_Pressure(p_cavitation,p_face,face_velocities,elliptic_solver->psi_D,elliptic_solver->psi_N,dt,time,density_scaling,0,&euler);
    }
    else
        Apply_Pressure_To_Internal_Energy(dt);
}
//#####################################################################
// Apply_Cavitation_Correction
//#####################################################################
// Entry point: solves for the correction pressure, then applies it.
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Apply_Cavitation_Correction(const T dt,const T time, T_FACE_ARRAYS_SCALAR& face_velocities)
{
    Compute_Pressure(dt,time);
    Apply_Pressure(dt,time,face_velocities);
}
//#####################################################################
// Log_Parameters
//#####################################################################
template<class TV> void EULER_CAVITATION_UNIFORM<TV>::
Log_Parameters() const
{
    LOG::SCOPE scope("EULER_CAVITATION_UNIFORM parameters");
    std::stringstream ss;ss<<"epsilon="<<epsilon<<std::endl;LOG::filecout(ss.str());
}
//#####################################################################
template class EULER_CAVITATION_UNIFORM<VECTOR<float,1> >;
template class EULER_CAVITATION_UNIFORM<VECTOR<float,2> >;
template class EULER_CAVITATION_UNIFORM<VECTOR<float,3> >;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class EULER_CAVITATION_UNIFORM<VECTOR<double,1> >;
template class EULER_CAVITATION_UNIFORM<VECTOR<double,2> >;
template class EULER_CAVITATION_UNIFORM<VECTOR<double,3> >;
#endif
|
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.gwt.common.client.form;
import java.util.ArrayList;
import java.util.List;
import com.gwtext.client.widgets.Button;
import com.gwtext.client.widgets.Panel;
import com.gwtext.client.widgets.Window;
import com.gwtext.client.widgets.form.TextField;
import com.gwtext.client.widgets.layout.FitLayout;
import com.gwtext.client.widgets.layout.HorizontalLayout;
import com.gwtext.client.widgets.layout.RowLayout;
import org.opentaps.gwt.common.client.UtilUi;
import org.opentaps.gwt.common.client.events.LoadableListener;
import org.opentaps.gwt.common.client.form.base.BaseFormPanel;
import org.opentaps.gwt.common.client.listviews.EntityListView;
import org.opentaps.gwt.common.client.listviews.SearchResultsListViewInterface;
import org.opentaps.gwt.common.client.lookup.configuration.SearchLookupConfiguration;
/**
 * A generic search form.
 * This contains a simple text input and a search button.
 * Results are presented in a popup window with one result grid by search type.
 */
public class MultiSearchForm extends BaseFormPanel {
    /** Popup window holding the result grids. */
    private final Window win;
    /** The query text input. */
    private final TextField searchInput;
    /** Scrollable container inside the popup, one row per result grid. */
    private final Panel winInnerPanel;
    /** Registered result grids, one per search type. */
    private List<SearchResultsListViewInterface> resultGrids = new ArrayList<SearchResultsListViewInterface>();
    private static final int RESULT_GRID_HEIGHT = 500;
    private static final int RESULT_WINDOW_HEIGHT = 700;
    private static final int RESULT_WINDOW_WIDTH = 900;
    /**
     * Default constructor.
     */
    public MultiSearchForm() {
        super();
        setBorder(false);
        setHideLabels(true);
        // using an inner panel to customize the layout
        Panel innerPanel = new Panel();
        innerPanel.setBorder(false);
        innerPanel.setLayout(new HorizontalLayout(5));
        searchInput = new TextField();
        searchInput.setName(SearchLookupConfiguration.IN_QUERY);
        searchInput.setWidth(200); // width of search input box
        setFieldListeners(searchInput);
        innerPanel.add(searchInput);
        Button submitButton = makeStandardSubmitButton(UtilUi.MSG.search());
        innerPanel.add(submitButton);
        add(innerPanel);
        // results window is built once here and reused across searches
        winInnerPanel = new Panel();
        winInnerPanel.setBorder(false);
        winInnerPanel.setFrame(false);
        winInnerPanel.setHeader(false);
        winInnerPanel.setLayout(new RowLayout());
        winInnerPanel.setAutoScroll(true);
        win = new Window(UtilUi.MSG.searchResults());
        win.setModal(false);
        win.setResizable(true);
        win.setMinHeight(RESULT_WINDOW_HEIGHT);
        win.setWidth(RESULT_WINDOW_WIDTH);
        win.setAutoScroll(true);
        win.setCloseAction(Window.HIDE);
        win.setLayout(new FitLayout());
        win.add(winInnerPanel);
    }
    /**
     * Registers a result grid to be queried and displayed in the popup.
     * Grids that return no results are auto-collapsed; otherwise the hit
     * count is appended to the grid title.
     *
     * @param grid a list view implementing both EntityListView and
     *             SearchResultsListViewInterface
     * @param <T> the grid type
     */
    public <T extends EntityListView & SearchResultsListViewInterface> void addResultsGrid(final T grid) {
        // add special handler to auto collapse sections with no results
        // also set the title
        grid.addLoadableListener(new LoadableListener() {
            public void onLoad() {
                // keep only the base title, dropping any previous " - ..." suffix
                String title = grid.getTitle().split(" - ")[0];
                if (grid.getStore().getRecords().length == 0) {
                    grid.collapse();
                    grid.setTitle(title + " - " + UtilUi.MSG.searchNoResults(searchInput.getText()));
                } else {
                    grid.expand();
                    grid.setTitle(title + " - " + grid.getStore().getTotalCount() + " " + (grid.getStore().getTotalCount() > 1 ? UtilUi.MSG.searchItems() : UtilUi.MSG.searchItem()));
                }
            }
        });
        resultGrids.add(grid);
        grid.setFrame(false);
        grid.setAutoHeight(false);
        grid.setBorder(false);
        grid.setWidth(RESULT_WINDOW_WIDTH);
        grid.setHeight(RESULT_GRID_HEIGHT);
        winInnerPanel.add(grid);
    }
    /** Form submission triggers the search. */
    @Override public void submit() {
        search();
    }
    /**
     * Sizes and shows the results popup, then runs the current query
     * against every registered grid.
     */
    private void search() {
        int ph = com.google.gwt.user.client.Window.getClientHeight();
        int pw = com.google.gwt.user.client.Window.getClientWidth();
        // clamp the popup between its preferred size and the page size
        int h = win.getHeight();
        if (h < RESULT_WINDOW_HEIGHT) {
            h = RESULT_WINDOW_HEIGHT;
        }
        // make sure the window is not bigger than the page
        // because the popup does not have scrollbars
        if (h > ph) {
            h = ph - 50;
        }
        win.setHeight(h);
        int w = win.getWidth();
        if (w < RESULT_WINDOW_WIDTH) {
            w = RESULT_WINDOW_WIDTH;
        }
        // make sure the window is not bigger than the page
        // because the popup does not have scrollbars
        if (w > pw) {
            w = pw - 50;
        }
        win.setWidth(w);
        win.show();
        win.center();
        for (SearchResultsListViewInterface grid : resultGrids) {
            grid.search(searchInput.getText());
        }
    }
}
|
#!/bin/sh --
# Network interface statistics
# Emits per-interface byte/packet/error counters as "iface`metric\tL\tvalue"
# lines, skipping loopback interfaces. The awk field numbers ($5..$11) assume
# the wide (-W) link-layer output of FreeBSD netstat -- TODO confirm against
# the netstat version in use if columns ever shift.
. @@CONF@@/freebsd/common.sh
${BIN_NETSTAT} -i -b -n -W -f link | ${BIN_AWK} '{
	if ($1 == "Name") next;
	if ($1 ~ /^lo[0-9]/) next;
	sub(/\*$/,"",$1);
	printf("%s`in_bytes\tL\t%d\n", $1, $8);
	printf("%s`in_packets\tL\t%d\n", $1, $5);
	printf("%s`in_errors\tL\t%d\n", $1, $6);
	printf("%s`out_bytes\tL\t%d\n", $1, $11);
	printf("%s`out_packets\tL\t%d\n", $1, $9);
	printf("%s`out_errors\tL\t%d\n", $1, $10);
}'
|
<gh_stars>1-10
// Source position of a token (1-based column and row).
export declare type Pos = {
    col: number;
    row: number;
    file?: string;
}
// A single lexeme produced by the Lexer; tagName is only populated for
// OpenTagStart tokens.
export declare type Token = {
    type: string;
    val: string;
    tagName?: string;
    pos: Pos;
}
// Template control-flow patterns: attribute form (if="...") and
// mustache form ({{ if(...) }}).
const ifStatement_Re = /if=["][ \w=<>&.\-_'"&\(\)\|]+["]/;
const ifStatement_Re_2 = /{{[ ]*if\([ \w.$\[\]"'=<>+\-,&\(\)\|]+\)[ ]*}}/;
const elseIfStatement_Re = /else-if=["][ \w=<>&.\-_'"&\(\)\|]+["]/;
const elseIfStatement_Re_2 = /{{[ ]*else if\([ \w.$\[\]"'=<>+\-,'"&\(\)\|]+\)[ ]*}}/;
const elseStatement_Re = /else/;
const elseStatement_Re_2 = /{{[ ]*else[ ]*}}/;
// Loop constructs: for="let ..." attribute, {{ for(...) }}, and forEach form.
const forStatement_Re = /for=["']let[ \w.$\[\],;:'"]+['"]/;
const forStatement_Re_2 = /{{[ ]*for\([ a-zA-Z0-9_\w.$\[\]=<>\-+,]+\)[ ]*}}/;
export const forEach_Re = /{{[ ]*[a-zA-Z0-9.\[\]_]+[.]forEach\(\([ a-zA-Z0-9,._]+\)=>\)[ ]*}}/;
// Event binding attributes such as *onclick="handler()".
const on_Re = /\*on[a-z]+="[ a-z0-9_\(\).,]+"/i;
// Generic fallbacks: free text, tag open/close pieces, and attribute forms.
const text_Re = /[ \w"'=\(\)\n\t!&^%$#@\-:_+\\/,.?\[\]>]+/i;
const openTagStart_Re = /<[-_;:&%$#@+=*\w]+/i;
const attribute_Re = /[-_:&$#@*\w]+=["|'][ '\w\-_.:&$#@\(\)\{\}*]+['|"]/i;
const dynamicAttr_Re = /[-_:*a-z0-9]+={{[ a-z0-9._\[\]]+}}/i;
const css_Re = /style=["'][a-z\-\;0-9\: ]+['"]/i;
const link_Re = /href=["'][a-z\-\;0-9\://. ]+['"]/i;
// {{ expression }} interpolation.
const dynamicData_Re = /{{[ ]*[a-z0-9_.$\[\]\(\)\+"'\-_, ]+[ ]*}}/i;
const closeTag_Re = /<\/[-_;:&%$#@+=*\w]+>/i;
// Entire inline <script>...</script> element captured as one unit.
const javascriptSrc_Reg = /<script>[ \w"'=\(\)\n\t!&^%$#@\-:_<>+\/,.\?\[\]><?;\\]+<\/script>/i;
export class Lexer {
private pos: Pos = { col: 1, row: 1 };
private cursor: number;
private tokens: Array<Token> = [];
private currentStatus: string;
constructor(private input: string) {
this.cursor = 0;
for (; ;) {
if (this.openTagStart) {
if (this.openTagStart === "<script") {
let jsCodeEnd = this.input.indexOf("</script>", this.cursor)
let jsCode = "\n" + this.input.slice(this.cursor, jsCodeEnd + 9)
this.tokens.push({
type: "Text",
val: jsCode,
pos: Object.freeze({ ...this.pos })
})
this.consume(jsCode)
}
else {
this.tokens.push({
type: "OpenTagStart",
val: this.openTagStart,
tagName: this.openTagStart.substring(1),
pos: Object.freeze({ ...this.pos })
})
this.currentStatus = "attributes"
this.consume(this.openTagStart)
}
}
else if (this.dynamicAttr) {
this.tokens.push({
type: "DynamicAttribute",
val: this.dynamicAttr,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.dynamicAttr)
}
else if (this.css) {
this.tokens.push({
type: "CSS",
val: this.css,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.css)
}
else if (this.link) {
this.tokens.push({
type: "Attribute",
val: this.link,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.link)
}
else if (this.elseIfStatement) {
this.tokens.push({
type: "ElseIfStatement",
val: this.elseIfStatement,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.elseIfStatement)
}
else if (this.elseStatement) {
this.tokens.push({
type: "ElseStatement",
val: this.elseStatement,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.elseStatement)
}
else if (this.ifStatement) {
this.tokens.push({
type: "IfStatement",
val: this.ifStatement,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.ifStatement)
}
else if (this.ifStatement2) {
this.tokens.push({
type: "IfStatement",
val: this.ifStatement2,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.ifStatement2)
}
else if (this.forStatement2) {
this.tokens.push({
type: "ForStatement",
val: this.forStatement2,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.forStatement2)
}
else if (this.forStatement) {
this.tokens.push({
type: "ForStatement",
val: this.forStatement,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.forStatement)
}
else if (this.forEach) {
this.tokens.push({
type: "ForStatement",
val: this.forEach,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.forEach)
}
else if (this.on) {
this.tokens.push({
type: "Event",
val: this.on,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.on)
}
else if (this.attribute) {
this.tokens.push({
type: "Attribute",
val: this.attribute,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.attribute)
}
else if (this.selfClosingTag) {
this.tokens.push({
type: "SelfClosingTag",
val: this.selfClosingTag,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.selfClosingTag)
}
else if (this.openTagEnd) {
this.currentStatus = "innerHTML"
this.tokens.push({
type: "OpenTagEnd",
val: this.openTagEnd,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.openTagEnd)
}
else if (this.whiteSpace) {
let lastToken = this.tokens[this.tokens.length - 1].type
if (
lastToken !== "CloseTag" &&
lastToken !== "SelfClosingTag"
) {
this.tokens.push({
type: "Text",
val: this.whiteSpace,
pos: Object.freeze({ ...this.pos })
})
}
this.consume(this.whiteSpace);
}
else if (this.input[0] === "\n") {
this.newLIne()
this.consume("\n")
}
else if (this.dynamicData) {
let type: string;
if (this.dynamicData.search(elseStatement_Re_2) > -1 && this.currentStatus === "attributes") {
type = "IfStatement"
} else {
type = "DynamicData"
}
this.tokens.push({
type,
val: this.dynamicData,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.dynamicData)
}
else if (this.text) {
    // In innerHTML context the run is literal text; otherwise we are still
    // inside a tag, so it is an attribute value.
    // BUG FIX: original used `=` (assignment) instead of `===`, which both
    // overwrote currentStatus with "innerHTML" and made the condition
    // always truthy, so every match was tokenized as "Text".
    let type = this.currentStatus === "innerHTML" ?
        "Text" : "Attribute";
    this.tokens.push({
        type,
        val: this.text,
        pos: Object.freeze({ ...this.pos })
    })
    this.consume(this.text);
}
else if (this.closeTag) {
this.tokens.push({
type: "CloseTag",
val: this.closeTag,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.closeTag)
}
else if (this.comparisonOp) {
this.tokens.push({
type: "Text",
val: this.comparisonOp,
pos: Object.freeze({ ...this.pos })
})
this.consume(this.comparisonOp);
}
else if (this.eof) {
this.tokens.push({
type: "eof",
val: "eof",
pos: Object.freeze({ ...this.pos })
})
break;
}
else {
this.next()
}
}
}
/** Advance past one unrecognised character without emitting a token. */
private next() {
    this.cursor++;
    this.pos.col++;
    this.input = this.input.slice(1)
}
/** Advance past a recognised lexeme, keeping the column counter in sync. */
private consume(lexeme: string) {
    const len = lexeme.length;
    this.pos.col += len;
    this.input = this.input.slice(len)
}
/**
 * Register a line break: bump the row and reset the column to -1 so the
 * caller's subsequent consume("\n") (which adds the lexeme length, 1)
 * leaves col at 0 for the first character of the new line.
 */
private newLIne() {
    this.pos.row++;
    this.pos.col = -1;
}
/**
 * True once the lexer considers the input exhausted.
 *
 * NOTE(review): `input` is trimmed from the front by consume()/next(),
 * while `cursor` only ever grows (next() alone increments it). Indexing
 * the shrinking string with the growing cursor means eof can fire before
 * the input is actually empty whenever next() has been used;
 * `this.input[0] === undefined` (i.e. input.length === 0) is probably
 * what was intended — confirm before changing.
 */
private get eof() {
    return this.input[this.cursor] === undefined;
}
/** Opening-tag-start lexeme at the head of the input, or false. */
private get openTagStart() {
    if (this.doesNotContain(openTagStart_Re)) return false;
    const lexeme = this.input.match(openTagStart_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Static attribute lexeme at the head of the input, or false. */
private get attribute() {
    if (this.doesNotContain(attribute_Re)) return false;
    const lexeme = this.input.match(attribute_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Inline CSS lexeme at the head of the input, or false. */
private get css() {
    if (this.doesNotContain(css_Re)) return false;
    const lexeme = this.input.match(css_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Link lexeme at the head of the input, or false. */
private get link() {
    if (this.doesNotContain(link_Re)) return false;
    const lexeme = this.input.match(link_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Dynamic (bound) attribute lexeme at the head of the input, or false. */
private get dynamicAttr() {
    if (this.doesNotContain(dynamicAttr_Re)) return false;
    const lexeme = this.input.match(dynamicAttr_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** The ">" that terminates an opening tag, when it is next in the input. */
private get openTagEnd() {
    if (this.doesNotContain(">")) return false;
    const lexeme = this.input.match(">")[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** The "/>" of a self-closing tag, when it is next in the input. */
public get selfClosingTag() {
    if (this.doesNotContain("/>")) return false;
    const lexeme = this.input.match("/>")[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Dynamic-data (interpolation) lexeme at the head of the input, or false. */
private get dynamicData() {
    if (this.doesNotContain(dynamicData_Re)) return false;
    const lexeme = this.input.match(dynamicData_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** A bare "<" or ">" comparison operator, when it is next in the input. */
private get comparisonOp() {
    const compOp_Re = /[<>]/;
    if (this.doesNotContain(compOp_Re)) return false;
    const lexeme = this.input.match(compOp_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Closing-tag lexeme at the head of the input, or false. */
private get closeTag() {
    if (this.doesNotContain(closeTag_Re)) return false;
    const lexeme = this.input.match(closeTag_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Plain-text lexeme at the head of the input, or false. */
private get text() {
    if (this.doesNotContain(text_Re)) return false;
    const lexeme = this.input.match(text_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Run of spaces/tabs at the head of the input, or false. */
private get whiteSpace() {
    const ws_Re = /[ \t]+/;
    if (this.doesNotContain(ws_Re)) return false;
    const run = this.input.match(ws_Re)[0];
    return this.input.startsWith(run) && run;
}
/** `if` lexeme (primary form) at the head of the input, or false. */
private get ifStatement() {
    if (this.doesNotContain(ifStatement_Re)) return false;
    const lexeme = this.input.match(ifStatement_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** `if` lexeme (alternate form) at the head of the input, or false. */
private get ifStatement2() {
    if (this.doesNotContain(ifStatement_Re_2)) return false;
    const lexeme = this.input.match(ifStatement_Re_2)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/**
 * `else if` lexeme at the head of the input, or false.
 * The primary pattern is probed before the alternate spelling; the first
 * pattern present anywhere in the input decides the result.
 */
private get elseIfStatement() {
    for (const re of [elseIfStatement_Re, elseIfStatement_Re_2]) {
        if (this.doesNotContain(re)) continue;
        const lexeme = this.input.match(re)[0];
        return this.input.startsWith(lexeme) && lexeme;
    }
    return false;
}
/**
 * `else` lexeme at the head of the input, or false.
 * The primary pattern is probed before the alternate spelling.
 *
 * Consistency fix: the second probe previously used a raw
 * `input.search(...) !== -1` where every sibling getter goes through
 * doesNotContain(); the two are equivalent (doesNotContain is
 * `search(arg) === -1`), this now follows the class convention.
 */
private get elseStatement() {
    if (!this.doesNotContain(elseStatement_Re)) {
        let res = this.input.match(elseStatement_Re)[0];
        return this.input.indexOf(res) === 0 && res;
    }
    if (!this.doesNotContain(elseStatement_Re_2)) {
        let res = this.input.match(elseStatement_Re_2)[0];
        return this.input.indexOf(res) === 0 && res;
    }
    return false;
}
/** `for` lexeme (primary form) at the head of the input, or false. */
private get forStatement() {
    if (this.doesNotContain(forStatement_Re)) return false;
    const lexeme = this.input.match(forStatement_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** `for` lexeme (alternate form) at the head of the input, or false. */
private get forStatement2() {
    if (this.doesNotContain(forStatement_Re_2)) return false;
    const lexeme = this.input.match(forStatement_Re_2)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/**
 * JavaScript source lexeme at the head of the input, or false.
 *
 * BUG FIX: the original guarded on javascriptSrc_Reg but then matched
 * forStatement_Re (an apparent copy-paste from the forStatement getter),
 * so the returned lexeme never corresponded to the regex that was
 * checked. Both probe and match now use javascriptSrc_Reg.
 */
private get lexJSCode() {
    if (this.doesNotContain(javascriptSrc_Reg)) return false;
    const src = this.input.match(javascriptSrc_Reg)[0];
    return this.input.indexOf(src) === 0 && src;
}
/** `forEach` lexeme at the head of the input, or false. */
private get forEach() {
    if (this.doesNotContain(forEach_Re)) return false;
    const lexeme = this.input.match(forEach_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Event-binding (`on...`) lexeme at the head of the input, or false. */
private get on() {
    if (this.doesNotContain(on_Re)) return false;
    const lexeme = this.input.match(on_Re)[0];
    return this.input.startsWith(lexeme) && lexeme;
}
/** Return the token list accumulated by the lexer. */
public tokenize() {
    return this.tokens;
}
/** True when `arg` (regex or string) matches nowhere in the remaining input. */
private doesNotContain(arg: RegExp | string) {
    return this.input.search(arg) < 0
}
}
|
Rem
Rem $Header: statsdrp.sql 13-aug-99.11:17:16 cdialeri Exp $
Rem
Rem statsdrp.sql
Rem
Rem Copyright (c) Oracle Corporation 1999. All Rights Reserved.
Rem
Rem NAME
Rem statsdrp.sql
Rem
Rem DESCRIPTION
Rem SQL*PLUS command file drop user, tables and package for
Rem performance diagnostic tool STATSPACK
Rem
Rem NOTES
Rem Note the script connects INTERNAL and so must be run from
Rem an account which is able to connect internal.
Rem
Rem MODIFIED (MM/DD/YY)
Rem cdialeri 08/13/99 - Drops entire STATSPACK environment
Rem cdialeri 08/13/99 - Created
Rem
--
-- Connect as PERFSTAT and drop the tables and indexes.
-- &&1 = TNS alias of the target database, &&2 = SYS password; both are
-- SQL*Plus substitution parameters supplied when the script is invoked.
-- The ALTER USER resets PERFSTAT's password to a known value so the
-- CONNECT that follows can succeed.
alter user perfstat identified by perfstat;
connect perfstat/perfstat@&&1
@@statsdtab
--
-- Drop the STATSPACK user itself
connect sys/&&2@&&1
@@statsdusr
|
-- <gh_stars>0
-- phpMyAdmin SQL Dump
-- version 4.3.8
-- http://www.phpmyadmin.net
--
-- Host: localhost
-- Generation Time: Jun 23, 2016 at 12:17 AM
-- Server version: 5.5.42-37.1-log
-- PHP Version: 5.4.31
-- NO_AUTO_VALUE_ON_ZERO keeps explicit zero values in AUTO_INCREMENT
-- columns from being replaced while the dump is replayed; the time zone
-- is pinned so TIMESTAMP data loads deterministically.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
-- Stash the client's current charset/collation in user variables and
-- switch the session to utf8. The /*!40101 ... */ conditional-comment
-- form executes only on MySQL >= 4.1.1.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
--
-- Database: `biroglos_simklinik`
--
-- --------------------------------------------------------
--
-- Table structure for table `bed` (inpatient beds / rooms: name, group, tariff, facilities, occupancy status)
--
CREATE TABLE IF NOT EXISTS `bed` (
`id` int(100) NOT NULL,
`nama_kamar` varchar(100) DEFAULT NULL,
`group_bed` varchar(100) DEFAULT NULL,
`tarif` varchar(100) DEFAULT NULL,
`fasilitas` varchar(100) DEFAULT NULL,
`status` varchar(50) DEFAULT NULL,
`kode_bed` varchar(100) NOT NULL,
`kelas` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=62 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `bidang_lab`
--
CREATE TABLE IF NOT EXISTS `bidang_lab` (
`id` int(100) NOT NULL,
`nama` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_hutang`
--
CREATE TABLE IF NOT EXISTS `detail_hutang` (
`id` int(100) NOT NULL,
`no_faktur_pembayaran` varchar(100) NOT NULL,
`no_faktur_pembelian` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`jatuh_tempo` date DEFAULT NULL,
`sisa` int(100) NOT NULL,
`potongan` int(100) NOT NULL,
`total` int(100) NOT NULL,
`jumlah_bayar` int(100) NOT NULL,
`nama_suplier` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_item_keluar`
--
CREATE TABLE IF NOT EXISTS `detail_item_keluar` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_barang` varchar(100) NOT NULL,
`nama_barang` varchar(100) NOT NULL,
`jumlah` int(100) NOT NULL,
`tanggal` date NOT NULL,
`harga` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`hpp` int(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=25 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_item_masuk`
--
CREATE TABLE IF NOT EXISTS `detail_item_masuk` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_barang` varchar(100) NOT NULL,
`nama_barang` varchar(100) NOT NULL,
`jumlah` int(100) NOT NULL,
`harga` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`tanggal` date NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_pembelian`
--
CREATE TABLE IF NOT EXISTS `detail_pembelian` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`tipe_produk` varchar(100) NOT NULL,
`jumlah_produk` int(100) NOT NULL,
`harga_produk` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`hpp` int(100) NOT NULL,
`sisa` int(100) NOT NULL,
`diskon` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`suplier` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=288 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_penjualan`
--
CREATE TABLE IF NOT EXISTS `detail_penjualan` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) DEFAULT NULL,
`no_faktur` varchar(100) NOT NULL,
`no_rm` varchar(100) DEFAULT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jumlah_produk` int(100) NOT NULL,
`tipe_produk` varchar(100) NOT NULL,
`dosis` varchar(100) DEFAULT NULL,
`harga_produk` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`jam` time NOT NULL,
`tanggal` date NOT NULL,
`hpp` int(100) DEFAULT NULL,
`sisa` int(100) NOT NULL,
`diskon` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`keterangan` text
) ENGINE=InnoDB AUTO_INCREMENT=19934 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_piutang`
--
CREATE TABLE IF NOT EXISTS `detail_piutang` (
`id` int(100) NOT NULL,
`no_faktur_penjualan` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`tanggal_jt` date NOT NULL,
`sisa` int(100) NOT NULL,
`potongan` int(100) NOT NULL,
`total` int(100) NOT NULL,
`jumlah_bayar` int(100) NOT NULL,
`no_faktur_pembayaran` varchar(100) NOT NULL,
`penjamin` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_retur_pembelian`
--
CREATE TABLE IF NOT EXISTS `detail_retur_pembelian` (
`id` int(100) NOT NULL,
`no_retur` varchar(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jumlah_jual` int(100) NOT NULL,
`tipe_produk` varchar(100) NOT NULL,
`jumlah_retur` int(100) NOT NULL,
`harga` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`potongan` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_retur_penjualan`
--
CREATE TABLE IF NOT EXISTS `detail_retur_penjualan` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jumlah_jual` int(100) NOT NULL,
`tipe_produk` varchar(100) NOT NULL,
`jumlah_retur` int(100) NOT NULL,
`harga` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`potongan` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`no_retur` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `detail_stok_opname`
--
CREATE TABLE IF NOT EXISTS `detail_stok_opname` (
`id` int(100) NOT NULL,
`nomor` varchar(100) NOT NULL,
`kode_barang` varchar(100) NOT NULL,
`nama_barang` varchar(100) NOT NULL,
`harga_beli` int(100) NOT NULL,
`stok_terakhir` int(11) NOT NULL,
`fisik` varchar(100) NOT NULL,
`selisih` int(100) NOT NULL,
`selisih_harga` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`user` varchar(100) NOT NULL,
`hpp` int(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=90 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `dokter` (doctors: name, address, national ID, birth data, phone)
--
CREATE TABLE IF NOT EXISTS `dokter` (
`id` int(50) NOT NULL,
`nama` varchar(100) DEFAULT NULL,
`alamat` varchar(100) DEFAULT NULL,
`nik` varchar(50) DEFAULT NULL,
`tanggal_lahir` varchar(100) DEFAULT NULL,
`no_hp` varchar(100) NOT NULL,
`tempat_lahir` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `fee_pasien`
--
CREATE TABLE IF NOT EXISTS `fee_pasien` (
`id` int(100) NOT NULL,
`nama_petugas` varchar(100) NOT NULL,
`jabatan` varchar(100) DEFAULT NULL,
`user` varchar(100) NOT NULL,
`jumlah_prosentase` int(100) DEFAULT NULL,
`jumlah_uang` int(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `fee_produk`
--
CREATE TABLE IF NOT EXISTS `fee_produk` (
`id` int(100) NOT NULL,
`nama_petugas` varchar(100) NOT NULL,
`jabatan` varchar(100) DEFAULT NULL,
`user` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jumlah_prosentase` int(100) DEFAULT NULL,
`jumlah_uang` int(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=9187 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `group_bed`
--
CREATE TABLE IF NOT EXISTS `group_bed` (
`id` int(100) NOT NULL,
`nama` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=18 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `hpp_barang` (per-item cost-of-goods entries; HPP = harga pokok penjualan, cost of goods sold)
--
CREATE TABLE IF NOT EXISTS `hpp_barang` (
`id` int(100) NOT NULL,
`kode_barang` varchar(100) NOT NULL,
`jumlah_barang` int(100) NOT NULL,
`jenis` varchar(100) NOT NULL,
`harga_barang` int(100) NOT NULL,
`tanggal` date NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=288 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `hutang` (accounts payable to suppliers)
--
CREATE TABLE IF NOT EXISTS `hutang` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`nama_suplier` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`tanggal_edit` date DEFAULT NULL,
`petugas` varchar(100) NOT NULL,
`petugas_edit` varchar(100) DEFAULT NULL,
`keterangan` varchar(100) DEFAULT NULL,
`total` int(100) NOT NULL,
`ke_kas` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `icd`
--
-- ICD (International Classification of Diseases) diagnosis-code lookup.
-- `DTD`/`Deskripsi_ina` carry the Indonesian tabulation code and wording;
-- the eight `not_used_*` columns appear to be padding carried over from
-- the original data import -- TODO confirm before dropping them.
CREATE TABLE IF NOT EXISTS `icd` (
`ICD` varchar(20) NOT NULL DEFAULT '',
`Deskripsi` varchar(250) NOT NULL DEFAULT '',
`DTD` varchar(20) NOT NULL DEFAULT '',
`Deskripsi_ina` varchar(250) NOT NULL DEFAULT '',
`not_used_1` varchar(20) NOT NULL DEFAULT '',
`not_used_2` varchar(20) NOT NULL DEFAULT '',
`not_used_3` varchar(20) NOT NULL DEFAULT '',
`not_used_4` varchar(20) NOT NULL DEFAULT '',
`not_used_5` varchar(20) NOT NULL DEFAULT '',
`not_used_6` varchar(20) NOT NULL DEFAULT '',
`not_used_7` varchar(20) NOT NULL DEFAULT '',
`not_used_8` varchar(20) NOT NULL DEFAULT ''
) ENGINE=InnoDB DEFAULT CHARSET=latin1 ROW_FORMAT=DYNAMIC;
-- --------------------------------------------------------
--
-- Table structure for table `item_keluar`
--
CREATE TABLE IF NOT EXISTS `item_keluar` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`user` varchar(100) NOT NULL,
`user_edit` varchar(100) DEFAULT NULL,
`tangal_edit` varchar(100) DEFAULT NULL,
`keterangan` varchar(100) DEFAULT NULL,
`total_hpp` int(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `item_masuk`
--
CREATE TABLE IF NOT EXISTS `item_masuk` (
`id` int(110) NOT NULL,
`no_faktur` varchar(110) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`user` varchar(110) DEFAULT NULL,
`user_edit` varchar(110) DEFAULT NULL,
`tangal_edit` date DEFAULT NULL,
`keterangan` varchar(110) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `jabatan`
--
CREATE TABLE IF NOT EXISTS `jabatan` (
`id` int(100) NOT NULL,
`nama` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=27 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `jasa_lab`
--
CREATE TABLE IF NOT EXISTS `jasa_lab` (
`id` int(100) NOT NULL,
`kode_lab` varchar(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`harga_1` int(100) NOT NULL,
`harga_2` int(100) NOT NULL,
`harga_3` int(100) NOT NULL,
`bidang` varchar(100) NOT NULL,
`persiapan` varchar(100) DEFAULT NULL,
`metode` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=20 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `jenis`
--
CREATE TABLE IF NOT EXISTS `jenis` (
`id` int(100) NOT NULL,
`nama` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=91 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kabupaten`
--
CREATE TABLE IF NOT EXISTS `kabupaten` (
`id_kab` int(100) NOT NULL,
`id_prov` int(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`id_jenis` int(100) NOT NULL,
`status_pakai` int(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kartu_stok` (stock card: per-product stock movement ledger)
--
CREATE TABLE IF NOT EXISTS `kartu_stok` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) DEFAULT NULL,
`tanggal` date NOT NULL,
`jumlah` int(100) NOT NULL,
`tipe` varchar(100) NOT NULL,
`pelanggan` varchar(100) DEFAULT NULL,
`kode_produk` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2425 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kas`
--
CREATE TABLE IF NOT EXISTS `kas` (
`id` int(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`total` int(100) NOT NULL,
`kategori` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kas_keluar`
--
CREATE TABLE IF NOT EXISTS `kas_keluar` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) DEFAULT NULL,
`nama` varchar(100) NOT NULL,
`dari_akun` varchar(100) NOT NULL,
`ke_akun` varchar(100) NOT NULL,
`total` int(100) NOT NULL,
`petugas` varchar(100) NOT NULL,
`no_trx` varchar(100) NOT NULL,
`petugas_akhir` varchar(100) DEFAULT NULL,
`terakhir_edit` datetime DEFAULT NULL,
`tanggal` date NOT NULL,
`waktu_input` time NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=165 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kas_masuk`
--
CREATE TABLE IF NOT EXISTS `kas_masuk` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`dari_akun` varchar(100) NOT NULL,
`ke_akun` varchar(100) NOT NULL,
`total` int(100) NOT NULL,
`petugas` varchar(100) NOT NULL,
`no_trx` varchar(100) NOT NULL,
`petugas_akhir` varchar(100) DEFAULT NULL,
`terakhir_edit` datetime DEFAULT NULL,
`tanggal` date NOT NULL,
`waktu_input` time NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2634 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kas_mutasi`
--
CREATE TABLE IF NOT EXISTS `kas_mutasi` (
`id` int(100) NOT NULL,
`no_trx` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`dari_kas` varchar(100) NOT NULL,
`mutasi_ke_kas` varchar(100) NOT NULL,
`jumlah` int(11) NOT NULL,
`keterangan` varchar(100) NOT NULL,
`petugas` varchar(100) NOT NULL,
`petugas_akhir` varchar(100) DEFAULT NULL,
`terakhir_edit` datetime DEFAULT NULL,
`waktu_input` time NOT NULL,
`nama` varchar(100) NOT NULL,
`total` int(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=16 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kategori_keluar`
--
CREATE TABLE IF NOT EXISTS `kategori_keluar` (
`id` int(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`total` int(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=39 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kategori_masuk`
--
CREATE TABLE IF NOT EXISTS `kategori_masuk` (
`id` int(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`total` int(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=18 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kecamatan`
--
CREATE TABLE IF NOT EXISTS `kecamatan` (
`id_kec` int(100) NOT NULL,
`id_kab` int(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`status_pakai` int(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `kelurahan`
--
CREATE TABLE IF NOT EXISTS `kelurahan` (
`id_kel` char(10) NOT NULL,
`id_kec` char(6) DEFAULT NULL,
`nama` tinytext,
`id_jenis` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `laporan_fee_pasien`
--
CREATE TABLE IF NOT EXISTS `laporan_fee_pasien` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) DEFAULT NULL,
`no_rm` varchar(100) DEFAULT NULL,
`no_faktur` varchar(100) NOT NULL,
`nama_petugas` varchar(100) NOT NULL,
`jumlah_fee` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`waktu` datetime DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `laporan_fee_produk`
--
CREATE TABLE IF NOT EXISTS `laporan_fee_produk` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) DEFAULT NULL,
`no_faktur` varchar(100) NOT NULL,
`no_rm` varchar(100) DEFAULT NULL,
`jumlah_fee` int(100) NOT NULL,
`nama_petugas` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`waktu` datetime DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=6871 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `login`
--
-- Application user accounts (`otoritas` = authority/role level).
-- NOTE(review): `password` is a bare varchar(30) with no hash/salt column
-- in sight -- credentials look like they are stored in cleartext; verify
-- against the application code and migrate to a salted hash if so.
CREATE TABLE IF NOT EXISTS `login` (
`id` int(50) NOT NULL,
`username` varchar(30) DEFAULT NULL,
`nama` varchar(30) DEFAULT NULL,
`password` varchar(30) DEFAULT NULL,
`otoritas` varchar(30) DEFAULT NULL,
`jabatan` varchar(50) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `pasien` (patient master records, keyed by medical-record number `no_rm`)
--
CREATE TABLE IF NOT EXISTS `pasien` (
`id` int(50) NOT NULL,
`no_rm` varchar(50) DEFAULT NULL,
`nama_lengkap` varchar(50) DEFAULT NULL,
`tempat_lahir` varchar(50) DEFAULT NULL,
`tanggal_lahir` date DEFAULT NULL,
`umur` varchar(50) DEFAULT NULL,
`alamat_sekarang` varchar(100) DEFAULT NULL,
`alamat_ktp` varchar(100) DEFAULT NULL,
`no_hp` varchar(50) DEFAULT NULL,
`no_ktp` varchar(50) DEFAULT NULL,
`nama_suamiortu` varchar(50) DEFAULT NULL,
`pekerjaan_suamiortu` varchar(50) DEFAULT NULL,
`nama_penanggungjawab` varchar(50) DEFAULT NULL,
`hubungan_dengan_pasien` varchar(50) DEFAULT NULL,
`alamat_penanggung` varchar(50) DEFAULT NULL,
`no_hp_penanggung` varchar(50) DEFAULT NULL,
`jenis_kelamin` varchar(50) DEFAULT NULL,
`pendidikan_terakhir` varchar(50) DEFAULT NULL,
`status_kawin` varchar(50) DEFAULT NULL,
`agama` varchar(50) DEFAULT NULL,
`penjamin` varchar(50) DEFAULT NULL,
`gol_darah` varchar(50) DEFAULT NULL,
`provinsi` varchar(100) DEFAULT NULL,
`kabupaten` varchar(100) DEFAULT NULL,
`kecamatan` varchar(100) DEFAULT NULL,
`kelurahan` varchar(100) DEFAULT NULL,
`tanggal` date DEFAULT NULL,
`alergi` varchar(100) DEFAULT NULL,
`no_kk` varchar(100) DEFAULT NULL,
`nama_kk` varchar(100) DEFAULT NULL,
`no_rm_lama` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=440633 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `pembelian` (purchase invoices from suppliers)
--
CREATE TABLE IF NOT EXISTS `pembelian` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`total` int(100) NOT NULL,
`suplier` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`petugas` varchar(100) NOT NULL,
`status_pembayaran` varchar(100) NOT NULL,
`no_faktur_suplier` varchar(100) DEFAULT NULL,
`sisa` int(100) DEFAULT NULL,
`cara` varchar(100) DEFAULT NULL,
`jatuh_tempo` date DEFAULT NULL,
`diskon` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`bayar` int(100) DEFAULT NULL,
`keterangan` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `penetapan_petugas`
--
CREATE TABLE IF NOT EXISTS `penetapan_petugas` (
`id` int(100) NOT NULL,
`nama_dokter` varchar(100) DEFAULT NULL,
`nama_paramedik` varchar(100) DEFAULT NULL,
`nama_farmasi` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `penjamin` (payers / insurance guarantors and their coverage)
--
CREATE TABLE IF NOT EXISTS `penjamin` (
`id` int(50) NOT NULL,
`nama` varchar(50) DEFAULT NULL,
`alamat` text,
`no_telp` varchar(100) NOT NULL,
`harga` varchar(100) NOT NULL,
`cakupan_layanan` text,
`jatuh_tempo` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=61 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `penjualan` (sales / billing transactions, linked to registration `no_reg` and medical record `no_rm`)
--
CREATE TABLE IF NOT EXISTS `penjualan` (
`id` int(100) NOT NULL,
`no_rm` varchar(100) DEFAULT NULL,
`no_reg` varchar(100) DEFAULT NULL,
`no_faktur` varchar(100) NOT NULL,
`nama` varchar(100) DEFAULT NULL,
`dokter` varchar(100) DEFAULT NULL,
`penjamin` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`petugas` varchar(100) NOT NULL,
`total_penjualan` int(100) NOT NULL,
`jam` time NOT NULL,
`status_pembayaran` varchar(100) NOT NULL,
`total_hpp` int(100) DEFAULT NULL,
`jenis_penjualan` varchar(30) DEFAULT NULL,
`cara` varchar(100) NOT NULL,
`sisa` int(100) DEFAULT NULL,
`jatuh_tempo` date DEFAULT NULL,
`diskon` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`bayar` int(100) DEFAULT NULL,
`keterangan` text,
`no_resep` varchar(100) DEFAULT NULL,
`resep_dokter` varchar(100) DEFAULT NULL,
`apoteker` varchar(100) DEFAULT NULL,
`perawat` varchar(100) DEFAULT NULL,
`petugas_lain` varchar(100) DEFAULT NULL,
`waktu` datetime DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `penjualan_lab`
--
CREATE TABLE IF NOT EXISTS `penjualan_lab` (
`id` int(100) NOT NULL,
`kode_lab` varchar(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`harga` int(100) NOT NULL,
`bidang` varchar(100) NOT NULL,
`no_rm` varchar(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`no_reg` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`status_pembayaran` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `penjualan_tbs`
--
CREATE TABLE IF NOT EXISTS `penjualan_tbs` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) DEFAULT NULL,
`no_faktur` varchar(100) DEFAULT NULL,
`kode_produk` varchar(100) DEFAULT NULL,
`nama_produk` varchar(100) DEFAULT NULL,
`jumlah_produk` varchar(100) DEFAULT NULL,
`tipe_produk` varchar(100) DEFAULT NULL,
`harga_produk` int(100) DEFAULT NULL,
`subtotal` int(100) DEFAULT NULL,
`diskon` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`hpp` int(100) DEFAULT NULL,
`jam` time DEFAULT NULL,
`tanggal` date DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2775 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `perujuk`
--
CREATE TABLE IF NOT EXISTS `perujuk` (
`id` int(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`alamat` text NOT NULL,
`no_telp` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `perusahaan`
--
CREATE TABLE IF NOT EXISTS `perusahaan` (
`id` int(35) NOT NULL,
`nama` varchar(100) NOT NULL,
`alamat` text NOT NULL,
`singkatan_perusahaan` varchar(100) NOT NULL,
`foto` varchar(100) NOT NULL,
`no_telp` varchar(100) NOT NULL,
`no_fax` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `piutang` (accounts receivable, typically owed by guarantors)
--
CREATE TABLE IF NOT EXISTS `piutang` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`penjamin` varchar(100) DEFAULT NULL,
`petugas` varchar(100) NOT NULL,
`petugas_edit` varchar(100) DEFAULT NULL,
`tanggal` date NOT NULL,
`tanggal_edit` date DEFAULT NULL,
`keterangan` varchar(100) DEFAULT NULL,
`total` int(100) NOT NULL,
`ke_kas` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `poli`
--
CREATE TABLE IF NOT EXISTS `poli` (
`id` int(50) NOT NULL,
`nama` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=21 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `produk` (product / drug inventory master: stock levels, purchase price, three sale-price tiers)
--
CREATE TABLE IF NOT EXISTS `produk` (
`id` int(100) NOT NULL,
`kode_produk` varchar(100) DEFAULT NULL,
`nama_produk` varchar(100) DEFAULT NULL,
`jenis_produk` varchar(100) DEFAULT NULL,
`tipe_produk` varchar(100) DEFAULT NULL,
`satuan_produk` varchar(100) DEFAULT NULL,
`harga_beli` int(100) DEFAULT NULL,
`stok_produk` int(100) DEFAULT NULL,
`harga_jual_1` int(100) DEFAULT NULL,
`harga_jual_2` int(100) DEFAULT NULL,
`harga_jual_3` int(100) DEFAULT NULL,
`suplier` varchar(100) DEFAULT NULL,
`total` int(100) DEFAULT NULL,
`status_stok_awal` varchar(100) DEFAULT NULL,
`limit_stok` varchar(100) DEFAULT NULL,
`over_stok` varchar(100) DEFAULT NULL,
`kategori` varchar(100) DEFAULT NULL,
`golongan` varchar(100) DEFAULT NULL,
`golongan_produk` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=1676 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `provinsi`
--
CREATE TABLE IF NOT EXISTS `provinsi` (
`id_prov` int(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`status_pakai` int(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `registrasi` (patient registrations / encounters: admission, ward, referring doctor, discharge)
--
CREATE TABLE IF NOT EXISTS `registrasi` (
`id` int(100) NOT NULL,
`no_reg` varchar(255) DEFAULT NULL,
`no_rm` varchar(255) DEFAULT NULL,
`nama_pasien` varchar(100) DEFAULT NULL,
`alamat_pasien` varchar(100) DEFAULT NULL,
`hp_pasien` varchar(100) DEFAULT NULL,
`umur_pasien` varchar(100) DEFAULT NULL,
`jenis_kelamin` varchar(100) DEFAULT NULL,
`jenis_pasien` varchar(100) DEFAULT NULL,
`penjamin` varchar(100) DEFAULT NULL,
`tanggal` date DEFAULT NULL,
`jam` time DEFAULT NULL,
`tanggal_masuk` date DEFAULT NULL,
`tanggal_keluar` date DEFAULT NULL,
`kondisi` varchar(100) DEFAULT NULL,
`pengantar_pasien` varchar(100) DEFAULT NULL,
`nama_pengantar` varchar(100) DEFAULT NULL,
`alamat_pengantar` varchar(100) DEFAULT NULL,
`hp_pengantar` varchar(100) DEFAULT NULL,
`hubungan_dengan_pasien` varchar(100) DEFAULT NULL,
`penanggung_jawab` varchar(100) DEFAULT NULL,
`alamat_penanggung_jawab` varchar(100) DEFAULT NULL,
`hp_penanggung_jawab` varchar(100) DEFAULT NULL,
`pekerjaan_penanggung_jawab` varchar(100) DEFAULT NULL,
`status` varchar(100) DEFAULT NULL,
`keterangan` varchar(100) DEFAULT NULL,
`dokter` varchar(100) DEFAULT NULL,
`petugas` varchar(100) DEFAULT NULL,
`bed` varchar(100) DEFAULT NULL,
`group_bed` varchar(100) DEFAULT NULL,
`menginap` varchar(100) DEFAULT NULL,
`keadaan_pulang` varchar(100) DEFAULT NULL,
`poli` varchar(100) DEFAULT NULL,
`no_urut` int(100) DEFAULT NULL,
`rujukan` varchar(100) DEFAULT NULL,
`kelurahan` varchar(100) DEFAULT NULL,
`kecamatan` varchar(100) DEFAULT NULL,
`kabupaten` varchar(100) DEFAULT NULL,
`provinsi` varchar(100) DEFAULT NULL,
`dokter_jaga` varchar(100) DEFAULT NULL,
`dokter_pengirim` varchar(100) DEFAULT NULL,
`status_nikah` varchar(100) DEFAULT NULL,
`pekerjaan_pasien` varchar(100) DEFAULT NULL,
`gol_darah` varchar(100) DEFAULT NULL,
`surat_jaminan` varchar(100) NOT NULL,
`alergi` varchar(100) DEFAULT NULL,
`no_kk` varchar(100) DEFAULT NULL,
`nama_kk` varchar(100) DEFAULT NULL,
`eye` varchar(100) DEFAULT NULL,
`verbal` varchar(100) DEFAULT NULL,
`motorik` varchar(100) DEFAULT NULL,
`petugas_lain` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2620 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `rekam_medik`
--
CREATE TABLE IF NOT EXISTS `rekam_medik` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) NOT NULL,
`no_rm` varchar(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`alamat` text NOT NULL,
`umur` varchar(100) NOT NULL,
`jenis_kelamin` varchar(100) NOT NULL,
`sistole_distole` varchar(100) NOT NULL,
`suhu` varchar(100) NOT NULL,
`berat_badan` varchar(100) NOT NULL,
`tinggi_badan` varchar(100) NOT NULL,
`nadi` varchar(100) NOT NULL,
`respiratory` varchar(100) NOT NULL,
`poli` varchar(100) NOT NULL,
`anamnesa` varchar(100) DEFAULT NULL,
`pemeriksaan_fisik` varchar(100) DEFAULT NULL,
`keadaan_umum` varchar(100) DEFAULT NULL,
`kondisi_keluar` varchar(100) DEFAULT NULL,
`kesadaran` varchar(100) DEFAULT NULL,
`icd_utama` varchar(100) DEFAULT NULL,
`icd_penyerta` varchar(100) DEFAULT NULL,
`icd_komplikasi` varchar(100) DEFAULT NULL,
`kode_utama` varchar(100) DEFAULT NULL,
`kode_penyerta` varchar(100) DEFAULT NULL,
`kode_penyerta_tambahan` varchar(100) NOT NULL,
`icd_penyerta_tambahan` varchar(100) NOT NULL,
`kode_komplikasi` varchar(100) DEFAULT NULL,
`tanggal_periksa` varchar(100) NOT NULL,
`jam` time NOT NULL,
`dokter` varchar(100) NOT NULL,
`kondisi` varchar(100) NOT NULL,
`rujukan` varchar(100) NOT NULL,
`alergi` varchar(100) DEFAULT NULL,
`no_kk` varchar(100) DEFAULT NULL,
`nama_kk` varchar(100) DEFAULT NULL,
`status` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2615 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `rekam_medik_inap`
--
CREATE TABLE IF NOT EXISTS `rekam_medik_inap` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) NOT NULL,
`no_rm` varchar(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`alamat` text NOT NULL,
`umur` varchar(100) NOT NULL,
`jenis_kelamin` varchar(100) NOT NULL,
`sistole_distole` varchar(100) NOT NULL,
`suhu` varchar(100) NOT NULL,
`berat_badan` varchar(100) NOT NULL,
`tinggi_badan` varchar(100) NOT NULL,
`nadi` varchar(100) NOT NULL,
`respiratory` varchar(100) NOT NULL,
`poli` varchar(100) NOT NULL,
`anamnesa` varchar(100) DEFAULT NULL,
`pemeriksaan_fisik` varchar(100) DEFAULT NULL,
`keadaan_umum` varchar(100) DEFAULT NULL,
`kondisi_keluar` varchar(100) DEFAULT NULL,
`kesadaran` varchar(100) DEFAULT NULL,
`icd_utama` varchar(100) DEFAULT NULL,
`icd_penyerta` varchar(100) DEFAULT NULL,
`icd_penyerta_tambahan` varchar(100) NOT NULL,
`icd_komplikasi` varchar(100) DEFAULT NULL,
`kode_utama` varchar(100) DEFAULT NULL,
`kode_penyerta` varchar(100) DEFAULT NULL,
`kode_penyerta_tambahan` varchar(100) NOT NULL,
`kode_komplikasi` varchar(100) DEFAULT NULL,
`tanggal_periksa` date NOT NULL,
`jam` time NOT NULL,
`dokter` varchar(100) NOT NULL,
`kondisi` varchar(100) NOT NULL,
`rujukan` varchar(100) NOT NULL,
`dokter_penanggung_jawab` varchar(100) NOT NULL,
`bed` varchar(100) NOT NULL,
`alergi` varchar(100) DEFAULT NULL,
`group_bed` varchar(100) DEFAULT NULL,
`status` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `rekam_medik_ugd`
--
CREATE TABLE IF NOT EXISTS `rekam_medik_ugd` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) DEFAULT NULL,
`no_rm` varchar(100) DEFAULT NULL,
`nama` varchar(100) DEFAULT NULL,
`alamat` text,
`umur` varchar(100) DEFAULT NULL,
`jenis_kelamin` varchar(100) DEFAULT NULL,
`sistole_distole` varchar(100) DEFAULT NULL,
`suhu` varchar(100) DEFAULT NULL,
`berat_badan` varchar(100) DEFAULT NULL,
`tinggi_badan` varchar(100) DEFAULT NULL,
`nadi` varchar(100) DEFAULT NULL,
`respiratory` varchar(100) DEFAULT NULL,
`poli` varchar(100) DEFAULT NULL,
`anamnesa` varchar(100) DEFAULT NULL,
`pemeriksaan_fisik` varchar(100) DEFAULT NULL,
`keadaan_umum` varchar(100) DEFAULT NULL,
`kondisi_keluar` varchar(100) DEFAULT NULL,
`kesadaran` varchar(100) DEFAULT NULL,
`icd_utama` varchar(100) DEFAULT NULL,
`icd_penyerta` varchar(100) DEFAULT NULL,
`icd_penyerta_tambahan` varchar(100) DEFAULT NULL,
`icd_komplikasi` varchar(100) DEFAULT NULL,
`kode_utama` varchar(100) DEFAULT NULL,
`kode_penyerta` varchar(100) DEFAULT NULL,
`kode_penyerta_tambahan` varchar(100) DEFAULT NULL,
`kode_komplikasi` varchar(100) DEFAULT NULL,
`jam` time DEFAULT NULL,
`tanggal` date DEFAULT NULL,
`dokter` varchar(100) DEFAULT NULL,
`kondisi` varchar(100) DEFAULT NULL,
`rujukan` varchar(100) DEFAULT NULL,
`pengantar` varchar(100) DEFAULT NULL,
`alergi` varchar(100) DEFAULT NULL,
`eye` varchar(100) DEFAULT NULL,
`verbal` varchar(100) DEFAULT NULL,
`motorik` varchar(100) DEFAULT NULL,
`status` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `retur_pembelian`
--
CREATE TABLE IF NOT EXISTS `retur_pembelian` (
`id` int(100) NOT NULL,
`no_retur` varchar(100) NOT NULL,
`nama_suplier` varchar(100) NOT NULL,
`keterangan` varchar(100) NOT NULL,
`total` int(100) NOT NULL,
`user` varchar(100) NOT NULL,
`user_edit` varchar(100) DEFAULT NULL,
`tanggal` date NOT NULL,
`tanggal_edit` date DEFAULT NULL,
`jam` time NOT NULL,
`cara` varchar(100) NOT NULL,
`bayar` int(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `retur_penjualan`
--
CREATE TABLE IF NOT EXISTS `retur_penjualan` (
`id` int(100) NOT NULL,
`no_rm` varchar(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`keterangan` varchar(100) DEFAULT NULL,
`total` int(100) NOT NULL,
`user` varchar(100) NOT NULL,
`user_edit` varchar(100) DEFAULT NULL,
`tanggal` date NOT NULL,
`tanggal_edit` date DEFAULT NULL,
`jam` time NOT NULL,
`no_return` varchar(100) NOT NULL,
`bayar` int(100) DEFAULT NULL,
`cara` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `satuan`
--
CREATE TABLE IF NOT EXISTS `satuan` (
`id` int(100) NOT NULL,
`nama` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=42 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `setting_printer`
--
CREATE TABLE IF NOT EXISTS `setting_printer` (
`id` int(100) NOT NULL,
`nama_print` varchar(100) DEFAULT NULL,
`status_print` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `setup_hasil`
--
CREATE TABLE IF NOT EXISTS `setup_hasil` (
`id` int(100) NOT NULL,
`text_hasil` varchar(100) NOT NULL,
`nama_pemeriksaan` varchar(100) NOT NULL,
`kelompok_pemeriksaan` varchar(100) DEFAULT NULL,
`model_hitung` varchar(100) NOT NULL,
`normal_lk` varchar(100) DEFAULT NULL,
`normal_pr` varchar(100) DEFAULT NULL,
`metode` varchar(100) DEFAULT NULL,
`kategori_index` varchar(100) NOT NULL,
`normal_lk2` varchar(100) DEFAULT NULL,
`normal_pr2` varchar(100) DEFAULT NULL,
`satuan_nilai_normal` varchar(100) DEFAULT NULL,
`text_reference` text
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `stok_awal`
--
CREATE TABLE IF NOT EXISTS `stok_awal` (
`id` int(100) NOT NULL,
`kode_barang` varchar(100) NOT NULL,
`nama_barang` varchar(100) NOT NULL,
`jumlah` int(100) NOT NULL,
`satuan` varchar(100) NOT NULL,
`harga` int(100) NOT NULL,
`total` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`user` varchar(100) NOT NULL,
`tanggal_edit` date DEFAULT NULL,
`user_edit` varchar(100) DEFAULT NULL,
`keterangan` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=154 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `stok_opname`
--
CREATE TABLE IF NOT EXISTS `stok_opname` (
`id` int(100) NOT NULL,
`nomor` varchar(100) NOT NULL,
`keterangan` varchar(100) DEFAULT NULL,
`jumlah_selisih_harga` int(100) NOT NULL,
`tanggal` date NOT NULL,
`user` varchar(100) NOT NULL,
`status` varchar(100) NOT NULL,
`total_hpp` int(100) NOT NULL,
`user_edit` varchar(100) DEFAULT NULL,
`tanggal_edit` date DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `suplier`
--
CREATE TABLE IF NOT EXISTS `suplier` (
`id` int(100) NOT NULL,
`kode_suplier` varchar(100) NOT NULL,
`nama_suplier` varchar(100) NOT NULL,
`no_telp` varchar(100) NOT NULL,
`alamat` text NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=27 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_fee_produk`
--
CREATE TABLE IF NOT EXISTS `tbs_fee_produk` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) DEFAULT NULL,
`no_rm` varchar(100) DEFAULT NULL,
`no_faktur` varchar(100) NOT NULL,
`nama_petugas` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jumlah_fee` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`waktu` datetime DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=4305 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_hutang`
--
CREATE TABLE IF NOT EXISTS `tbs_hutang` (
`id` int(100) NOT NULL,
`no_faktur_pembayaran` varchar(100) NOT NULL,
`no_faktur_pembelian` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`jatuh_tempo` date DEFAULT NULL,
`sisa` int(100) NOT NULL,
`potongan` int(100) DEFAULT NULL,
`total` int(100) NOT NULL,
`jumlah_bayar` int(100) NOT NULL,
`nama_suplier` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_item_keluar`
--
CREATE TABLE IF NOT EXISTS `tbs_item_keluar` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jenis_produk` varchar(100) NOT NULL,
`harga_produk` int(100) NOT NULL,
`jumlah_produk` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`hpp` int(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_item_masuk`
--
CREATE TABLE IF NOT EXISTS `tbs_item_masuk` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jenis_produk` varchar(100) NOT NULL,
`harga_produk` int(100) NOT NULL,
`jumlah_produk` int(100) NOT NULL,
`subtotal` int(100) DEFAULT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_pembelian`
--
CREATE TABLE IF NOT EXISTS `tbs_pembelian` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`tipe_produk` varchar(100) NOT NULL,
`jumlah_produk` int(100) NOT NULL,
`harga_produk` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`diskon` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`suplier` varchar(100) NOT NULL
) ENGINE=InnoDB AUTO_INCREMENT=422 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_penjualan`
--
CREATE TABLE IF NOT EXISTS `tbs_penjualan` (
`id` int(100) NOT NULL,
`no_reg` varchar(100) DEFAULT NULL,
`no_faktur` varchar(100) DEFAULT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jumlah_produk` int(100) NOT NULL,
`tipe_produk` varchar(100) NOT NULL,
`harga_produk` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`jam` time NOT NULL,
`tanggal` date NOT NULL,
`hpp` int(100) DEFAULT NULL,
`diskon` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`dosis` varchar(100) DEFAULT NULL,
`session_id` varchar(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=15344 DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_piutang`
--
CREATE TABLE IF NOT EXISTS `tbs_piutang` (
`no_faktur_pembayaran` varchar(100) NOT NULL,
`no_faktur_penjualan` varchar(100) NOT NULL,
`tanggal` date NOT NULL,
`tanggal_jt` date NOT NULL,
`sisa` int(100) NOT NULL,
`potongan` int(100) DEFAULT NULL,
`total` int(100) NOT NULL,
`jumlah_total` int(100) NOT NULL,
`id` int(100) NOT NULL,
`penjamin` varchar(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_retur_pembelian`
--
CREATE TABLE IF NOT EXISTS `tbs_retur_pembelian` (
`id` int(110) NOT NULL,
`no_retur` varchar(110) NOT NULL,
`no_faktur` varchar(110) NOT NULL,
`kode_produk` varchar(110) NOT NULL,
`nama_produk` varchar(110) NOT NULL,
`jumlah_jual` int(110) NOT NULL,
`tipe_produk` varchar(110) NOT NULL,
`jumlah_retur` int(110) NOT NULL,
`harga` int(110) NOT NULL,
`subtotal` int(110) NOT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`potongan` int(100) DEFAULT NULL,
`hpp` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_retur_penjualan`
--
CREATE TABLE IF NOT EXISTS `tbs_retur_penjualan` (
`id` int(100) NOT NULL,
`no_faktur` varchar(100) NOT NULL,
`kode_produk` varchar(100) NOT NULL,
`nama_produk` varchar(100) NOT NULL,
`jumlah_jual` int(100) NOT NULL,
`tipe_produk` varchar(100) NOT NULL,
`jumlah_retur` int(100) NOT NULL,
`harga` int(100) NOT NULL,
`subtotal` int(100) NOT NULL,
`jam` time NOT NULL,
`tanggal` date NOT NULL,
`potongan` int(100) DEFAULT NULL,
`tax` int(100) DEFAULT NULL,
`hpp` int(100) DEFAULT NULL,
`no_retur` varchar(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_stok_awal`
--
CREATE TABLE IF NOT EXISTS `tbs_stok_awal` (
`id` int(100) NOT NULL,
`kode_barang` varchar(100) NOT NULL,
`nama_barang` varchar(100) NOT NULL,
`jumlah` int(100) NOT NULL,
`satuan` varchar(100) NOT NULL,
`harga` int(100) NOT NULL,
`total` int(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `tbs_stok_opname`
--
CREATE TABLE IF NOT EXISTS `tbs_stok_opname` (
`id` int(100) NOT NULL,
`nomor` varchar(100) NOT NULL,
`kode_barang` varchar(100) NOT NULL,
`nama_barang` varchar(100) NOT NULL,
`harga_beli` int(100) NOT NULL,
`stok_terakhir` int(100) DEFAULT NULL,
`fisik` varchar(100) DEFAULT NULL,
`selisih` int(100) DEFAULT NULL,
`selisih_harga` int(100) DEFAULT NULL,
`tanggal` date NOT NULL,
`jam` time NOT NULL,
`user` varchar(100) NOT NULL,
`hpp` int(100) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `user`
--
-- Application user accounts (login identity plus basic personnel data).
-- NOTE(review): `password` is a plain varchar(100) and this dump shows no
-- hashing — verify the application stores a salted hash (bcrypt/argon2),
-- not plaintext, before going to production.
CREATE TABLE IF NOT EXISTS `user` (
`id` int(100) NOT NULL,
`username` varchar(100) NOT NULL,
`password` varchar(100) NOT NULL,
`nama` varchar(100) NOT NULL,
`jabatan` varchar(100) NOT NULL,
`otoritas` varchar(100) NOT NULL,
`alamat` varchar(100) NOT NULL,
`nik` varchar(100) NOT NULL,
`tempat_lahir` varchar(100) NOT NULL,
`tanggal_lahir` date NOT NULL,
`no_hp` varchar(100) NOT NULL,
`status_pakai` int(100) DEFAULT NULL
) ENGINE=InnoDB AUTO_INCREMENT=302 DEFAULT CHARSET=latin1;
--
-- Indexes for dumped tables
--
--
-- Indexes for table `bed`
--
ALTER TABLE `bed`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `bidang_lab`
--
ALTER TABLE `bidang_lab`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_hutang`
--
ALTER TABLE `detail_hutang`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_item_keluar`
--
ALTER TABLE `detail_item_keluar`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_item_masuk`
--
ALTER TABLE `detail_item_masuk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_pembelian`
--
ALTER TABLE `detail_pembelian`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_penjualan`
--
ALTER TABLE `detail_penjualan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_piutang`
--
ALTER TABLE `detail_piutang`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_retur_pembelian`
--
ALTER TABLE `detail_retur_pembelian`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_retur_penjualan`
--
ALTER TABLE `detail_retur_penjualan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `detail_stok_opname`
--
ALTER TABLE `detail_stok_opname`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `dokter`
--
ALTER TABLE `dokter`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `fee_pasien`
--
ALTER TABLE `fee_pasien`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `fee_produk`
--
ALTER TABLE `fee_produk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `group_bed`
--
ALTER TABLE `group_bed`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `hpp_barang`
--
ALTER TABLE `hpp_barang`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `hutang`
--
ALTER TABLE `hutang`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `icd`
--
ALTER TABLE `icd`
ADD PRIMARY KEY (`ICD`,`DTD`), ADD KEY `NewIndex1` (`Deskripsi`,`Deskripsi_ina`);
--
-- Indexes for table `item_keluar`
--
ALTER TABLE `item_keluar`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `item_masuk`
--
ALTER TABLE `item_masuk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `jabatan`
--
ALTER TABLE `jabatan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `jasa_lab`
--
ALTER TABLE `jasa_lab`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `jenis`
--
ALTER TABLE `jenis`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `kabupaten`
--
ALTER TABLE `kabupaten`
ADD PRIMARY KEY (`id_kab`);
--
-- Indexes for table `kartu_stok`
--
ALTER TABLE `kartu_stok`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `kas`
--
ALTER TABLE `kas`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `kas_keluar`
--
ALTER TABLE `kas_keluar`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `kas_masuk`
--
ALTER TABLE `kas_masuk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `kas_mutasi`
--
ALTER TABLE `kas_mutasi`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `kategori_keluar`
--
ALTER TABLE `kategori_keluar`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `kategori_masuk`
--
ALTER TABLE `kategori_masuk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `kecamatan`
--
ALTER TABLE `kecamatan`
ADD PRIMARY KEY (`id_kec`);
--
-- Indexes for table `kelurahan`
--
ALTER TABLE `kelurahan`
ADD PRIMARY KEY (`id_kel`);
--
-- Indexes for table `laporan_fee_pasien`
--
ALTER TABLE `laporan_fee_pasien`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `laporan_fee_produk`
--
ALTER TABLE `laporan_fee_produk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `login`
--
ALTER TABLE `login`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `pasien`
--
ALTER TABLE `pasien`
ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `no_rm` (`no_rm`);
--
-- Indexes for table `pembelian`
--
ALTER TABLE `pembelian`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `penetapan_petugas`
--
ALTER TABLE `penetapan_petugas`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `penjamin`
--
ALTER TABLE `penjamin`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `penjualan`
--
ALTER TABLE `penjualan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `penjualan_lab`
--
ALTER TABLE `penjualan_lab`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `penjualan_tbs`
--
ALTER TABLE `penjualan_tbs`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `perujuk`
--
ALTER TABLE `perujuk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `perusahaan`
--
ALTER TABLE `perusahaan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `piutang`
--
ALTER TABLE `piutang`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `poli`
--
ALTER TABLE `poli`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `produk`
--
ALTER TABLE `produk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `provinsi`
--
ALTER TABLE `provinsi`
ADD PRIMARY KEY (`id_prov`);
--
-- Indexes for table `registrasi`
--
ALTER TABLE `registrasi`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `rekam_medik`
--
ALTER TABLE `rekam_medik`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `rekam_medik_inap`
--
ALTER TABLE `rekam_medik_inap`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `rekam_medik_ugd`
--
ALTER TABLE `rekam_medik_ugd`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `retur_pembelian`
--
ALTER TABLE `retur_pembelian`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `retur_penjualan`
--
ALTER TABLE `retur_penjualan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `satuan`
--
ALTER TABLE `satuan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `setting_printer`
--
ALTER TABLE `setting_printer`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `setup_hasil`
--
ALTER TABLE `setup_hasil`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `stok_awal`
--
ALTER TABLE `stok_awal`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `stok_opname`
--
ALTER TABLE `stok_opname`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `suplier`
--
ALTER TABLE `suplier`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_fee_produk`
--
ALTER TABLE `tbs_fee_produk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_hutang`
--
ALTER TABLE `tbs_hutang`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_item_keluar`
--
ALTER TABLE `tbs_item_keluar`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_item_masuk`
--
ALTER TABLE `tbs_item_masuk`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_pembelian`
--
ALTER TABLE `tbs_pembelian`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_penjualan`
--
ALTER TABLE `tbs_penjualan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_piutang`
--
ALTER TABLE `tbs_piutang`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_retur_pembelian`
--
ALTER TABLE `tbs_retur_pembelian`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_retur_penjualan`
--
ALTER TABLE `tbs_retur_penjualan`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_stok_awal`
--
ALTER TABLE `tbs_stok_awal`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `tbs_stok_opname`
--
ALTER TABLE `tbs_stok_opname`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user`
--
ALTER TABLE `user`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `bed`
--
ALTER TABLE `bed`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=62;
--
-- AUTO_INCREMENT for table `bidang_lab`
--
ALTER TABLE `bidang_lab`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT for table `detail_hutang`
--
ALTER TABLE `detail_hutang`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `detail_item_keluar`
--
ALTER TABLE `detail_item_keluar`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=25;
--
-- AUTO_INCREMENT for table `detail_item_masuk`
--
ALTER TABLE `detail_item_masuk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `detail_pembelian`
--
ALTER TABLE `detail_pembelian`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=288;
--
-- AUTO_INCREMENT for table `detail_penjualan`
--
ALTER TABLE `detail_penjualan`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=19934;
--
-- AUTO_INCREMENT for table `detail_piutang`
--
ALTER TABLE `detail_piutang`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `detail_retur_pembelian`
--
ALTER TABLE `detail_retur_pembelian`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `detail_retur_penjualan`
--
ALTER TABLE `detail_retur_penjualan`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `detail_stok_opname`
--
ALTER TABLE `detail_stok_opname`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=90;
--
-- AUTO_INCREMENT for table `dokter`
--
ALTER TABLE `dokter`
MODIFY `id` int(50) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `fee_pasien`
--
ALTER TABLE `fee_pasien`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `fee_produk`
--
ALTER TABLE `fee_produk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=9187;
--
-- AUTO_INCREMENT for table `group_bed`
--
ALTER TABLE `group_bed`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=18;
--
-- AUTO_INCREMENT for table `hpp_barang`
--
ALTER TABLE `hpp_barang`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=288;
--
-- AUTO_INCREMENT for table `hutang`
--
ALTER TABLE `hutang`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `item_keluar`
--
ALTER TABLE `item_keluar`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=6;
--
-- AUTO_INCREMENT for table `item_masuk`
--
ALTER TABLE `item_masuk`
MODIFY `id` int(110) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `jabatan`
--
ALTER TABLE `jabatan`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=27;
--
-- AUTO_INCREMENT for table `jasa_lab`
--
ALTER TABLE `jasa_lab`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=20;
--
-- AUTO_INCREMENT for table `jenis`
--
ALTER TABLE `jenis`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=91;
--
-- AUTO_INCREMENT for table `kartu_stok`
--
ALTER TABLE `kartu_stok`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2425;
--
-- AUTO_INCREMENT for table `kas`
--
ALTER TABLE `kas`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `kas_keluar`
--
ALTER TABLE `kas_keluar`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=165;
--
-- AUTO_INCREMENT for table `kas_masuk`
--
ALTER TABLE `kas_masuk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2634;
--
-- AUTO_INCREMENT for table `kas_mutasi`
--
ALTER TABLE `kas_mutasi`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=16;
--
-- AUTO_INCREMENT for table `kategori_keluar`
--
ALTER TABLE `kategori_keluar`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=39;
--
-- AUTO_INCREMENT for table `kategori_masuk`
--
ALTER TABLE `kategori_masuk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=18;
--
-- AUTO_INCREMENT for table `laporan_fee_pasien`
--
ALTER TABLE `laporan_fee_pasien`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `laporan_fee_produk`
--
ALTER TABLE `laporan_fee_produk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=6871;
--
-- AUTO_INCREMENT for table `login`
--
ALTER TABLE `login`
MODIFY `id` int(50) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `pasien`
--
ALTER TABLE `pasien`
MODIFY `id` int(50) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=440633;
--
-- AUTO_INCREMENT for table `penetapan_petugas`
--
ALTER TABLE `penetapan_petugas`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `penjamin`
--
ALTER TABLE `penjamin`
MODIFY `id` int(50) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=61;
--
-- AUTO_INCREMENT for table `penjualan_lab`
--
ALTER TABLE `penjualan_lab`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `penjualan_tbs`
--
ALTER TABLE `penjualan_tbs`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2775;
--
-- AUTO_INCREMENT for table `perujuk`
--
ALTER TABLE `perujuk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `perusahaan`
--
ALTER TABLE `perusahaan`
MODIFY `id` int(35) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `piutang`
--
ALTER TABLE `piutang`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `poli`
--
ALTER TABLE `poli`
MODIFY `id` int(50) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=21;
--
-- AUTO_INCREMENT for table `produk`
--
ALTER TABLE `produk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=1676;
--
-- AUTO_INCREMENT for table `registrasi`
--
ALTER TABLE `registrasi`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2620;
--
-- AUTO_INCREMENT for table `rekam_medik`
--
ALTER TABLE `rekam_medik`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2615;
--
-- AUTO_INCREMENT for table `rekam_medik_inap`
--
ALTER TABLE `rekam_medik_inap`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `rekam_medik_ugd`
--
ALTER TABLE `rekam_medik_ugd`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `retur_pembelian`
--
ALTER TABLE `retur_pembelian`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `retur_penjualan`
--
ALTER TABLE `retur_penjualan`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `satuan`
--
ALTER TABLE `satuan`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=42;
--
-- AUTO_INCREMENT for table `setting_printer`
--
ALTER TABLE `setting_printer`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `setup_hasil`
--
ALTER TABLE `setup_hasil`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `stok_awal`
--
ALTER TABLE `stok_awal`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=154;
--
-- AUTO_INCREMENT for table `stok_opname`
--
ALTER TABLE `stok_opname`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `suplier`
--
ALTER TABLE `suplier`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=27;
--
-- AUTO_INCREMENT for table `tbs_fee_produk`
--
ALTER TABLE `tbs_fee_produk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=4305;
--
-- AUTO_INCREMENT for table `tbs_hutang`
--
ALTER TABLE `tbs_hutang`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `tbs_item_keluar`
--
ALTER TABLE `tbs_item_keluar`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `tbs_item_masuk`
--
ALTER TABLE `tbs_item_masuk`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `tbs_pembelian`
--
ALTER TABLE `tbs_pembelian`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=422;
--
-- AUTO_INCREMENT for table `tbs_penjualan`
--
ALTER TABLE `tbs_penjualan`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=15344;
--
-- AUTO_INCREMENT for table `tbs_piutang`
--
ALTER TABLE `tbs_piutang`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `tbs_retur_pembelian`
--
ALTER TABLE `tbs_retur_pembelian`
MODIFY `id` int(110) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `tbs_retur_penjualan`
--
ALTER TABLE `tbs_retur_penjualan`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `tbs_stok_awal`
--
ALTER TABLE `tbs_stok_awal`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `tbs_stok_opname`
--
ALTER TABLE `tbs_stok_opname`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `user`
--
ALTER TABLE `user`
MODIFY `id` int(100) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=302;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
// Create 4 functions called add, subtract, multiply, and divide.
// Each takes two numbers and returns the result of the named operation.

function add(x, y) {
  return x + y;
}

// Bug fix: the function was previously named "subratct" (typo); the call
// site below is updated to match.
function subtract(x, y) {
  return x - y;
}

function multiply(x, y) {
  return x * y;
}

function divide(x, y) {
  return x / y;
}

// Interactive driver. prompt() only exists in browsers, so guard it; under
// Node the functions above remain importable/testable without side effects.
if (typeof prompt === 'function') {
  // Explicit radix 10 avoids legacy octal parsing of leading-zero input.
  var num1 = parseInt(prompt("Enter your first number"), 10);
  var num2 = parseInt(prompt("Enter your second number"), 10);
  var input = prompt("Would you like to ADD, SUBTRACT, MULTIPLY, or DIVIDE your numbers?");

  if (input === "ADD")
    console.log(add(num1, num2));
  else if (input === "SUBTRACT")
    console.log(subtract(num1, num2));
  else if (input === "MULTIPLY")
    console.log(multiply(num1, num2)); // bug fix: previously called add()
  else if (input === "DIVIDE")
    console.log(divide(num1, num2));
}
|
# @author <NAME>
# Extensions for Array used with objects exposing a nutritional-footprint
# value (huella_nutricional).
class Array
  # Returns the element with the largest huella_nutricional.
  # @return [Object] the element whose huella_nutricional is greatest
  def Max
    self.max { |a, b| a.huella_nutricional <=> b.huella_nutricional }
  end

  # Raises each price according to the maximum nutritional footprint of the
  # receiver: +10% when the maximum is 1, +25% when it is 2, +50% otherwise.
  # @param precios [Array<Float>] prices, parallel to the receiver
  # @return [Array<Float>] the increased prices, rounded to 2 decimals
  def high_prices(precios)
    huella_maxima = self.Max.huella_nutricional
    i = 0
    nuevos_precios = []
    self.each do |_iter|
      # Bug fix: the original used assignment (=) instead of comparison (==),
      # so the condition was always truthy and only the +10% branch ever ran.
      if huella_maxima == 1
        nuevos_precios.push((precios[i] * 1.10).round(2))
      elsif huella_maxima == 2
        nuevos_precios.push((precios[i] * 1.25).round(2))
      else
        nuevos_precios.push((precios[i] * 1.50).round(2))
      end
      i += 1
    end
    return nuevos_precios
  end
end
|
// Show the back-to-top arrow (via the 'opacity' class) once the page has
// scrolled at least 600px; hide it again above that threshold.
const arrow = document.getElementById('arrow');

window.addEventListener("scroll", () => {
  // toggle's second argument forces add (true) or remove (false), which is
  // exactly the add/remove branching the original spelled out.
  arrow.classList.toggle('opacity', window.scrollY >= 600);
});
<filename>services/vsts.js
// https://docs.microsoft.com/en-us/vsts/pipelines/build/variables
// The docs indicate that SYSTEM_PULLREQUEST_SOURCEBRANCH and SYSTEM_PULLREQUEST_TARGETBRANCH are in the long format (e.g `refs/heads/master`) however tests show they are both in the short format (e.g. `master`)
module.exports = {
detect({env}) {
return Boolean(env.BUILD_BUILDURI);
},
configuration({env}) {
const pr = env.SYSTEM_PULLREQUEST_PULLREQUESTID;
const isPr = Boolean(pr);
return {
name: 'Visual Studio Team Services',
service: 'vsts',
commit: env.BUILD_SOURCEVERSION,
build: env.BUILD_BUILDNUMBER,
branch: isPr ? env.SYSTEM_PULLREQUEST_TARGETBRANCH : env.BUILD_SOURCEBRANCHNAME,
pr,
isPr,
prBranch: isPr ? env.SYSTEM_PULLREQUEST_SOURCEBRANCH : undefined,
root: env.BUILD_REPOSITORY_LOCALPATH,
};
},
};
|
/**
* Copyright 2016 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';

const assert = require('chai').assert;
const getEnvCredentials = require('./utilities').getEnvCredentials;
const TwitterCrawler = require('../lib/twitter-crawler');
const log = require('winston'); // NOTE(review): unused in this file — confirm before removing.

// Integration tests for TwitterCrawler. They call the live Twitter API with
// credentials read from the environment (see ./utilities), so network access
// and valid keys are required; expected values depend on live account data.
describe('crawler', () => {
  it('crawl user with twitter handle', () => {
    const crawler = new TwitterCrawler(getEnvCredentials());
    return crawler.getUser('ladygaga')
      .then(user => assert.equal(user.screen_name, 'ladygaga', 'Obtained user'));
  });

  it('crawl user with twitter ID', () => {
    const crawler = new TwitterCrawler(getEnvCredentials());
    // getUser also accepts a numeric user id passed as a string.
    return crawler.getUser('19397785')
      .then(user => assert.equal(user.screen_name, 'Oprah', 'Obtained user'));
  });

  it('crawl tweets with twitter handle and limit', () => {
    const crawler = new TwitterCrawler(getEnvCredentials());
    // limit caps the number of tweets returned.
    return crawler.getTweets('ladygaga', { limit: 300 })
      .then(tweets => assert.equal(tweets.length, 300, 'Obtained tweets with upper limit'));
  });

  it('crawl tweets with twitter ID and limit', () => {
    const crawler = new TwitterCrawler(getEnvCredentials());
    return crawler.getTweets('19397785', { min_tweets : 1, limit: 50 })
      .then(tweets => assert.equal(tweets.length, 50, 'Obtained tweets with upper limit'));
  });

  it('crawl tweets with parameters', () => {
    const crawler = new TwitterCrawler(getEnvCredentials());
    // The first argument may also be a raw Twitter API parameter object
    // instead of a handle/id string.
    return crawler.getTweets({
      screen_name : 'ladygaga',
      count : 151,
      exclude_replies : false,
      include_rts : true
    }, { limit: 30 })
      .then(tweets => assert.equal(tweets.length, 30, 'Obtained tweets with upper limit'));
  });

  it('crawl user without tweets', () => {
    const crawler = new TwitterCrawler(getEnvCredentials());
    return crawler.getTweets('o', { limit: 10 })
      .then(tweets => assert.equal(tweets.length, 0, 'Obtained tweets with upper limit'));
  });

  it('crawl user with less tweets than limit', () => {
    const crawler = new TwitterCrawler(getEnvCredentials());
    return crawler.getTweets('e', { limit: 300 })
      .then(tweets => assert.isBelow(tweets.length, 300, 'Obtained tweets with upper limit'));
  });
});
|
//=require ./vendors/jquery-1.8.2.min
//=require ./vendors/jquery-ui-1.8.23.min
//=require ./vendors/underscore-min
//=require ./vendors/backbone-min
//=require ./vendors/chosen
//=require ./vendors/jath
//=require ./vendors/moment
//=require ./vendors/jquery.clearsearch.js
//
//=require ./vendors/hogan-2.0.0.min
//= require_tree ./templates
//
//=require ./vendors/piecon
//
//=require pivotaltracker_api
//=require piro_storage
//
//=require piro_options
//
//= require_tree ./models
//= require_tree ./collections
//= require_tree ./views
//= require_tree ./routers |
# Publish the library to Bintray.
# SECURITY FIX: the Bintray API key was previously hard-coded on this line and
# committed to version control — that credential must be considered leaked and
# revoked. Credentials are now taken from the environment and the script fails
# with a clear message if they are missing.
./gradlew clean build bintrayUpload -PbintrayUser="${BINTRAY_USER:?set BINTRAY_USER}" -PbintrayKey="${BINTRAY_KEY:?set BINTRAY_KEY}" -PdryRun=false
|
<reponame>Cribstone/home-assistant<filename>homeassistant/components/device.py
"""
homeassistant.components.sun
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides functionality to keep track of devices.
"""
import logging
import threading
import os
import csv
import re
import json
from datetime import datetime, timedelta
import requests
import homeassistant as ha
import homeassistant.util as util
import homeassistant.external.pynetgear as pynetgear
DOMAIN_DEVICE_TRACKER = "device_tracker"
SERVICE_DEVICE_TRACKER_RELOAD = "reload_devices_csv"
STATE_CATEGORY_ALL_DEVICES = 'devices'
STATE_CATEGORY_FORMAT = 'devices.{}'
STATE_NOT_HOME = 'device_not_home'
STATE_HOME = 'device_home'
# After how much time do we consider a device not home if
# it does not show up on scans
TIME_SPAN_FOR_ERROR_IN_SCANNING = timedelta(minutes=1)
# Return cached results if last scan was less then this time ago
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=5)
# Filename to save known devices to
KNOWN_DEVICES_FILE = "known_devices.csv"
def get_categories(statemachine):
    """ Returns the categories of devices that are being tracked in the
        statemachine. """
    # NOTE(review): the final flag presumably selects full category names
    # (False) versus bare device ids (True, see get_ids) — confirm against
    # ha.get_grouped_state_cats.
    return ha.get_grouped_state_cats(statemachine, STATE_CATEGORY_FORMAT,
                                     False)
def get_ids(statemachine):
    """ Returns the devices that are being tracked in the statemachine. """
    # Same helper as get_categories but with the flag set to True (bare ids).
    return ha.get_grouped_state_cats(statemachine, STATE_CATEGORY_FORMAT, True)
def is_home(statemachine, device_id=None):
    """ Returns if any or specified device is home. """
    # Resolve which state category to query: the single device's category when
    # a device_id is given, otherwise the aggregate "all devices" category.
    if device_id:
        category = STATE_CATEGORY_FORMAT.format(device_id)
    else:
        category = STATE_CATEGORY_ALL_DEVICES

    return statemachine.is_state(category, STATE_HOME)
class DeviceTracker(object):
    """ Class that tracks which devices are home and which are not.

    Scans are triggered by bus time-change events; device states are written
    into the statemachine, and newly seen devices are appended to the
    known-devices CSV file. (Python 2 module — note the print statement in
    _read_known_devices_file.)
    """

    def __init__(self, bus, statemachine, device_scanner):
        self.statemachine = statemachine
        self.bus = bus
        self.device_scanner = device_scanner
        self.logger = logging.getLogger(__name__)
        self.lock = threading.Lock()

        # Dictionary to keep track of known devices and devices we track
        self.known_devices = {}

        # Did we encounter a valid known devices file
        self.invalid_known_devices_file = False

        self._read_known_devices_file()

        # Re-scan on every time-change event published on the bus.
        ha.track_time_change(bus,
                             lambda time:
                             self.update_devices(
                                 device_scanner.scan_devices()))

        # Expose a service so the known-devices CSV can be reloaded on demand.
        bus.register_service(DOMAIN_DEVICE_TRACKER,
                             SERVICE_DEVICE_TRACKER_RELOAD,
                             lambda service: self._read_known_devices_file())

        # Seed initial state with one immediate scan.
        self.update_devices(device_scanner.scan_devices())

    @property
    def device_state_categories(self):
        """ Returns a set containing all categories
            that are maintained for devices. """
        return set([self.known_devices[device]['category'] for device
                    in self.known_devices
                    if self.known_devices[device]['track']])

    def update_devices(self, found_devices):
        """ Update device states based on the found devices. """
        # NOTE(review): the lock is not released via try/finally here, so an
        # exception below would leave it held — confirm and consider fixing.
        self.lock.acquire()

        now = datetime.now()

        temp_tracking_devices = [device for device in self.known_devices
                                 if self.known_devices[device]['track']]

        for device in found_devices:
            # Are we tracking this device?
            if device in temp_tracking_devices:
                temp_tracking_devices.remove(device)

                self.known_devices[device]['last_seen'] = now
                self.statemachine.set_state(
                    self.known_devices[device]['category'], STATE_HOME)

        # For all devices we did not find, set state to NOT_HOME.
        # But only if they have been gone for longer than the error time span,
        # because we do not want to have stuff happening when the device does
        # not show up for 1 scan because of reboot etc.
        for device in temp_tracking_devices:
            if (now - self.known_devices[device]['last_seen'] >
               TIME_SPAN_FOR_ERROR_IN_SCANNING):

                self.statemachine.set_state(
                    self.known_devices[device]['category'],
                    STATE_NOT_HOME)

        # Get the currently used statuses
        states_of_devices = [self.statemachine.get_state(category)['state']
                             for category in self.device_state_categories]

        # Update the all-devices category: home if any tracked device is home.
        all_devices_state = (STATE_HOME if STATE_HOME
                             in states_of_devices else STATE_NOT_HOME)

        self.statemachine.set_state(STATE_CATEGORY_ALL_DEVICES,
                                    all_devices_state)

        # If we come along any unknown devices we will write them to the
        # known devices file but only if we did not encounter an invalid
        # known devices file
        if not self.invalid_known_devices_file:
            unknown_devices = [device for device in found_devices
                               if device not in self.known_devices]

            if len(unknown_devices) > 0:
                try:
                    # If file does not exist we will write the header too
                    is_new_file = not os.path.isfile(KNOWN_DEVICES_FILE)

                    with open(KNOWN_DEVICES_FILE, 'a') as outp:
                        self.logger.info((
                            "DeviceTracker:Found {} new devices,"
                            " updating {}").format(len(unknown_devices),
                                                   KNOWN_DEVICES_FILE))

                        writer = csv.writer(outp)

                        if is_new_file:
                            writer.writerow(("device", "name", "track"))

                        for device in unknown_devices:
                            # See if the device scanner knows the name
                            temp_name = \
                                self.device_scanner.get_device_name(device)

                            name = temp_name if temp_name else "unknown_device"

                            # New devices are recorded but not tracked (0/False)
                            # until the user opts in by editing the CSV.
                            writer.writerow((device, name, 0))
                            self.known_devices[device] = {'name': name,
                                                          'track': False}

                except IOError:
                    self.logger.exception((
                        "DeviceTracker:Error updating {}"
                        "with {} new devices").format(
                        KNOWN_DEVICES_FILE, len(unknown_devices)))

        self.lock.release()

    def _read_known_devices_file(self):
        """ Parse and process the known devices file. """
        # Read known devices if file exists
        if os.path.isfile(KNOWN_DEVICES_FILE):
            self.lock.acquire()

            known_devices = {}

            with open(KNOWN_DEVICES_FILE) as inp:
                default_last_seen = datetime(1990, 1, 1)

                # Temp variable to keep track of which categories we use
                # so we can ensure we have unique categories.
                used_categories = []

                try:
                    for row in csv.DictReader(inp):
                        device = row['device']

                        row['track'] = True if row['track'] == '1' else False

                        # If we track this device setup tracking variables
                        if row['track']:
                            row['last_seen'] = default_last_seen

                            # Make sure that each device is mapped
                            # to a unique category name
                            name = util.slugify(row['name']) if row['name'] \
                                else "unnamed_device"

                            # Append _2, _3, ... until the category is unique.
                            tries = 0
                            suffix = ""
                            while True:
                                tries += 1

                                if tries > 1:
                                    suffix = "_{}".format(tries)

                                category = STATE_CATEGORY_FORMAT.format(
                                    name + suffix)

                                if category not in used_categories:
                                    break

                            row['category'] = category
                            used_categories.append(category)

                        known_devices[device] = row

                    if len(known_devices) == 0:
                        self.logger.warning(
                            "No devices to track. Please update {}.".format(
                                KNOWN_DEVICES_FILE))

                    # Remove categories that are no longer maintained
                    new_categories = set([known_devices[device]['category']
                                          for device in known_devices
                                          if known_devices[device]['track']])

                    for category in \
                        self.device_state_categories - new_categories:

                        # Python 2 print statement (debug output).
                        print "Removing ", category
                        self.statemachine.remove_category(category)

                    # File parsed, warnings given if necessary
                    # categories cleaned up, make it available
                    self.known_devices = known_devices

                    self.logger.info(
                        "DeviceTracker:Loaded devices from {}".format(
                            KNOWN_DEVICES_FILE))

                except KeyError:
                    # A missing expected column means the file layout is wrong;
                    # stop auto-appending new devices so we don't corrupt it.
                    self.invalid_known_devices_file = True
                    self.logger.warning((
                        "Invalid {} found. "
                        "We won't update it with new found devices.").
                        format(KNOWN_DEVICES_FILE))

                finally:
                    self.lock.release()
class TomatoDeviceScanner(object):
    """ This class queries a wireless router running Tomato firmware
    for connected devices.

    A description of the Tomato API can be found on
    http://paulusschoutsen.nl/blog/2013/10/tomato-api-documentation/
    """

    def __init__(self, host, username, password, http_id):
        # Prepare the POST request once; it is re-sent on every scan.
        self.req = requests.Request('POST',
                                    'http://{}/update.cgi'.format(host),
                                    data={'_http_id': http_id,
                                          'exec': 'devlist'},
                                    auth=requests.auth.HTTPBasicAuth(
                                        username, password)).prepare()

        # Matches "name = value;" lines in the router's JS-style response body.
        self.parse_api_pattern = re.compile(r"(?P<param>\w*) = (?P<value>.*);")

        self.logger = logging.getLogger(__name__)
        self.lock = threading.Lock()

        self.date_updated = None
        self.last_results = {"wldev": [], "dhcpd_lease": []}

        # Records whether the very first scan succeeded.
        self.success_init = self._update_tomato_info()

    def scan_devices(self):
        """ Scans for new devices and return a
            list containing found device ids. """
        self._update_tomato_info()

        # NOTE(review): item[1] of each wldev row appears to be the device id
        # (MAC address) — confirm against the Tomato API documentation.
        return [item[1] for item in self.last_results['wldev']]

    def get_device_name(self, device):
        """ Returns the name of the given device or None if we don't know. """
        # Make sure there are results
        if not self.date_updated:
            self._update_tomato_info()

        # dhcpd_lease rows: item[0] is the host name, item[2] matched against
        # the device id passed in.
        filter_named = [item[0] for item in self.last_results['dhcpd_lease']
                        if item[2] == device]

        if len(filter_named) == 0 or filter_named[0] == "":
            return None
        else:
            return filter_named[0]

    def _update_tomato_info(self):
        """ Ensures the information from the Tomato router is up to date.
            Returns boolean if scanning successful. """
        self.lock.acquire()

        # if date_updated is None or the date is too old we scan for new data
        if (not self.date_updated or datetime.now() - self.date_updated >
           MIN_TIME_BETWEEN_SCANS):

            self.logger.info("Tomato:Scanning")

            try:
                response = requests.Session().send(self.req, timeout=3)

                # Calling and parsing the Tomato api here. We only need the
                # wldev and dhcpd_lease values. For API description see:
                # http://paulusschoutsen.nl/
                # blog/2013/10/tomato-api-documentation/
                if response.status_code == 200:

                    for param, value in \
                        self.parse_api_pattern.findall(response.text):

                        if param == 'wldev' or param == 'dhcpd_lease':
                            # Router emits single-quoted pseudo-JSON; normalize
                            # the quotes before decoding.
                            self.last_results[param] = \
                                json.loads(value.replace("'", '"'))

                    self.date_updated = datetime.now()

                    return True

                elif response.status_code == 401:
                    # Authentication error
                    self.logger.exception((
                        "Tomato:Failed to authenticate, "
                        "please check your username and password"))

                    return False
                # NOTE(review): any other status code falls through and
                # implicitly returns None (falsy) without logging — confirm
                # whether that is intended.

            except requests.exceptions.ConnectionError:
                # We get this if we could not connect to the router or
                # an invalid http_id was supplied
                self.logger.exception((
                    "Tomato:Failed to connect to the router"
                    " or invalid http_id supplied"))

                return False

            except requests.exceptions.Timeout:
                # We get this if we could not connect to the router or
                # an invalid http_id was supplied
                self.logger.exception(
                    "Tomato:Connection to the router timed out")

                return False

            except ValueError:
                # If json decoder could not parse the response
                self.logger.exception(
                    "Tomato:Failed to parse response from router")

                return False

            finally:
                self.lock.release()

        else:
            # We acquired the lock before the IF check,
            # release it before we return True
            self.lock.release()

            return True
class NetgearDeviceScanner(object):
    """ This class queries a Netgear wireless router using the SOAP-api. """

    def __init__(self, host, username, password):
        self._api = pynetgear.Netgear(host, username, password)
        self.logger = logging.getLogger(__name__)
        self.lock = threading.Lock()

        self.date_updated = None
        self.last_results = []

        # Remember whether the initial scan succeeded.
        self.success_init = self._update_info()

    def scan_devices(self):
        """ Scans for new devices and return a
            list containing found device ids. """
        self._update_info()

        return [device.mac for device in self.last_results]

    def get_device_name(self, mac):
        """ Returns the name of the given device or None if we don't know. """
        # Refresh once if we have never completed a scan.
        if not self.date_updated:
            self._update_info()

        matches = [device.name for device in self.last_results
                   if device.mac == mac]

        return matches[0] if matches else None

    def _update_info(self):
        """ Retrieves latest information from the Netgear router.
            Returns boolean if scanning successful. """
        # The context manager replaces the explicit acquire/release pairs and
        # guarantees the lock is dropped on every path out of this method.
        with self.lock:
            stale = (not self.date_updated or
                     datetime.now() - self.date_updated > MIN_TIME_BETWEEN_SCANS)

            if stale:
                # NOTE(review): date_updated is never refreshed after a scan,
                # so every call re-queries the router; confirm whether that is
                # intended before changing it.
                self.logger.info("Netgear:Scanning")
                self.last_results = self._api.get_attached_devices()

            return True
|
<reponame>quenquen147/fortool-app
import React, { Component } from 'react';
import { View } from 'react-native';
import { Root } from "native-base";
import { createAppContainer } from 'react-navigation';
import { createStackNavigator } from 'react-navigation-stack';
import { Ionicons } from '@expo/vector-icons';
import * as Font from 'expo-font';
import { AppLoading } from "expo";
import { StatusBar } from 'react-native';
import LoginScreen from './pages/LoginForm';
import RegisterScreen from './pages/RegisterForm';
import ForgotPasswordScreeen from './pages/ForgotPassword';
import HomeScreen from './pages/Home';
import WinScreen from './pages/WinTab';
import GamesScreen from './pages/GamesTab';
import ShopScreen from './pages/ShopTab';
import ProfilScreen from './pages/ProfilTab';
class App extends Component {
constructor(props) {
super(props);
this.state = { loading: true };
}
async componentWillMount() {
await Font.loadAsync({
Roboto: require("native-base/Fonts/Roboto.ttf"),
Roboto_medium: require("native-base/Fonts/Roboto_medium.ttf")
});
this.setState({ loading: false });
}
render() {
if (this.state.loading) {
return (
<Root>
<AppLoading />
</Root>
);
}
return (
<Root>
<AppNavigator />
</Root>
);
}
}
// Builds a stack-navigator route entry whose built-in header is hidden —
// every screen in this app uses the same { title, header: null } shape.
const hiddenHeaderRoute = (screen, title) => ({
  screen,
  navigationOptions: {
    title,
    header: null, // hide the default navigation header
  },
});

const AppNavigator = createStackNavigator(
  {
    Login: hiddenHeaderRoute(LoginScreen, 'Se connecter'),
    ForgotPsw: ForgotPasswordScreeen,
    Register: hiddenHeaderRoute(RegisterScreen, 'Inscription'),
    Home: hiddenHeaderRoute(HomeScreen, 'Accueil'),
    Win: hiddenHeaderRoute(WinScreen, 'Gagner'),
    Games: hiddenHeaderRoute(GamesScreen, 'Jouer'),
    Shop: hiddenHeaderRoute(ShopScreen, 'Boutique'),
    Profil: hiddenHeaderRoute(ProfilScreen, 'Profil'),
  },
  {
    headerMode: 'none',
    transitionConfig: () => ({
      transitionSpec: {
        duration: 0, // zero duration disables the screen-transition animation
      },
    }),
    navigationOptions: {
      headerVisible: false,
      gesturesEnabled: false
    },
    initialRouteName: 'Login',
  }
);

export default createAppContainer(AppNavigator);
# Load the shared test helpers from tests.zsh in this script's own directory
# (${0:A:h} is the absolute, symlink-resolved directory of the current script).
source ${0:A:h}/tests.zsh
|
#! /bin/sh
# Apply any pending database migrations, then start the Flask server on :8000.
# Fail fast: if a migration step errors out we must not serve a stale schema.
set -e

flask db migrate
flask db upgrade
# exec replaces the shell so the server receives signals (e.g. SIGTERM from a
# container runtime) directly.
exec python3 -m flask run --host=0.0.0.0 --port=8000
|
<reponame>nabeelkhan/Oracle-DBA-Life
-- ***************************************************************************
-- File: 5_24a.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
SPOOL 5_24a.lis

-- Package body with two overloaded customer_csz functions: one keyed by
-- customer id, one by customer name. Both return the customer's
-- "city, state zip country" address line as a single formatted string.
CREATE OR REPLACE PACKAGE BODY inline_pkg IS
   -----------------------------------------------------------------
   FUNCTION customer_csz (p_cust_id_num s_customer.customer_id%TYPE)
      RETURN VARCHAR2 IS

      -- The address is formatted inside the SQL so PL/SQL fetches one value.
      CURSOR cur_customer IS
         SELECT RTRIM(city || ', '|| state) ||
                RTRIM(' ' || zip_code) ||
                RTRIM(' ' || country)
         FROM   s_customer
         WHERE  customer_id = p_cust_id_num;

      lv_citystatezip_txt VARCHAR2(75);
   BEGIN
      -- Fetch the already formatted string for the customer
      OPEN  cur_customer;
      FETCH cur_customer INTO lv_citystatezip_txt;
      CLOSE cur_customer;
      RETURN lv_citystatezip_txt;
   END customer_csz;
   -----------------------------------------------------------------
   -- Overload: identical logic, but the lookup key is the customer name.
   FUNCTION customer_csz (p_cust_name_txt s_customer.customer_name%TYPE)
      RETURN VARCHAR2 IS

      CURSOR cur_customer IS
         SELECT RTRIM(city || ', '|| state) ||
                RTRIM(' ' || zip_code) ||
                RTRIM(' ' || country)
         FROM   s_customer
         WHERE  customer_name = p_cust_name_txt;

      lv_citystatezip_txt VARCHAR2(75);
   BEGIN
      -- Fetch the already formatted string for the customer
      OPEN  cur_customer;
      FETCH cur_customer INTO lv_citystatezip_txt;
      CLOSE cur_customer;
      RETURN lv_citystatezip_txt;
   END customer_csz;
   -----------------------------------------------------------------
END inline_pkg;
/
SPOOL OFF
|
package logical
import (
"fmt"
md "github.com/jacobsimpson/mtsql/metadata"
)
// Operation is a node in a logical relational-algebra query plan.
// Implementations expose their child plans, the columns they make available
// to parents (Provides) and the columns they need as input (Requires).
type Operation interface {
	Children() []Operation
	Clone(...Operation) Operation
	Provides() []*md.Column
	Requires() []*md.Column
	String() string
}

// Difference is the set difference of two sub-plans (LHS minus RHS).
type Difference struct {
	LHS Operation
	RHS Operation
}

// Intersection is the set intersection of two sub-plans.
type Intersection struct {
	LHS Operation
	RHS Operation
}

// Product is the cartesian product of two sub-plans.
type Product struct {
	LHS Operation
	RHS Operation
}

// Union is the set union of two sub-plans.
type Union struct {
	LHS Operation
	RHS Operation
}

// Selection filters the rows of its child plan; requires holds the columns
// read by the selection predicate.
type Selection struct {
	Child    Operation
	requires []*md.Column
}

// NewSelection builds a Selection over child whose predicate reads the given
// columns.
func NewSelection(child Operation, requires []*md.Column) *Selection {
	return &Selection{
		Child:    child,
		requires: requires,
	}
}

// Projection narrows its child's output to the given columns.
type Projection struct {
	Child   Operation
	columns []*md.Column
}

// NewProjection builds a Projection of child onto the given columns.
func NewProjection(child Operation, columns []*md.Column) *Projection {
	return &Projection{
		Child:   child,
		columns: columns,
	}
}

// Distinct removes duplicate rows from its child plan.
type Distinct struct {
	Child Operation
}

// Sort orders the rows produced by its child plan.
type Sort struct {
	Child Operation
}

// Source is a leaf operation that reads a named base relation.
type Source struct {
	Name     string
	Relation *md.Relation
}
// ---- Union ----------------------------------------------------------------

func (o *Union) Children() []Operation {
	return []Operation{o.LHS, o.RHS}
}

// Clone rebuilds the node over exactly two replacement children.
func (o *Union) Clone(children ...Operation) Operation {
	if len(children) != 2 {
		panic("wrong number of children")
	}
	return &Union{
		LHS: children[0],
		RHS: children[1],
	}
}

func (o *Union) String() string {
	return fmt.Sprintf("Union{LHS: %s, RHS: %s}", o.LHS, o.RHS)
}

// Provides: set operations output the schema of the left operand; they
// require no columns of their own.
func (o *Union) Provides() []*md.Column { return o.LHS.Provides() }
func (o *Union) Requires() []*md.Column { return []*md.Column{} }

// ---- Intersection ---------------------------------------------------------

func (o *Intersection) Children() []Operation {
	return []Operation{o.LHS, o.RHS}
}

func (o *Intersection) Clone(children ...Operation) Operation {
	if len(children) != 2 {
		panic("wrong number of children")
	}
	return &Intersection{
		LHS: children[0],
		RHS: children[1],
	}
}

func (o *Intersection) String() string {
	return fmt.Sprintf("Intersection{LHS: %s, RHS: %s}", o.LHS, o.RHS)
}

func (o *Intersection) Provides() []*md.Column { return o.LHS.Provides() }
func (o *Intersection) Requires() []*md.Column { return []*md.Column{} }

// ---- Difference -----------------------------------------------------------

func (o *Difference) Children() []Operation {
	return []Operation{o.LHS, o.RHS}
}

func (o *Difference) Clone(children ...Operation) Operation {
	if len(children) != 2 {
		panic("wrong number of children")
	}
	return &Difference{
		LHS: children[0],
		RHS: children[1],
	}
}

func (o *Difference) String() string {
	return fmt.Sprintf("Difference{LHS: %s, RHS: %s}", o.LHS, o.RHS)
}

func (o *Difference) Provides() []*md.Column { return o.LHS.Provides() }
func (o *Difference) Requires() []*md.Column { return []*md.Column{} }

// ---- Selection ------------------------------------------------------------

func (o *Selection) Children() []Operation {
	return []Operation{o.Child}
}

// Clone preserves the predicate's required columns while swapping the child.
func (o *Selection) Clone(children ...Operation) Operation {
	if len(children) != 1 {
		panic("wrong number of children")
	}
	return &Selection{
		Child:    children[0],
		requires: o.requires,
	}
}

func (o *Selection) String() string {
	return fmt.Sprintf("Selection{Child: %s}", o.Child)
}

// A selection passes its child's schema through unchanged but needs the
// predicate's columns as input.
func (o *Selection) Provides() []*md.Column { return o.Child.Provides() }
func (o *Selection) Requires() []*md.Column { return o.requires }

// ---- Projection -----------------------------------------------------------

func (o *Projection) Children() []Operation {
	return []Operation{o.Child}
}

func (o *Projection) Clone(children ...Operation) Operation {
	if len(children) != 1 {
		panic("wrong number of children")
	}
	return &Projection{
		Child:   children[0],
		columns: o.columns,
	}
}

func (o *Projection) String() string {
	return fmt.Sprintf("Projection{Child: %s}", o.Child)
}

// A projection both outputs and consumes exactly its column list.
func (o *Projection) Provides() []*md.Column { return o.columns }
func (o *Projection) Requires() []*md.Column { return o.columns }

// ---- Product --------------------------------------------------------------

func (o *Product) Children() []Operation {
	return []Operation{o.LHS, o.RHS}
}

func (o *Product) Clone(children ...Operation) Operation {
	if len(children) != 2 {
		panic("wrong number of children")
	}
	return &Product{
		LHS: children[0],
		RHS: children[1],
	}
}

func (o *Product) String() string {
	return fmt.Sprintf("Product{LHS: %s, RHS: %s}", o.LHS, o.RHS)
}

// A product outputs the concatenation of both operands' schemas.
func (o *Product) Provides() []*md.Column { return append(o.LHS.Provides(), o.RHS.Provides()...) }
func (o *Product) Requires() []*md.Column { return []*md.Column{} }

// ---- Distinct -------------------------------------------------------------

func (o *Distinct) Children() []Operation {
	return []Operation{o.Child}
}

func (o *Distinct) Clone(children ...Operation) Operation {
	if len(children) != 1 {
		panic("wrong number of children")
	}
	return &Distinct{Child: children[0]}
}

func (o *Distinct) String() string {
	return fmt.Sprintf("Distinct{Child: %s}", o.Child)
}

func (o *Distinct) Provides() []*md.Column { return o.Child.Provides() }
func (o *Distinct) Requires() []*md.Column { return []*md.Column{} }

// ---- Sort -----------------------------------------------------------------

func (o *Sort) Children() []Operation {
	return []Operation{o.Child}
}

func (o *Sort) Clone(children ...Operation) Operation {
	if len(children) != 1 {
		panic("wrong number of children")
	}
	return &Sort{Child: children[0]}
}

func (o *Sort) String() string {
	return fmt.Sprintf("Sort{Child: %s}", o.Child)
}

func (o *Sort) Provides() []*md.Column { return o.Child.Provides() }
func (o *Sort) Requires() []*md.Column { return []*md.Column{} }

// ---- Source (leaf) --------------------------------------------------------

func (o *Source) Children() []Operation {
	return []Operation{}
}

func (o *Source) Clone(children ...Operation) Operation {
	if len(children) != 0 {
		panic("wrong number of children")
	}
	return &Source{
		Name:     o.Name,
		Relation: o.Relation,
	}
}

func (o *Source) String() string {
	return fmt.Sprintf("Source{Name: %q, relation: %s}", o.Name, o.Relation)
}

// Provides returns the base relation's columns, or an empty slice when the
// relation has not been resolved yet.
func (o *Source) Provides() []*md.Column {
	if o.Relation == nil {
		return []*md.Column{}
	}
	return o.Relation.Columns
}

func (o *Source) Requires() []*md.Column { return []*md.Column{} }
|
package com.momo.mapper.mapper.manual;
import com.momo.mapper.dataobject.LoginLogDO;
public interface LoginLogMapper {

    /**
     * Inserts a login-log record.
     *
     * <p>NOTE(review): per the usual MyBatis {@code insertSelective}
     * convention this presumably writes only the non-null fields of the DO —
     * confirm against the mapper XML.
     *
     * @param loginLogDO the login-log record to persist
     * @return the number of rows inserted
     */
    int insertSelective(LoginLogDO loginLogDO);
}
|
import Big from 'big.js';
// Applies one calculator operation to two operand strings using
// arbitrary-precision arithmetic (big.js). Empty/missing operands are
// treated as zero; results come back as strings.
const operate = (numberOne, numberTwo, operation) => {
  const left = numberOne ? Big(numberOne) : Big('0');
  const right = numberTwo ? Big(numberTwo) : Big('0');

  if (operation === '+') {
    return left.plus(right).toString();
  }
  if (operation === '-') {
    return left.minus(right).toString();
  }
  if (operation === '×') {
    return left.times(right).toString();
  }
  if (operation === '÷') {
    // Division by zero is reported as an error string, not an exception.
    return right.eq('0') ? 'Error - Division by 0' : left.div(right).toString();
  }
  if (operation === '%') {
    return left.times(right.div(100)).toString();
  }

  // Unrecognized operation — mirrors the original switch's fall-through.
  return {};
};

export default operate;
|
import React from 'react';
import { MDBBtn, MDBCard, MDBCardBody, MDBCardImage, MDBCardTitle, MDBCardText, MDBCol, MDBRow } from 'mdbreact';
// Presentational card for a single book search result.
// Props: id, title, author, description, image, link,
//        saved (boolean), bookSave / bookUnsave (click handlers).
const Card = (props) => {
  // Toggle handler: unsave if the book is already saved, otherwise save it.
  const onClickHandler = (e) => {
    if (props.saved) {
      props.bookUnsave(e)
    } else {
      props.bookSave(e)
    }
  }

  return (
    <MDBCol className="col-12">
      <MDBCard className="mb-3">
        <MDBCardBody>
          <div className="d-flex flex-row">
            <MDBCardTitle className="flex-grow-1">{props.title}</MDBCardTitle>
            {/* open-book button links out to the book's page */}
            <div>
              <MDBBtn size="sm" className="p-2 rounded-pill" href={props.link} target="_blank"><i className="fas fa-book-open"></i></MDBBtn>
            </div>
            {/* saved button */}
            <div>
              <MDBBtn
                onClick={(e) => {onClickHandler(e)}}
                id={props.id}
                size="sm"
                className="p-2 rounded-pill" >
                {/* solid bookmark when saved, outline when not */}
                <i className={props.saved ? "fas fa-bookmark" : "far fa-bookmark"} id={props.id}></i>
              </MDBBtn>
            </div>
          </div>
          <MDBCardText>
            {props.author}
          </MDBCardText>
          <MDBRow>
            <MDBCol className="col-12 col-md-2">
              <MDBCardImage className="img-fluid" src={props.image} waves />
            </MDBCol>
            <MDBCol className="col-12 col-md-10">
              <MDBCardText>
                {props.description}
              </MDBCardText>
            </MDBCol>
          </MDBRow>
        </MDBCardBody>
      </MDBCard>
    </MDBCol>
  )
}
#!/bin/bash
# Stages the Pioneer-kit plugin binaries from $BIN_DIR into the data/lib tree
# next to this script. nounset makes an unset BIN_DIR a hard error; errexit
# stops on the first failed copy.
set -o nounset
set -o errexit

cd "$(dirname "$0")"

# Fix: quote all paths so the script survives directories containing spaces.
DATA_LIB="$PWD/../data/lib"
mkdir -p "$DATA_LIB/plugins/editors/"
mkdir -p "$DATA_LIB/plugins/tools/kitPlugins/"

# The kit library glob (versioned .so files) must stay outside the quotes.
cp "$BIN_DIR"/librobots-pioneer-kit.so* "$DATA_LIB/"
cp "$BIN_DIR/plugins/editors/libpioneerMetamodel.so" "$DATA_LIB/plugins/editors/"
cp "$BIN_DIR/plugins/tools/kitPlugins/librobots-pioneer-interpreter.so" "$DATA_LIB/plugins/tools/kitPlugins/"
cp "$BIN_DIR/plugins/tools/kitPlugins/librobots-pioneer-lua-generator.so" "$DATA_LIB/plugins/tools/kitPlugins/"
|
import data from '../../docs/default-firebase-data.json';
import { allKeys } from './utils';

// Shape test for the seed data's "videos" collection.
describe('video', () => {
  it('matches the shape of the default data', () => {
    const videos = Object.values(data['videos']);
    // Every video entry must carry exactly these keys.
    // NOTE(review): presumably allKeys returns the union of keys across the
    // entries — verify in ./utils.
    const keys = ['speakers', 'thumbnail', 'title', 'youtubeId'];
    // The hard-coded count couples this test to the seed file; update it
    // whenever docs/default-firebase-data.json changes.
    expect(videos).toHaveLength(22);
    expect(allKeys(videos)).toStrictEqual(keys);
  });
});
//# sourceMappingURL=video.test.js.map
#!/bin/bash
# Renames this host: validates the new name, rewrites /etc/hosts and
# /etc/hostname, applies it, and regenerates the machine's SSH host keys.

usage() {
    echo " Usage: $0 HOSTNAME"
    exit 1
}

# testar parametros — quote $1 so a multi-word argument doesn't break test(1).
[ -z "$1" ] && usage

# testar sintaxe valida: only alphanumerics and hyphens are allowed.
# Fix: the previous class [^a-z,A-Z,0-9-] treated the commas as literal set
# members, so hostnames containing commas were wrongly accepted.
if [[ "$1" =~ [^a-zA-Z0-9-] ]]; then
    echo " HOSTNAME contains non-alphanumeric characters"
    usage
elif [ ${#1} -gt 63 ]; then
    # DNS labels are limited to 63 characters (the check allows exactly 63).
    echo " HOSTNAME must have at most 63 chars"
    usage
fi

# alterar hostname local
chost="$( hostname -s )"
iphost_old="$( ip r s | tail -n1 | awk '{print $1}' )"
iphost="$( head -n2 /etc/hosts | tail -n1 | awk '{print $NF}' )"
sed -i "s/${chost}/${1}/g" /etc/hosts
sed -i "s/${iphost}/${iphost_old}/g" /etc/hosts
sed -i "s/${chost}/${1}/g" /etc/hostname
export HOSTNAME="${1}"
invoke-rc.d hostname.sh restart
invoke-rc.d networking force-reload
hostnamectl set-hostname "$1"

# re-gerar chaves SSH: the renamed machine gets a fresh SSH identity.
rm -f /etc/ssh/ssh_host_* 2> /dev/null
dpkg-reconfigure openssh-server --force &> /dev/null

# mensagem final
echo -n "Apenas realize um novo login, para corrigir o hostname do prompt!"
|
#!/bin/bash
# ========== Experiment Seq. Idx. 1645 / 30.2.1.0 / N. 0 - _S=30.2.1.0 D1_N=10 a=-1 b=1 c=-1 d=1 e=-1 f=-1 D3_N=4 g=1 h=-1 i=-1 D4_N=1 j=1 D5_N=0 ==========
# Generated experiment runner: validates git cleanliness, checks dependency
# inputs, takes a lock so only one instance runs, then computes metrics and
# appends a CSV row to the aggregate ANOVA results file.
set -u
# Prints header
echo -e '\n\n========== Experiment Seq. Idx. 1645 / 30.2.1.0 / N. 0 - _S=30.2.1.0 D1_N=10 a=-1 b=1 c=-1 d=1 e=-1 f=-1 D3_N=4 g=1 h=-1 i=-1 D4_N=1 j=1 D5_N=0 ==========\n\n'
# Prepares all environment variables
JBHI_DIR="$HOME/jbhi-special-issue"
RESULTS_DIR="$JBHI_DIR/results"
# Constant branch: this particular experiment was generated with SVM disabled.
if [[ "No" == "Yes" ]]; then
SVM_SUFFIX="svm"
PREDICTIONS_FORMAT="isbi"
else
SVM_SUFFIX="nosvm"
PREDICTIONS_FORMAT="titans"
fi
RESULTS_PREFIX="$RESULTS_DIR/deep.10.layer.4.test.1.index.1645.$SVM_SUFFIX"
RESULTS_PATH="$RESULTS_PREFIX.results.txt"
# ...variables expected by jbhi-checks.include.sh and jbhi-footer.include.sh
SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
LIST_OF_INPUTS="$RESULTS_PREFIX.finish.txt"
# ...this experiment is a little different --- only one master procedure should run, so there's only a master lock file
METRICS_TEMP_PATH="$RESULTS_DIR/this_results.anova.txt"
METRICS_PATH="$RESULTS_DIR/all_results.anova.txt"
START_PATH="$METRICS_PATH.start.txt"
# "-" disables the finish-marker logic below.
FINISH_PATH="-"
LOCK_PATH="$METRICS_PATH.running.lock"
LAST_OUTPUT="$METRICS_PATH"
mkdir -p "$RESULTS_DIR"
#
# Assumes that the following environment variables where initialized
# SOURCES_GIT_DIR="$JBHI_DIR/jbhi-special-issue"
# LIST_OF_INPUTS="$DATASET_DIR/finish.txt:$MODELS_DIR/finish.txt:"
# START_PATH="$OUTPUT_DIR/start.txt"
# FINISH_PATH="$OUTPUT_DIR/finish.txt"
# LOCK_PATH="$OUTPUT_DIR/running.lock"
# LAST_OUTPUT="$MODEL_DIR/[[[:D1_MAX_NUMBER_OF_STEPS:]]].meta"
EXPERIMENT_STATUS=1
STARTED_BEFORE=No
# Checks if code is stable, otherwise alerts scheduler
pushd "$SOURCES_GIT_DIR" >/dev/null
GIT_STATUS=$(git status --porcelain)
GIT_COMMIT=$(git log | head -n 1)
popd >/dev/null
if [ "$GIT_STATUS" != "" ]; then
echo 'FATAL: there are uncommitted changes in your git sources file' >&2
echo ' for reproducibility, experiments only run on committed changes' >&2
echo >&2
echo ' Git status returned:'>&2
echo "$GIT_STATUS" >&2
exit 162
fi
# The experiment is already finished - exits with special code so scheduler won't retry
if [[ "$FINISH_PATH" != "-" ]]; then
if [[ -e "$FINISH_PATH" ]]; then
echo 'INFO: this experiment has already finished' >&2
exit 163
fi
fi
# The experiment is not ready to run due to dependencies - alerts scheduler
if [[ "$LIST_OF_INPUTS" != "" ]]; then
# NOTE(review): the IFS=':' prefix on a bare assignment persists for the rest
# of the script in bash — confirm later unquoted expansions are unaffected.
IFS=':' tokens_of_input=( $LIST_OF_INPUTS )
input_missing=No
for input_to_check in ${tokens_of_input[*]}; do
if [[ ! -e "$input_to_check" ]]; then
echo "ERROR: input $input_to_check missing for this experiment" >&2
input_missing=Yes
fi
done
if [[ "$input_missing" != No ]]; then
exit 164
fi
fi
# Sets trap to return error code if script is interrupted before successful finish
LOCK_SUCCESS=No
FINISH_STATUS=161
# EXIT trap: releases the lock, reports a status message, and makes the
# script's exit code reflect the experiment outcome (160 = success).
function finish_trap {
if [[ "$LOCK_SUCCESS" == "Yes" ]]; then
rmdir "$LOCK_PATH" &> /dev/null
fi
if [[ "$FINISH_STATUS" == "165" ]]; then
echo 'WARNING: experiment discontinued because other process holds its lock' >&2
else
if [[ "$FINISH_STATUS" == "160" ]]; then
echo 'INFO: experiment finished successfully' >&2
else
[[ "$FINISH_PATH" != "-" ]] && rm -f "$FINISH_PATH"
echo 'ERROR: an error occurred while executing the experiment' >&2
fi
fi
exit "$FINISH_STATUS"
}
trap finish_trap EXIT
# While running, locks experiment so other parallel threads won't attempt to run it too
# (mkdir is atomic, so it doubles as a portable mutex).
if mkdir "$LOCK_PATH" --mode=u=rwx,g=rx,o=rx &>/dev/null; then
LOCK_SUCCESS=Yes
else
echo 'WARNING: this experiment is already being executed elsewhere' >&2
FINISH_STATUS="165"
exit
fi
# If the experiment was started before, do any cleanup necessary
if [[ "$START_PATH" != "-" ]]; then
if [[ -e "$START_PATH" ]]; then
echo 'WARNING: this experiment is being restarted' >&2
STARTED_BEFORE=Yes
fi
#...marks start
date -u >> "$START_PATH"
echo GIT "$GIT_COMMIT" >> "$START_PATH"
fi
if [[ "$STARTED_BEFORE" == "Yes" ]]; then
# If the experiment was started before, do any cleanup necessary
echo -n
else
# First run: write the CSV header for the aggregate metrics file.
echo "D1_N;D3_N;D4_N;a;b;c;d;e;f;g;h;i;j;m_ap;m_auc;m_tn;m_fp;m_fn;m_tp;m_tpr;m_fpr;k_ap;k_auc;k_tn;k_fp;k_fn;k_tp;k_tpr;k_fpr;isbi_auc" > "$METRICS_PATH"
fi
# Computes the metrics for this experiment's predictions into a temp file.
python \
"$SOURCES_GIT_DIR/etc/compute_metrics.py" \
--metadata_file "$SOURCES_GIT_DIR/data/all-metadata.csv" \
--predictions_format "$PREDICTIONS_FORMAT" \
--metrics_file "$METRICS_TEMP_PATH" \
--predictions_file "$RESULTS_PATH"
EXPERIMENT_STATUS="$?"
# Appends this experiment's factor levels plus the computed metrics row.
echo -n "10;4;1;" >> "$METRICS_PATH"
echo -n "-1;1;-1;1;-1;-1;1;-1;-1;1;" >> "$METRICS_PATH"
tail "$METRICS_TEMP_PATH" -n 1 >> "$METRICS_PATH"
#
#...starts training
if [[ "$EXPERIMENT_STATUS" == "0" ]]; then
if [[ "$LAST_OUTPUT" == "" || -e "$LAST_OUTPUT" ]]; then
if [[ "$FINISH_PATH" != "-" ]]; then
date -u >> "$FINISH_PATH"
echo GIT "$GIT_COMMIT" >> "$FINISH_PATH"
fi
FINISH_STATUS="160"
fi
fi
|
/**
 * A custom <option> element (is="emr-select-option") whose selection state is
 * mirrored in the `selected` attribute and toggled by mouse interaction.
 */
export class SelectOption extends HTMLOptionElement {
    constructor(){
        super();
        this.selected = false;
        // Pre-bound handler references so the exact same functions can be
        // removed in disconnectedCallback (the original passed no handler to
        // removeEventListener, which is a silent no-op).
        this._onMouseUp = this.toggleSelected.bind(this);
        this._onMouseDown = e => e.preventDefault();
    }

    static get observedAttributes() {
        return ['selected'];
    }

    attributeChangedCallback(attrName, oldVal, newVal){
        // BUG FIX: the original compared the unrelated global `name` instead
        // of the `attrName` parameter, so this callback never fired.
        if (attrName == 'selected' && oldVal != newVal) {
            // Attribute presence (newVal !== null) means selected; the raw
            // value is '' for a present boolean attribute, which is falsy.
            this.selected = newVal !== null;
        }
    }

    connectedCallback () {
        this.addEventListener("mouseup", this._onMouseUp);
        this.addEventListener("mousedown", this._onMouseDown);
    }

    disconnectedCallback () {
        this.removeEventListener("mouseup", this._onMouseUp);
        this.removeEventListener("mousedown", this._onMouseDown);
    }

    // Flips selection state and returns focus to the parent select.
    toggleSelected(e) {
        e.preventDefault();
        this.selected = !this.selected;
        this.parentElement.focus();
    }

    get selected() {
        return this.hasAttribute('selected')
    }

    set selected(isSelected) {
        if (isSelected){
            this.setAttribute('selected', '');
        } else {
            this.removeAttribute('selected');
        }
    }
}
customElements.define('emr-select-option', SelectOption, { extends: 'option'});
#!/bin/bash
#$ -q 1-day
#$ -cwd
#$ -l h_vmem=12G
# SGE array-job wrapper around MuTect: each task calls somatic variants on one
# genomic interval (selected from CHUNKFILE by SGE_TASK_ID) and records an md5
# checksum so already-completed chunks are skipped when the job is rerun.
source $BIN_PATH/job.config
# Build the MuTect command line; the GATK key variant disables phone-home.
if [[ -f $GATK_KEY ]]; then
MUTECT="$JAVA7 -Xmx8g -jar $MUTECT_JAR -T MuTect -et NO_ET -K $GATK_KEY
-log /dev/stderr --logging_level ERROR --only_passing_calls"
else
MUTECT="$JAVA7 -Xmx8g -jar $MUTECT_JAR -T MuTect
-log /dev/stderr --logging_level ERROR --only_passing_calls"
fi
# Positional arguments: reference FASTA, tumor/clone BAM, normal/tissue BAM,
# output prefix, and the file listing one interval per line.
REF=$1
CLONEBAM=$2
TISSUEBAM=$3
OUTPREFIX=$4
CHUNKFILE=$5
# Interval for this array task; ':' is replaced by '-' to keep filenames safe.
INTERVAL=$(awk "NR==${SGE_TASK_ID}" $CHUNKFILE)
OUTPREFIX=$OUTPREFIX.${INTERVAL/:/-}
CHECKSUMDIR=$(dirname $OUTPREFIX)/checksum
MD5PREFIX=$CHECKSUMDIR/$(basename $OUTPREFIX)
# Skip the (expensive) MuTect run when the output exists and its md5 matches.
if [[ -f $OUTPREFIX.txt && -f $MD5PREFIX.txt.md5 && \
$(md5sum $OUTPREFIX.txt|cut -f1 -d' ') = \
$(cut -f1 -d' ' $MD5PREFIX.txt.md5) ]]; then
echo "$OUTPREFIX.txt exists and matches to the checksum"
else
rm -f $OUTPREFIX.txt $MD5PREFIX.txt.md5
$MUTECT -R $REF -L $INTERVAL \
-I:tumor $CLONEBAM -I:normal $TISSUEBAM \
--out $OUTPREFIX.txt > /dev/null
# Only write the checksum after a successful run, so failures are retried.
if [[ $? = 0 ]]; then
mkdir -p $CHECKSUMDIR
md5sum $OUTPREFIX.txt > $MD5PREFIX.txt.md5
fi
fi
|
<reponame>AakashKhatu/iDontNeedThis
import requests
import random
def send_otp(number):
    """Send a freshly generated 5-digit OTP to `number` via the Fast2SMS API.

    Returns a tuple ``(ok, otp)`` where ``ok`` is True when the HTTP request
    succeeded and ``otp`` is the generated code.
    """
    url = "https://www.fast2sms.com/dev/bulk"
    otp = random.randint(10000, 99999)
    # "message" is the Fast2SMS template id; the OTP travels in the
    # template variable values.
    params = {
        "authorization": "<KEY>",
        "sender_id": "FSTSMS",
        "language": "english",
        "route": "qt",
        "numbers": number,
        "message": "8528",
        "variables": "{AA}",
        "variables_values": otp,
    }
    headers = {"cache-control": "no-cache"}
    response = requests.request("GET", url, headers=headers, params=params)
    return (response.ok, otp)
|
def print_multiplication_table(size=12):
    """Print a ``size`` x ``size`` multiplication table to stdout.

    Each row i lists i*j for j in 1..size, tab-separated, one row per line.

    Args:
        size: Largest factor in the table. Defaults to 12, matching the
            original hard-coded behavior.
    """
    for i in range(1, size + 1):
        for j in range(1, size + 1):
            print(i * j, end="\t")
        print()

print_multiplication_table()
function distributeHorizontalLeft(numItems: number): number[] {
const positions: number[] = [];
for (let i = 0; i < numItems; i++) {
positions.push(i);
}
return positions;
} |
#!/bin/sh
# Tests whether all code examples in the local .md files work: the ```lua
# fenced blocks of each file are extracted and fed to the given executable.
if [ $# -ne 1 ]; then
    echo "Usage: test.sh [lean-executable-path]"
    exit 1
fi
ulimit -s unlimited
LEAN=$1
NUM_ERRORS=0
# Iterate with a glob instead of parsing `ls` output (robust to odd filenames).
for f in *.md; do
    echo "-- testing $f"
    # Keep only the contents of ```lua fenced blocks; blank lines are emitted
    # everywhere else so line numbers in error messages still match the .md.
    awk 'BEGIN{ in_block = 0 } !/```/{ if (in_block == 1) print $0; else print "" } /```/ && !/```lua/{ in_block = 0; print "" } /```lua/{ in_block = 1; print "" }' "$f" > "$f.lua"
    if "$LEAN" "$f.lua" > "$f.produced.out"; then
        echo "-- worked"
    else
        echo "ERROR executing $f.lua, produced output is at $f.produced.out"
        NUM_ERRORS=$(($NUM_ERRORS+1))
    fi
done
if [ $NUM_ERRORS -gt 0 ]; then
    echo "-- Number of errors: $NUM_ERRORS"
    exit 1
else
    echo "-- Passed"
    exit 0
fi
|
<filename>sshd-sftp/src/test/java/org/apache/sshd/sftp/client/SftpOutputStreamWithChannel.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.sftp.client;
import java.io.IOException;
import java.util.Collection;
import java.util.Objects;
import org.apache.sshd.common.util.io.OutputStreamWithChannel;
import org.apache.sshd.sftp.client.SftpClient.CloseableHandle;
import org.apache.sshd.sftp.client.SftpClient.OpenMode;
/**
* Implements an output stream for a given remote file
*
* @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
*/
public class SftpOutputStreamWithChannel extends OutputStreamWithChannel implements SftpClientHolder {
    private final SftpClient client;
    private final String path;
    // Single-byte scratch array reused by write(int) to avoid per-call allocation.
    private final byte[] bb = new byte[1];
    // Local accumulation buffer; bytes are only sent to the server on flush().
    private final byte[] buffer;
    // Number of bytes currently pending in `buffer`.
    private int index;
    // Remote file handle; null once the stream has been closed.
    private CloseableHandle handle;
    // Absolute write offset within the remote file.
    private long offset;

    public SftpOutputStreamWithChannel(SftpClient client, int bufferSize, String path,
                                       Collection<OpenMode> mode) throws IOException {
        this.client = Objects.requireNonNull(client, "No SFTP client instance");
        this.path = path;
        buffer = new byte[bufferSize];
        // Opens the remote file immediately; the handle stays open until close().
        handle = client.open(path, mode);
    }

    @Override
    public final SftpClient getClient() {
        return client;
    }

    /**
     * The remotely accessed file path
     *
     * @return Remote file path
     */
    public final String getPath() {
        return path;
    }

    @Override
    public boolean isOpen() {
        return (handle != null) && handle.isOpen();
    }

    @Override
    public void write(int b) throws IOException {
        bb[0] = (byte) b;
        write(bb, 0, 1);
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        if (!isOpen()) {
            throw new IOException("write(" + getPath() + ")[len=" + len + "] stream is closed");
        }

        // Copy into the local buffer in chunks, flushing whenever it fills.
        do {
            int nb = Math.min(len, buffer.length - index);
            System.arraycopy(b, off, buffer, index, nb);
            index += nb;
            if (index == buffer.length) {
                flush();
            }
            off += nb;
            len -= nb;
        } while (len > 0);
    }

    @Override
    public void flush() throws IOException {
        if (!isOpen()) {
            throw new IOException("flush(" + getPath() + ") stream is closed");
        }

        // Send the pending bytes at the current remote offset, then advance it.
        client.write(handle, offset, buffer, 0, index);
        offset += index;
        index = 0;
    }

    @Override
    public void close() throws IOException {
        if (isOpen()) {
            try {
                try {
                    // Push any buffered remainder before releasing the handle.
                    if (index > 0) {
                        flush();
                    }
                } finally {
                    handle.close();
                }
            } finally {
                // Null the handle even if close failed so isOpen() is false.
                handle = null;
            }
        }
    }
}
|
/**
 * Collects warning messages and exposes snapshot-style access to them.
 * Backed by a plain {@code ArrayList}, so instances are not thread-safe.
 */
public class WarningSystem {
    /** Backing store for recorded warnings, kept in insertion order. */
    private final List<String> recorded;

    /** Creates an empty warning system. */
    public WarningSystem() {
        this.recorded = new ArrayList<>();
    }

    /**
     * Records a warning message.
     *
     * @param warning the message to store
     */
    public void addWarning(String warning) {
        recorded.add(warning);
    }

    /**
     * Returns a defensive copy of all warnings recorded so far; mutating the
     * returned list does not affect this system.
     *
     * @return snapshot of the warnings in insertion order
     */
    public List<String> getWarnings() {
        return new ArrayList<>(recorded);
    }

    /**
     * @return the number of warnings recorded so far
     */
    public int countWarnings() {
        return recorded.size();
    }
}
<filename>tests/lib/rules/prefer-parameter-instance.spec.ts
import { RuleTester } from "../../util";
import rule from "../../../lib/rules/prefer-parameter-instance";

// Unit tests for `prefer-parameter-instance`: valid cases use a type
// reference (or, with allowKeywords, a keyword type) as the generic argument;
// invalid cases pass an inline object-literal type and must report
// the `onlyReference` message.
const tester = new RuleTester({
  parser: "@typescript-eslint/parser",
  parserOptions: {
    sourceType: "module",
    ecmaFeatures: {
      jsx: true,
    },
  },
});

tester.run("prefer-parameter-instance", rule, {
  valid: [
    // Type-reference generic argument is allowed under onlyReference.
    {
      code: `
      const user: User<UserProps> = {
        age: 'name',
        info: {
          age: 20
        }
      }
      `,
      options: [
        {
          onlyReference: true,
        },
      ],
    },
    // With onlyReference disabled, inline object types are fine.
    {
      code: `
      const user: User<{age: number}> = {
        name: 'name',
        info: {
          age: 20
        }
      }
      `,
      options: [
        {
          onlyReference: false,
        },
      ],
    },
    // Keyword types (string, number, ...) pass when allowKeywords is set.
    {
      code: `
      const user: User<string> = {
        name: 'name',
        info: {
          age: 20
        }
      }
      `,
      options: [
        {
          onlyReference: true,
          allowKeywords: true,
        },
      ],
    },
  ],
  invalid: [
    // Inline object-literal type argument must be reported.
    {
      code: `
      const user: User<{age: number}> = {
        name: 'name',
        info: {
          age: 20
        }
      }
      `,
      options: [
        {
          onlyReference: true,
        },
      ],
      errors: [{ messageId: "onlyReference" }],
    },
    // allowKeywords does not exempt object-literal type arguments.
    {
      code: `
      const user: User<{age: number}> = {
        name: 'name',
        info: {
          age: 20
        }
      }
      `,
      options: [
        {
          onlyReference: true,
          allowKeywords: true,
        },
      ],
      errors: [{ messageId: "onlyReference" }],
    },
  ],
});
|
// Generated by CoffeeScript 1.9.1
var $, SPACES_ONLY, Serialiser, TEXT_LEADING_WHITESPACE, TEXT_TRAILING_WHITESPACE, WHITESPACE_ONLY, containsNewlines, entityDecode, exports, find, firstNonWhitespaceChild, genericBranchSerialiser, genericLeafSerialiser, joinList, last, nodeSerialisers, ref, serialise, stringEscape, tagConvention;
ref = require('./helpers'), last = ref.last, find = ref.find;
$ = require('./symbols');
stringEscape = require('./stringescape');
entityDecode = require('./entitydecode');
module.exports = exports = serialise = function(parseTree) {
return new Serialiser().serialise(parseTree);
};
// Serialiser walks the parse tree, dispatching each node to nodeSerialisers
// by node type. Tracks the configured DOM object (from an optional
// @jsx pragma) and the derived React object name.
Serialiser = (function() {
  function Serialiser() {}

  Serialiser.prototype.serialise = function(parseTree) {
    var domObjectParts;
    // Honour a leading CJSX pragma, else default to React.DOM.
    if (parseTree.children && parseTree.children.length && parseTree.children[0].type === $.CJSX_PRAGMA) {
      this.domObject = parseTree.children[0].value;
    } else {
      this.domObject = 'React.DOM';
    }
    domObjectParts = this.domObject.split('.');
    if (domObjectParts.length > 0 && domObjectParts[0] !== '') {
      this.reactObject = domObjectParts[0];
    } else {
      this.reactObject = 'React';
    }
    return this.serialiseNode(parseTree);
  };

  // Dispatch a single node; every serialiser must return a string or null.
  Serialiser.prototype.serialiseNode = function(node) {
    var serialised;
    if (nodeSerialisers[node.type] == null) {
      throw new Error("unknown parseTree node type " + node.type);
    }
    serialised = nodeSerialisers[node.type].call(this, node);
    if (!(typeof serialised === 'string' || serialised === null)) {
      throw new Error("serialiser " + node.type + " didn\'t return a string");
    }
    return serialised;
  };

  // Serialises an attribute list containing {...spread} entries into a
  // React.__spread(...) call, grouping runs of plain pairs into objects.
  Serialiser.prototype.serialiseSpreadAndPairAttributes = function(children) {
    var accumulatedWhitespace, assignIndex, assignItem, assigns, assignsWithWhitespace, child, childIndex, flushPairs, j, joinedAssigns, k, lastAssignWithWhitespace, len, len1, pairAttrsBuffer, ref1, trailingWhiteplace;
    assigns = [];
    pairAttrsBuffer = [];
    // Flush buffered key/value pairs as one serialised object argument.
    flushPairs = (function(_this) {
      return function() {
        var serialisedChild, serialisedPairs;
        if (pairAttrsBuffer.length) {
          serialisedChild = _this.serialiseAttributePairs(pairAttrsBuffer);
          if (serialisedChild) {
            assigns.push({
              type: $.CS,
              value: serialisedChild
            });
          } else {
            serialisedPairs = pairAttrsBuffer.map(function(p) {
              return _this.serialiseNode(p);
            }).join('').replace('\n', '\\\n');
            assigns.push({
              type: $.CJSX_WHITESPACE,
              value: serialisedPairs
            });
          }
          return pairAttrsBuffer = [];
        }
      };
    })(this);
    // __spread mutates its first argument, so lead with an empty object
    // when the very first attribute is itself a spread.
    if (((ref1 = firstNonWhitespaceChild(children)) != null ? ref1.type : void 0) === $.CJSX_ATTR_SPREAD) {
      assigns.push({
        type: $.CS,
        value: '{}'
      });
    }
    for (childIndex = j = 0, len = children.length; j < len; childIndex = ++j) {
      child = children[childIndex];
      if (child.type === $.CJSX_ATTR_SPREAD) {
        flushPairs();
        assigns.push({
          type: $.CS,
          value: child.value
        });
      } else {
        pairAttrsBuffer.push(child);
      }
    }
    flushPairs();
    // Re-attach whitespace runs to the following argument so the generated
    // source keeps the author's original line layout.
    accumulatedWhitespace = '';
    assignsWithWhitespace = [];
    for (assignIndex = k = 0, len1 = assigns.length; k < len1; assignIndex = ++k) {
      assignItem = assigns[assignIndex];
      if (assignItem != null) {
        if (assignItem.type === $.CJSX_WHITESPACE) {
          accumulatedWhitespace += this.serialiseNode(assignItem);
        } else {
          assignsWithWhitespace.push(accumulatedWhitespace + this.serialiseNode(assignItem));
          accumulatedWhitespace = '';
        }
      }
    }
    if (assignsWithWhitespace.length) {
      lastAssignWithWhitespace = assignsWithWhitespace.pop();
      trailingWhiteplace = accumulatedWhitespace.replace('\\\n', '\n');
      assignsWithWhitespace.push(lastAssignWithWhitespace + trailingWhiteplace);
    }
    // FIX: use the already-computed join instead of recomputing it inline
    // (joinedAssigns was previously assigned but never read).
    joinedAssigns = joinList(assignsWithWhitespace);
    return "React.__spread(" + joinedAssigns + ")";
  };

  // Serialises plain key/value attribute pairs to an object literal,
  // or returns null when there are no semantic (non-whitespace) children.
  Serialiser.prototype.serialiseAttributePairs = function(children) {
    var child, childIndex, indexOfLastSemanticChild, isBeforeLastSemanticChild, ref1, semanticChildren, serialisedChild, serialisedChildren, whitespaceChildren;
    ref1 = children.reduce(function(partitionedChildren, child) {
      if (child.type === $.CJSX_WHITESPACE) {
        partitionedChildren[0].push(child);
      } else {
        partitionedChildren[1].push(child);
      }
      return partitionedChildren;
    }, [[], []]), whitespaceChildren = ref1[0], semanticChildren = ref1[1];
    indexOfLastSemanticChild = children.lastIndexOf(last(semanticChildren));
    isBeforeLastSemanticChild = function(childIndex) {
      return childIndex < indexOfLastSemanticChild;
    };
    if (semanticChildren.length) {
      serialisedChildren = (function() {
        var j, len, results;
        results = [];
        for (childIndex = j = 0, len = children.length; j < len; childIndex = ++j) {
          child = children[childIndex];
          serialisedChild = this.serialiseNode(child);
          if (child.type === $.CJSX_WHITESPACE) {
            if (containsNewlines(serialisedChild)) {
              if (isBeforeLastSemanticChild(childIndex)) {
                // Escape interior newlines so the object literal stays on
                // one logical CoffeeScript line.
                results.push(serialisedChild.replace('\n', ' \\\n'));
              } else {
                results.push(serialisedChild);
              }
            } else {
              results.push(null);
            }
          } else if (isBeforeLastSemanticChild(childIndex)) {
            results.push(serialisedChild + ', ');
          } else {
            results.push(serialisedChild);
          }
        }
        return results;
      }).call(this);
      return '{' + serialisedChildren.join('') + '}';
    } else {
      return null;
    }
  };

  return Serialiser;

})();
// Serialises a branch node by concatenating its serialised children.
genericBranchSerialiser = function(node) {
  return node.children.map((function(_this) {
    return function(child) {
      return _this.serialiseNode(child);
    };
  })(this)).join('');
};

// Serialises a leaf node as its raw value, unchanged.
genericLeafSerialiser = function(node) {
  return node.value;
};
// Tags that start lowercase or contain a hyphen are treated as DOM tags
// (quoted strings); anything else is assumed to be a component reference.
tagConvention = /^[a-z]|\-/;

// One serialiser per parse-tree node type; each is invoked with the
// Serialiser instance as `this` and returns a string or null.
nodeSerialisers = {
  ROOT: genericBranchSerialiser,
  CJSX_PRAGMA: function() {
    return "`/** @jsx " + this.domObject + " */`";
  },
  CJSX_EL: function(node) {
    var accumulatedWhitespace, child, element, j, len, ref1, serialisedChild, serialisedChildren;
    serialisedChildren = [];
    accumulatedWhitespace = '';
    ref1 = node.children;
    // Fold leading whitespace into the next non-blank child so argument
    // separators land in sensible places.
    for (j = 0, len = ref1.length; j < len; j++) {
      child = ref1[j];
      serialisedChild = this.serialiseNode(child);
      if (child != null) {
        if (serialisedChild.length === 0 || WHITESPACE_ONLY.test(serialisedChild)) {
          accumulatedWhitespace += serialisedChild;
        } else {
          serialisedChildren.push(accumulatedWhitespace + serialisedChild);
          accumulatedWhitespace = '';
        }
      }
    }
    if (serialisedChildren.length) {
      serialisedChildren[serialisedChildren.length - 1] += accumulatedWhitespace;
      accumulatedWhitespace = '';
    }
    if (tagConvention.test(node.value)) {
      element = '"' + node.value + '"';
    } else {
      element = node.value;
    }
    return this.reactObject + ".createElement(" + element + ", " + (joinList(serialisedChildren)) + ")";
  },
  // JSX comments are dropped from the output entirely.
  CJSX_COMMENT: function(node) {
    return '';
  },
  // Escaped CoffeeScript expression: serialise children and parenthesise.
  CJSX_ESC: function(node) {
    var childrenSerialised;
    childrenSerialised = node.children.map((function(_this) {
      return function(child) {
        return _this.serialiseNode(child);
      };
    })(this)).join('');
    return '(' + childrenSerialised + ')';
  },
  // Attribute lists with any {...spread} take the __spread path;
  // otherwise a plain object literal (or 'null' when empty).
  CJSX_ATTRIBUTES: function(node) {
    if (node.children.some(function(child) {
      return child.type === $.CJSX_ATTR_SPREAD;
    })) {
      return this.serialiseSpreadAndPairAttributes(node.children);
    } else {
      return this.serialiseAttributePairs(node.children) || 'null';
    }
  },
  CJSX_ATTR_PAIR: function(node) {
    return node.children.map((function(_this) {
      return function(child) {
        return _this.serialiseNode(child);
      };
    })(this)).join(': ');
  },
  CJSX_ATTR_SPREAD: function(node) {
    return node.value;
  },
  // Plain CoffeeScript fragments pass through verbatim.
  CS: genericLeafSerialiser,
  CS_COMMENT: genericLeafSerialiser,
  CS_HEREDOC: genericLeafSerialiser,
  CS_STRING: genericLeafSerialiser,
  CS_REGEX: genericLeafSerialiser,
  CS_HEREGEX: genericLeafSerialiser,
  JS_ESC: genericLeafSerialiser,
  CJSX_WHITESPACE: genericLeafSerialiser,
  // Text children: multiline text is trimmed of the purely-structural
  // leading/trailing whitespace and emitted as a heredoc string.
  CJSX_TEXT: function(node) {
    var escapedText, leftSpace, leftTrim, rightSpace, rightTrim, text, trimmedText;
    text = node.value;
    if (containsNewlines(text)) {
      if (WHITESPACE_ONLY.test(text)) {
        return text;
      } else {
        leftSpace = text.match(TEXT_LEADING_WHITESPACE);
        rightSpace = text.match(TEXT_TRAILING_WHITESPACE);
        if (leftSpace) {
          leftTrim = text.indexOf('\n');
        } else {
          leftTrim = 0;
        }
        if (rightSpace) {
          rightTrim = text.lastIndexOf('\n') + 1;
        } else {
          rightTrim = text.length;
        }
        trimmedText = text.substring(leftTrim, rightTrim);
        escapedText = stringEscape(entityDecode(trimmedText), {
          preserveNewlines: true
        });
        return '"""' + escapedText + '"""';
      }
    } else {
      if (text === '') {
        return null;
      } else {
        return '"' + stringEscape(entityDecode(text)) + '"';
      }
    }
  },
  CJSX_ATTR_KEY: genericLeafSerialiser,
  CJSX_ATTR_VAL: genericLeafSerialiser
};
// First child whose type is not CJSX_WHITESPACE, or undefined.
firstNonWhitespaceChild = function(children) {
  return find.call(children, function(child) {
    return child.type !== $.CJSX_WHITESPACE;
  });
};

containsNewlines = function(text) {
  return text.indexOf('\n') > -1;
};

// Joins serialised items with commas, omitting the space when the next
// piece already starts on a new line.
// NOTE(review): returns undefined for an empty list — callers appear to
// always pass at least one element; confirm before relying on this.
joinList = function(items) {
  var i, output;
  output = items[items.length - 1];
  i = items.length - 2;
  while (i >= 0) {
    if (output.charAt(0) === '\n') {
      output = items[i] + ',' + output;
    } else {
      output = items[i] + ', ' + output;
    }
    i--;
  }
  return output;
};

SPACES_ONLY = /^\s+$/;

WHITESPACE_ONLY = /^[\n\s]+$/;

TEXT_LEADING_WHITESPACE = /^\s*?\n\s*/;

TEXT_TRAILING_WHITESPACE = /\s*?\n\s*?$/;

exports.Serialiser = Serialiser;

exports.nodeSerialisers = nodeSerialisers;
|
/*
* Copyright © 2018, 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.concurrent.api.single;
import io.servicetalk.concurrent.api.LegacyTestSingle;
import io.servicetalk.concurrent.api.Single;
import io.servicetalk.concurrent.internal.DeliberateException;
import io.servicetalk.concurrent.test.internal.TestSingleSubscriber;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import static io.servicetalk.concurrent.api.SourceAdapters.toSource;
import static io.servicetalk.concurrent.internal.DeliberateException.DELIBERATE_EXCEPTION;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.sameInstance;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.verify;
/**
 * Base tests for cancel-callback operators: subclasses supply the concrete
 * operator via {@link #doCancel(Single, Runnable)}, and these tests verify
 * the supplied {@code Runnable} runs when the subscription is cancelled.
 */
public abstract class AbstractWhenCancelTest {
    private final TestSingleSubscriber<String> listener = new TestSingleSubscriber<>();

    @Test
    void testCancelAfterSuccess() {
        // Cancelling after the source already succeeded must still invoke the callback.
        Runnable onCancel = Mockito.mock(Runnable.class);
        toSource(doCancel(Single.succeeded("Hello"), onCancel)).subscribe(listener);
        listener.awaitSubscription().cancel();
        verify(onCancel).run();
    }

    @Test
    void testCancelNoEmissions() {
        // Cancelling a source that never emits must invoke the callback.
        Runnable onCancel = Mockito.mock(Runnable.class);
        toSource(doCancel(Single.<String>never(), onCancel)).subscribe(listener);
        listener.awaitSubscription().cancel();
        verify(onCancel).run();
    }

    @Test
    void testCallbackThrowsError() {
        // A throwing callback must still cancel upstream, and the original
        // exception instance must propagate unchanged.
        LegacyTestSingle<String> single = new LegacyTestSingle<>();

        Exception e = assertThrows(DeliberateException.class, () -> {
            try {
                toSource(doCancel(single, () -> {
                    throw DELIBERATE_EXCEPTION;
                })).subscribe(listener);
                listener.awaitSubscription().cancel();
            } finally {
                single.verifyCancelled();
            }
        });
        assertThat(e, is(sameInstance(DELIBERATE_EXCEPTION)));
    }

    protected abstract <T> Single<T> doCancel(Single<T> single, Runnable runnable);
}
|
<reponame>ineunetOS/knife-commons<gh_stars>0
/*
* Copyright 2013-2016 iNeunet OpenSource and the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.ineunet.knife.qlmap.criteria;
import java.util.Map;
/**
 * Builder-style query criteria: accumulates restrictors and rendering options,
 * then produces query/count/delete strings together with their bound values.
 *
 * @author <NAME>
 */
public interface ICriteria {

    /**
     * Adds the restrictor conditionally — presumably only when its value is
     * non-blank (TODO confirm against implementations).
     */
    ICriteria addIfNotBlank(Restrictor restrictor);

    /** Adds the restrictor unconditionally. */
    ICriteria addRestrictor(Restrictor restrictor);

    /**
     * @param select such as <code>"id, name"</code>, or *
     * @return criteria
     */
    ICriteria setSelectColumns(String select);

    /** @return the rendered SELECT statement for the accumulated criteria */
    String getQueryString();

    /** @return the rendered COUNT statement for the accumulated criteria */
    String getCountString();

    /** @return the rendered DELETE statement for the accumulated criteria */
    String getDeleteString();

    /** @return positional parameter values matching the rendered statements */
    Object[] getValues();

    /**
     * @since 1.0.5
     * @return map of named values
     */
    Map<String, Object> getNamedValues();

    /** Sets the ORDER BY clause content. */
    ICriteria orderBy(String orderby);

    String getOrderBy();

    /** Restricts results to {@code rows} rows starting at {@code start}. */
    ICriteria limit(int start, int rows);

    /**
     * @return the table alias
     * @since 1.0.5
     */
    String getAlias();

    /** @return whether named (vs. positional) parameters are in use */
    boolean isNamedParam();
}
|
<gh_stars>100-1000
//
// ZMessage.h
// Zulip
//
// Created by <NAME> on 8/2/13.
//
//
#import <Foundation/Foundation.h>
#import <CoreData/CoreData.h>
@class ZSubscription, ZUser, RawMessage;
// Core Data entity for a single Zulip message. Properties mirror the
// managed-object model; relationships link to sender, recipients and the
// stream subscription.
@interface ZMessage : NSManagedObject

@property (nonatomic, retain) NSString * avatar_url;
@property (nonatomic, retain) NSString * content;
@property (nonatomic, retain) NSNumber * messageID;
@property (nonatomic, retain) NSString * stream_recipient;
@property (nonatomic, retain) NSString * subject;
@property (nonatomic, retain) NSDate * timestamp;
@property (nonatomic, retain) NSString * type;
// Serialized representation backing the messageFlags accessors below.
@property (nonatomic, retain) NSData * flagData;
@property (nonatomic, retain) NSSet *pm_recipients;
@property (nonatomic, retain) ZUser *sender;
@property (nonatomic, retain) ZSubscription *subscription;

// NOTE: These two methods below have been manually added,
// when regenerating this file make sure to keep them!
// Do not use these directly. Use RawMessage objects
// to manipulate messages
- (NSSet *)messageFlags;
- (void)setMessageFlags:(NSSet *)flags;

// NOTE added manually, retain when regenerating!
@property (nonatomic, retain) RawMessage *linkedRawMessage;

@end

// Core Data's auto-generated to-many accessors for pm_recipients.
@interface ZMessage (CoreDataGeneratedAccessors)

- (void)addPm_recipientsObject:(ZUser *)value;
- (void)removePm_recipientsObject:(ZUser *)value;
- (void)addPm_recipients:(NSSet *)values;
- (void)removePm_recipients:(NSSet *)values;

@end
|
const Discord = require('discord.js');
module.exports = {
name: "help-fun",
aliases: ['helpfun', 'bothelpfun'],
description: "Help on the fun section",
cooldown: 2,
execute(message, args){
const funEmbed = new Discord.MessageEmbed()
.setTitle('Commands for fun section.')
.setThumbnail(message.author.displayAvatarURL({ dyanmic: true }))
.setAuthor(`${message.author.tag}`)
.setColor('RANDOM')
.setTimestamp()
.addFields(
{ name: '**!beep**', value: 'Boop' },
{ name: '**!8ball**', value: 'Let the bot decide' },
{ name: '**!bitcoin**', value: 'Look up bitcoins price' },
{ name: '**!osuprofile**', value: 'COMING SOON!' },
{ name: '**!sus**', value: 'amog us' },
)
message.channel.send(funEmbed)
}
}
|
<filename>src/components/RepoValidationCard.js
import { useEffect, useState, useContext } from 'react'
import PropTypes from 'prop-types'
import { Card } from 'translation-helps-rcl'
import { BIBLE_AND_OBS } from '@common/BooksOfTheBible'
import { AuthContext } from '@context/AuthContext'
import { StoreContext } from '@context/StoreContext'
import { AdminContext } from '@context/AdminContext'
import React from 'react';
//import { makeStyles } from '@material-ui/core/styles';
import { checkTwForBook, checkTaForBook } from '@utils/checkArticles'
import * as csv from '@utils/csvMaker'
import { WORKING, OK, NO_TWL_REPO, SEE_TWL_ERROR, SEE_TN_ERROR, RETRIEVING, VALIDATION_FINISHED, TQ, SQ, SN, TN, TWL, LT, ST }
from '@common/constants'
import DenseTable from './DenseTable'
import { checkManifestBook } from '@common/manifests'
import { applyIcon } from './iconHelper'
import { cvCombine } from '@utils/contentValidation'
export default function RepoValidationCard({
bookId,
classes,
onClose: removeBook,
}) {
// TW
const [twErrorMsg, setTwErrorMsg] = useState(WORKING)
const [twMissing, setTwMissing] = useState({})
const [twCv, setTwCv] = useState(null)
// TA
const [taErrorMsg, setTaErrorMsg] = useState(WORKING)
const [taMissing, setTaMissing] = useState({})
const [taCv, setTaCv] = useState(null)
// LT (GLT or ULT)
const [ltBookErrorMsg, setLtBookErrorMsg] = useState(null)
const [ltFilename, setLtFilename] = useState(null)
const [ltCv, setLtCv] = useState(null)
// ST (GST or UST)
const [stBookErrorMsg, setStBookErrorMsg] = useState(null)
const [stFilename, setStFilename] = useState(null)
const [stCv, setStCv] = useState(null)
// TN
const [tnBookErrorMsg, setTnBookErrorMsg] = useState(null)
const [tnFilename, setTnFilename] = useState(null)
const [tnCv, setTnCv] = useState(null)
// TWL
const [twlBookErrorMsg, setTwlBookErrorMsg] = useState(null)
const [twlFilename, setTwlFilename] = useState(null)
const [twlCv, setTwlCv] = useState(null)
// TQ
const [tqBookErrorMsg, setTqBookErrorMsg] = useState(null)
const [tqFilename, setTqFilename] = useState(null)
const [tqCv, setTqCv] = useState(null)
// SQ
const [sqBookErrorMsg, setSqBookErrorMsg] = useState(null)
const [sqFilename, setSqFilename] = useState(null)
const [sqCv, setSqCv] = useState(null)
// SN
const [snBookErrorMsg, setSnBookErrorMsg] = useState(null)
const [snFilename, setSnFilename] = useState(null)
const [snCv, setSnCv] = useState(null)
const {
state: {
authentication,
},
} = useContext(AuthContext)
const {
state: {
owner,
server,
languageId,
},
} = useContext(StoreContext)
const {
state: {
tnRepoTree,
tnRepoTreeManifest,
tnManifestSha,
tnRepoTreeStatus,
twlRepoTree,
twlRepoTreeManifest,
twlManifestSha,
twlRepoTreeStatus,
ltRepoTree,
ltRepoTreeManifest,
ltManifestSha,
ltRepoTreeStatus,
stRepoTree,
stRepoTreeManifest,
stManifestSha,
stRepoTreeStatus,
tqRepoTree,
tqRepoTreeManifest,
tqManifestSha,
tqRepoTreeStatus,
sqRepoTree,
sqRepoTreeManifest,
sqManifestSha,
sqRepoTreeStatus,
snRepoTree,
snRepoTreeManifest,
snManifestSha,
snRepoTreeStatus,
taRepoTree,
taRepoTreeManifest,
taManifestSha,
taRepoTreeStatus,
twRepoTree,
twRepoTreeManifest,
twManifestSha,
twRepoTreeStatus,
refresh,
},
actions: {
setRefresh,
}
} = useContext(AdminContext)
/*
--
-- TW
--
*/
// Derive the TW (Translation Words) status for the current book. TW has no
// per-book file of its own: the relevant word articles are discovered from the
// TWL (word list) book file, so this effect gates on the TWL validation result.
useEffect(() => {
if ( twlBookErrorMsg === null ) {
return // wait until we know the result
}
// no need to re-evaluate TW articles when twl is validating
if ( twlBookErrorMsg === RETRIEVING || twlBookErrorMsg === VALIDATION_FINISHED ) {
return
}
// Fetch the TWL book file and report which referenced TW articles are
// present/absent in the TW repo tree.
async function getTwWords() {
setTwErrorMsg(WORKING)
const rc = await checkTwForBook(authentication, bookId, languageId, owner, server, twRepoTree)
// rc.Status carries the error text; a falsy status means the check passed
setTwErrorMsg(rc.Status ? rc.Status : null)
const lists = { Present: rc.Present, Absent: rc.Absent}
setTwMissing(lists)
}
// check twl repo first
if ( twlRepoTreeStatus === WORKING ) {
return
}
// check tw repo first
if ( twRepoTreeStatus === WORKING ) {
return
}
// OK repo is there as is manifest, but we won't be using the manifest for TW
// Now check to see if there is twlRepo error
// (a non-null status at this point indicates a repo-level failure)
if ( twlRepoTreeStatus !== null ) {
setTwErrorMsg(NO_TWL_REPO)
return
}
// OK, now check whether the twl book file is present
if ( twlBookErrorMsg === OK ) {
// All looks good... let's get the TWL book file
// fetch it!
if (authentication && twRepoTree && twlRepoTree) {
getTwWords()
}
} else {
setTwErrorMsg(SEE_TWL_ERROR)
}
// NOTE(review): authentication, bookId, languageId, owner and server are read
// above but omitted from this dependency list, and OK is presumably a constant;
// confirm the omissions are intentional before adding them.
}, [twRepoTree, twRepoTreeStatus, twlRepoTree, twlRepoTreeStatus, twlBookErrorMsg, OK])
/*
--
-- TA
--
*/
// Derive the TA (Translation Academy) status for the current book. The TA
// articles to check are discovered from the TN book file, so this effect
// gates on the TN validation result (mirrors the TW/TWL effect above).
useEffect(() => {
if ( tnBookErrorMsg === null ) {
return // wait until we know the result
}
// no need to re-evaluate TA articles when TN is being validated
if ( tnBookErrorMsg === VALIDATION_FINISHED || tnBookErrorMsg === RETRIEVING ) {
return
}
// Fetch the TN book file and report which referenced TA articles are
// present/absent in the TA repo tree.
async function getTaWords() {
setTaErrorMsg(WORKING)
const rc = await checkTaForBook(authentication, bookId, languageId, owner, server, taRepoTree)
// rc.Status carries the error text; a falsy status means the check passed
setTaErrorMsg(rc.Status ? rc.Status : null)
const lists = { Present: rc.Present, Absent: rc.Absent}
setTaMissing(lists)
}
// check tn repo first
if ( tnRepoTreeStatus === WORKING ) {
return
}
// OK, repo is there as is manifest, but we won't be using the manifest for TA
// Now check to see if there is tnRepo error
// NOTE(review): the TW effect uses a dedicated NO_TWL_REPO message for the
// analogous case; here SEE_TN_ERROR is reused — confirm that is intended.
if ( tnRepoTreeStatus !== null ) {
setTaErrorMsg(SEE_TN_ERROR)
return
}
// check ta repo to make sure its ready
if ( taRepoTreeStatus === WORKING ) {
return
}
// OK, now check whether the tn book file is present
if ( tnBookErrorMsg === OK ) {
// All looks good... let's get the TA articles referenced by the TN book file
// fetch it!
if (authentication && taRepoTree && tnRepoTree) {
getTaWords()
}
} else {
setTaErrorMsg(SEE_TN_ERROR)
}
// NOTE(review): authentication, bookId, languageId, owner and server are read
// above but omitted from this dependency list, and OK is presumably a constant;
// confirm the omissions are intentional before adding them.
}, [taRepoTree, taRepoTreeStatus, tnRepoTree, tnRepoTreeStatus, tnBookErrorMsg, OK])
/*
--
-- TN
--
*/
// Each effect below re-checks, whenever the book or the repo data changes,
// that the current book is listed in the resource's manifest and that its
// book file exists in the repo tree. checkManifestBook reports back through
// the set*BookErrorMsg / set*Filename state setters.
useEffect(() => {
checkManifestBook(bookId, tnRepoTreeManifest, tnRepoTree, setTnBookErrorMsg, setTnFilename, TN)
}, [bookId, tnRepoTree, tnRepoTreeManifest])
/*
--
-- TWL
--
*/
useEffect(() => {
checkManifestBook(bookId, twlRepoTreeManifest, twlRepoTree, setTwlBookErrorMsg, setTwlFilename, TWL)
}, [bookId, twlRepoTree, twlRepoTreeManifest])
/*
--
-- LT (ult or glt)
--
*/
useEffect(() => {
checkManifestBook(bookId, ltRepoTreeManifest, ltRepoTree, setLtBookErrorMsg, setLtFilename, LT)
}, [bookId, ltRepoTree, ltRepoTreeManifest])
/*
--
-- ST (ust or gst)
--
*/
useEffect(() => {
checkManifestBook(bookId, stRepoTreeManifest, stRepoTree, setStBookErrorMsg, setStFilename, ST)
}, [bookId, stRepoTree, stRepoTreeManifest])
/*
--
-- TQ
--
*/
useEffect(() => {
checkManifestBook(bookId, tqRepoTreeManifest, tqRepoTree, setTqBookErrorMsg, setTqFilename, TQ)
}, [bookId, tqRepoTree, tqRepoTreeManifest])
/*
--
-- SQ
--
*/
useEffect(() => {
checkManifestBook(bookId, sqRepoTreeManifest, sqRepoTree, setSqBookErrorMsg, setSqFilename, SQ)
}, [bookId, sqRepoTree, sqRepoTreeManifest])
/*
--
-- SN
--
*/
useEffect(() => {
checkManifestBook(bookId, snRepoTreeManifest, snRepoTree, setSnBookErrorMsg, setSnFilename, SN)
}, [bookId, snRepoTree, snRepoTreeManifest])
// Resolve the literal/simplified translation repo names for this language.
// The unfoldingWord org publishes them as ULT/UST; any other org uses the
// gateway-language naming GLT/GST.
const usesUwNaming = owner === "unfoldingWord" || owner === "unfoldingword"
const _ltRepo = languageId + (usesUwNaming ? "_ult" : "_glt")
const _stRepo = languageId + (usesUwNaming ? "_ust" : "_gst")
// Column headers for the per-resource status table rendered below.
const headers = ["Resource", "Repo", "Status", "Action"]
// One row per resource: [display label, repo name, status text, action icons].
// The Status column shows the repo-level status when set, otherwise the
// book-level validation message.
// NOTE(review): the positional arguments to applyIcon differ between the
// book-file resources (manifest sha, then null, filename, ...) and the
// article resources tw/ta (missing-lists object, then null, ...) — confirm
// both shapes match applyIcon's signature.
const rows = [
["Literal Translation", `${_ltRepo}`, ltRepoTreeStatus || ltBookErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,_ltRepo,ltRepoTreeStatus,ltBookErrorMsg, ltRepoTreeManifest, ltManifestSha,
null, ltFilename, setLtCv, ltCv, setLtBookErrorMsg,
)
],
["Simplified Translation", `${_stRepo}`, stRepoTreeStatus || stBookErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,_stRepo,stRepoTreeStatus,stBookErrorMsg, stRepoTreeManifest, stManifestSha,
null, stFilename, setStCv, stCv, setStBookErrorMsg,
)
],
["Translation Notes", `${languageId}_tn`, tnRepoTreeStatus || tnBookErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,`${languageId}_tn`,tnRepoTreeStatus,tnBookErrorMsg, tnRepoTreeManifest, tnManifestSha,
null, tnFilename, setTnCv, tnCv, setTnBookErrorMsg,
)
],
["Translation Word List", `${languageId}_twl`, twlRepoTreeStatus || twlBookErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,`${languageId}_twl`,twlRepoTreeStatus,twlBookErrorMsg, twlRepoTreeManifest, twlManifestSha,
null, twlFilename, setTwlCv, twlCv, setTwlBookErrorMsg,
)
],
["Translation Words", `${languageId}_tw`, twRepoTreeStatus || twErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,`${languageId}_tw`,twRepoTreeStatus,twErrorMsg, twRepoTreeManifest, twManifestSha, twMissing,
null, setTwCv, twCv, setTwErrorMsg,
)
],
["Translation Academy", `${languageId}_ta`, taRepoTreeStatus || taErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,`${languageId}_ta`,taRepoTreeStatus,taErrorMsg, taRepoTreeManifest, taManifestSha, taMissing,
null, setTaCv, taCv, setTaErrorMsg,
)
],
["Translation Questions", `${languageId}_tq`, tqRepoTreeStatus || tqBookErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,`${languageId}_tq`,tqRepoTreeStatus,tqBookErrorMsg, tqRepoTreeManifest, tqManifestSha,
null, tqFilename, setTqCv, tqCv, setTqBookErrorMsg,
)
],
["Study Questions", `${languageId}_sq`, sqRepoTreeStatus || sqBookErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,`${languageId}_sq`,sqRepoTreeStatus,sqBookErrorMsg, sqRepoTreeManifest, sqManifestSha,
null, sqFilename, setSqCv, sqCv, setSqBookErrorMsg,
)
],
["Study Notes", `${languageId}_sn`, snRepoTreeStatus || snBookErrorMsg,
applyIcon(server,owner,bookId,refresh,setRefresh,`${languageId}_sn`,snRepoTreeStatus,snBookErrorMsg, snRepoTreeManifest, snManifestSha,
null, snFilename, setSnCv, snCv, setSnBookErrorMsg,
)
],
]
// Render the per-book card: a closeable Card (closing removes the book from
// the admin view) containing the dense status table built above.
return (
<Card title={BIBLE_AND_OBS[bookId]}
classes={classes}
hideMarkdownToggle={true}
closeable={true}
onClose={() => removeBook(bookId)}
>
<DenseTable cols={headers} rows={rows} />
</Card>
)
}
RepoValidationCard.propTypes = {
// book identifier — presumably a key into BIBLE_AND_OBS (used for the card title); verify
bookId: PropTypes.string,
// style classes passed through to the Card component
classes: PropTypes.object,
}
|
#!/bin/sh
# this tests whether all required args are listed as
# missing when no arguments are specified
# failure
# NOTE(review): the header says "no arguments are specified", yet this case
# passes -v "1 2 3" — confirm which behavior the test is meant to exercise.
# Use $(...) instead of backticks and quote the expansions so the test still
# works when the script path contains spaces.
./simple-test.sh "$(basename "$0" .sh)" test11 -v "1 2 3"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.