text stringlengths 1 1.05M |
|---|
# Launch the linear DrugBank mixup experiment on the GPU given as the first
# script argument, e.g. `./run.sh 0` sets CUDA_VISIBLE_DEVICES=0.
# -p: labelled training CSV; --outside-path: DrugBank fingerprint CSV;
# -e: epochs; -b: batch size; --es-patience: early-stopping patience.
CUDA_VISIBLE_DEVICES=$1 \
python -m src.training.experiment_linear_drugbank_mixup \
-p ./data/fromraw_cid_inchi_smiles_fp_labels_onehots.csv \
--outside-path ./data/DrugBank_smiles_fp.csv \
-e 1000 \
-b 256 \
--es-patience 20 \
--log-path ./logs/linear/drugbank_mixup \
--repeat 5 \
--mixup 0.4 \
--mixup-repeat 10
|
import os
import lmdb # install lmdb by "pip install lmdb"
import cv2
import numpy as np
import glob
from itertools import islice
def checkImageIsValid(imageBin):
    """Return True if ``imageBin`` holds bytes that decode to a non-empty image.

    Args:
        imageBin: raw image file content as bytes, or None.

    Returns:
        bool: False when the input is None, cannot be decoded by OpenCV,
        or decodes to an image of zero area; True otherwise.
    """
    if imageBin is None:
        return False
    # np.fromstring is deprecated for binary input; np.frombuffer is the
    # supported zero-copy replacement.
    imageBuf = np.frombuffer(imageBin, dtype=np.uint8)
    img = cv2.imdecode(imageBuf, cv2.IMREAD_GRAYSCALE)
    if img is None:
        # cv2.imdecode returns None for corrupt/unsupported data; the
        # original code crashed on img.shape in that case.
        return False
    imgH, imgW = img.shape[0], img.shape[1]
    return imgH * imgW != 0
# def writeCache(env, cache):
# with env.begin(write=True) as txn:
# for k, v in cache.iteritems():
# txn.put(k, v)
# ### python3中修改为
def writeCache(env, cache):
    """Write every (key, value) pair of ``cache`` into the LMDB environment.

    Args:
        env: an open lmdb.Environment (anything whose ``begin(write=True)``
            yields a transaction with a ``put`` method works).
        cache: dict mapping str keys to str or bytes values. LMDB stores
            bytes only, so str values are UTF-8 encoded before the put.
    """
    with env.begin(write=True) as txn:
        for k, v in cache.items():
            # isinstance is the idiomatic type check (handles str
            # subclasses, unlike the original ``type(v) == str``).
            if isinstance(v, str):
                v = v.encode()
            txn.put(k.encode(), v)
def createDataset(outputPath, imagePathList, labelList, lexiconList=None, checkValid=True):
    """
    Create LMDB dataset for CRNN training.
    ARGS:
        outputPath    : LMDB output path
        imagePathList : list of image path
        labelList     : list of corresponding groundtruth texts
        lexiconList   : (optional) list of lexicon lists
        checkValid    : if true, check the validity of every image
    RAISES:
        ValueError    : if imagePathList and labelList differ in length
    """
    # Explicit validation instead of `assert`, which is stripped under -O.
    if len(imagePathList) != len(labelList):
        raise ValueError('imagePathList and labelList must have the same length')
    nSamples = len(imagePathList)
    # map_size is the maximum database size (1 TB here). On Windows, LMDB
    # pre-allocates the file, so a smaller value such as 8589934592 (8 GB)
    # may be required if this call errors out.
    env = lmdb.open(outputPath, map_size=1099511627776)
    cache = {}
    cnt = 1  # keys are 1-based: image-000000001 / label-000000001
    for i, (imagePath, label) in enumerate(zip(imagePathList, labelList)):
        if not os.path.exists(imagePath):
            print('%s does not exist' % imagePath)
            continue
        with open(imagePath, 'rb') as f:
            imageBin = f.read()
        if checkValid:
            if not checkImageIsValid(imageBin):
                print('%s is not a valid image' % imagePath)
                continue
        # The .mdb file stores two kinds of records -- image bytes and label
        # text -- each under its own key prefix.
        imageKey = 'image-%09d' % cnt
        labelKey = 'label-%09d' % cnt
        cache[imageKey] = imageBin
        cache[labelKey] = label
        if lexiconList:
            lexiconKey = 'lexicon-%09d' % cnt
            cache[lexiconKey] = ' '.join(lexiconList[i])
        # Flush to LMDB in batches of 1000 entries to bound memory usage.
        if cnt % 1000 == 0:
            writeCache(env, cache)
            cache = {}
            print('Written %d / %d' % (cnt, nSamples))
        cnt += 1
    nSamples = cnt - 1  # number actually written (skipped files excluded)
    cache['num-samples'] = str(nSamples)
    writeCache(env, cache)
    print('Created dataset with %d samples' % nSamples)
def read_text(path):
    """Read the text file at ``path`` and return its content with
    surrounding whitespace stripped."""
    with open(path) as handle:
        return handle.read().strip()
if __name__ == '__main__':
    # Label source mode. 'from name': the label is taken from the image file
    # name. 'from txt': labels come from a label file.
    m = 'from txt'
    outputPath = '../data/lol/lol_val'  # LMDB output directory
    # Training images use txt labels whose names must match the image names,
    # e.g. 123.jpg pairs with 123.txt.
    # When m == 'from txt', `path` is the label file (one "imagename,label"
    # line each, images in an images/ dir next to it); when m == 'from name',
    # `path` is the image folder itself.
    path = '/home/xfz/Projects/PycharmProjects/TextRecognitionDataGenerator-master/trdg/out_comm/val/labels.txt'
    imgPaths = []
    labellist = []
    if m == 'from name':
        # List every file in the directory.
        f_nameList = os.listdir(path)
        for imgname in f_nameList:
            # Filter by file-name extension.
            exp = imgname.rsplit('.')[-1]
            if exp not in ['png', 'jpg']:
                continue
            # Label is everything before the first underscore in the name.
            label = imgname.split('_', 1)[0]
            imgPaths.append(os.path.join(path, imgname))
            labellist.append(label)
    elif m == 'from txt':
        rootpath = path.rsplit(os.path.sep, 1)[0]
        # utf-8-sig transparently drops a BOM if the label file has one.
        with open(path, encoding='utf-8-sig') as f:
            for line in f:
                imgname, label = line.split(',', 1)
                imgPaths.append(os.path.join(rootpath, 'images', imgname.strip()))
                labellist.append(label.strip().replace('"', ''))
    else:
        raise ValueError("m内容错误,支持的内容:'from name', 'from txt'")
    # imagePathList = glob.glob(path)
    # print('一共%d张图片'%(len(imagePathList)))
    # imgLabelLists = []
    # for p in imagePathList:
    #     try:
    #         imgLabelLists.append((p, read_text(p.replace('.jpg', '.txt'))))
    #     except:
    #         continue
    #
    # # imgLabelList = [ (p,read_text(p.replace('.jpg','.txt'))) for p in imagePathList]
    # ## sort by labelList
    # imgLabelList = sorted(imgLabelLists, key=lambda x: len(x[1]))
    # imgPaths = [p[0] for p in imgLabelList]
    # txtLists = [p[1] for p in imgLabelList]
    createDataset(outputPath, imgPaths, labellist, lexiconList=None, checkValid=True)
    # Read images back from the LMDB dataset and display them to verify the
    # dataset was created successfully.
    val_num = 10
    with lmdb.open(outputPath) as env:
        txn = env.begin()
        for key, value in islice(txn.cursor(), val_num):
            # NOTE(review): np.fromstring is deprecated for binary input;
            # np.frombuffer is the modern equivalent -- confirm before changing.
            imageBuf = np.fromstring(value, dtype=np.uint8)
            img = cv2.imdecode(imageBuf, cv2.IMREAD_GRAYSCALE)
            if img is not None:
                # Fetch the label matching this image: swap the 'image'
                # key prefix for 'label'.
                key = key.decode().replace('image', 'label', 1).encode()
                label = txn.get(key).decode()
                print(label)
                # Show the image.
                cv2.imshow('image', img)
                cv2.waitKey()
            else:  # label records decode to None; skip them
                pass
            # print('key: %s label: %s' % (key, value))
|
package main
// ParkingSystem tracks the number of remaining free slots for each of the
// three car sizes.
type ParkingSystem struct {
	big    int // remaining big slots
	medium int // remaining medium slots
	small  int // remaining small slots
}
// Constructor builds a ParkingSystem with the given slot capacities for
// big, medium and small cars.
func Constructor(big int, medium int, small int) ParkingSystem {
	return ParkingSystem{
		big:    big,
		medium: medium,
		small:  small,
	}
}
// AddCar tries to park a car of the given type (1 = big, 2 = medium,
// 3 = small). It consumes one slot and returns true when a slot of that
// size is free, and returns false otherwise.
//
// Bug fix: the previous comparisons used `> 1`, which wrongly rejected the
// last available slot of each size (with big == 1, AddCar(1) returned false
// even though one big slot was free). A slot can be taken whenever the
// remaining count is > 0.
func (this *ParkingSystem) AddCar(carType int) bool {
	switch carType {
	case 1:
		if this.big > 0 {
			this.big--
			return true
		}
	case 2:
		if this.medium > 0 {
			this.medium--
			return true
		}
	case 3:
		if this.small > 0 {
			this.small--
			return true
		}
	}
	return false
}
|
<filename>tests/com.archimatetool.editor.tests/src/com/archimatetool/editor/ui/textrender/ViewpointRendererTests.java
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.ui.textrender;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import com.archimatetool.model.IArchimateDiagramModel;
import com.archimatetool.model.IDiagramModelArchimateObject;
import junit.framework.JUnit4TestAdapter;
/**
* ViewpointRenderer Tests
*
* @author <NAME>
*/
@SuppressWarnings("nls")
public class ViewpointRendererTests extends AbstractTextRendererTests {

    /**
     * @return a JUnit 4 adapter so this class can run under a JUnit 3 runner
     */
    public static junit.framework.Test suite() {
        return new JUnit4TestAdapter(ViewpointRendererTests.class);
    }

    // Renderer under test; shared across test methods.
    private ViewpointRenderer renderer = new ViewpointRenderer();

    @Override
    protected ViewpointRenderer getRenderer() {
        return renderer;
    }

    /**
     * A diagram model object whose diagram has no viewpoint set renders as "None".
     */
    @Test
    public void render_Viewpoint1() {
        String result = renderer.render(TextRendererTests.createDiagramModelObject(), "${viewpoint}");
        assertEquals("None", result);
    }

    /**
     * A diagram model connection whose diagram has no viewpoint set renders as "None".
     */
    @Test
    public void render_Viewpoint2() {
        String result = renderer.render(TextRendererTests.createDiagramModelConnection(), "${viewpoint}");
        assertEquals("None", result);
    }

    /**
     * Setting the "organization" viewpoint on the diagram renders its display name.
     */
    @Test
    public void render_Viewpoint3() {
        IDiagramModelArchimateObject dmo = TextRendererTests.createDiagramModelObject();
        ((IArchimateDiagramModel)dmo.getDiagramModel()).setViewpoint("organization");
        String result = renderer.render(dmo, "${viewpoint}");
        assertEquals("Organization", result);
    }
}
|
package com.github.alex1304.ultimategdbot.core;
import java.lang.management.ManagementFactory;
import java.time.Duration;
import java.util.Optional;
import javax.management.NotificationEmitter;
import javax.management.NotificationListener;
import javax.management.openmbean.CompositeData;
import com.sun.management.GarbageCollectionNotificationInfo;
import reactor.core.publisher.Flux;
import reactor.core.publisher.FluxSink;
import reactor.core.publisher.Mono;
import reactor.core.publisher.ReplayProcessor;
/**
 * Captures JVM memory figures and republishes a fresh snapshot after every
 * garbage collection, exposing the most recent one as a reactive stream.
 */
class MemoryStats {

    // Replays the latest snapshot to new subscribers; seeded with a
    // placeholder instance so getStats() can emit before the first GC.
    private static final ReplayProcessor<MemoryStats> STATS = ReplayProcessor.cacheLastOrDefault(new MemoryStats());
    private static final FluxSink<MemoryStats> STATS_SINK = STATS.sink(FluxSink.OverflowStrategy.LATEST);

    // GC end time in milliseconds (JVM uptime clock), or -1 for the
    // placeholder snapshot created before any GC has been observed.
    private final long timestamp;
    final long totalMemory; // bytes currently reserved by the JVM
    final long usedMemory;  // bytes in use (total - free); 0 in the placeholder
    final long maxMemory;   // maximum bytes the JVM will attempt to use

    /**
     * Builds a snapshot of the current runtime memory figures.
     *
     * @param timestamp GC end time (ms, JVM uptime clock) that triggered this snapshot
     */
    private MemoryStats(long timestamp) {
        var total = Runtime.getRuntime().totalMemory();
        var free = Runtime.getRuntime().freeMemory();
        var max = Runtime.getRuntime().maxMemory();
        this.timestamp = timestamp;
        this.totalMemory = total;
        this.usedMemory = total - free;
        this.maxMemory = max;
    }

    /**
     * Placeholder snapshot used before the first GC notification arrives;
     * used memory is reported as 0 and the timestamp as -1 ("no GC yet").
     */
    private MemoryStats() {
        this.timestamp = -1;
        this.totalMemory = Runtime.getRuntime().totalMemory();
        this.usedMemory = 0;
        this.maxMemory = Runtime.getRuntime().maxMemory();
    }

    /**
     * @return time elapsed since the last garbage collection, or an empty
     *         Optional when no GC has been observed yet (placeholder snapshot)
     */
    Optional<Duration> elapsedSinceLastGC() {
        return Optional.of(timestamp)
                .filter(t -> t > 0)
                .map(t -> Duration.ofMillis(ManagementFactory.getRuntimeMXBean().getUptime() - t));
    }

    /**
     * @return a Mono emitting the most recent memory snapshot
     */
    static Mono<MemoryStats> getStats() {
        return STATS.next();
    }

    /**
     * Registers a listener on every garbage collector MX bean so that each
     * completed GC publishes a fresh MemoryStats snapshot to the stream.
     */
    static void start() {
        Flux.<MemoryStats>create(sink -> {
            NotificationListener gcListener = (notif, handback) -> {
                if (notif.getType().equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION)) {
                    var gcInfo = GarbageCollectionNotificationInfo.from((CompositeData) notif.getUserData()).getGcInfo();
                    sink.next(new MemoryStats(gcInfo.getEndTime()));
                }
            };
            ManagementFactory.getGarbageCollectorMXBeans()
                    .forEach(bean -> ((NotificationEmitter) bean).addNotificationListener(gcListener, null, null));
        }).subscribe(STATS_SINK::next);
    }
}
-- Names of all people whose country is India.
SELECT name
FROM people
WHERE country = 'India';
<filename>src/server/modules/Fork/vos/ForkMessageCallbackWrapper.ts
import Dates from "../../../../shared/modules/FormatDatesNombres/Dates/Dates";
/**
 * Holds the success/failure callbacks of a pending cross-thread (fork)
 * message, together with its creation time and timeout.
 */
export default class ForkMessageCallbackWrapper {

    // Creation timestamp (from Dates.now()), recorded so stale callbacks
    // can be expired against `timeout`.
    public creation_time: number;

    /**
     * Wrapper for waiting for thread interaction
     * @param resolver callback if succeeded
     * @param thrower callback if failed
     * @param timeout in secs (defaults to 300 / 5 minutes)
     */
    public constructor(
        public resolver: (result: any) => any,
        public thrower: (result: any) => any,
        public timeout: number = 300) {
        this.creation_time = Dates.now();
    }
}
<gh_stars>0
'use strict';

// Descriptor for the 20x20 "warning--filled" SVG icon: an outer filled path
// (circle with exclamation mark) plus an inner path that is hidden by
// default (opacity 0) and tagged for two-tone styling via data-icon-path.
var _20 = {
    elem: 'svg',
    attrs: {
        xmlns: 'http://www.w3.org/2000/svg',
        viewBox: '0 0 20 20',
        width: 20,
        height: 20,
    },
    content: [
        {
            elem: 'path',
            attrs: {
                d:
                    'M10 1c-5 0-9 4-9 9s4 9 9 9 9-4 9-9-4-9-9-9zm-.8 4h1.5v7H9.2V5zm.8 11c-.6 0-1-.4-1-1s.4-1 1-1 1 .4 1 1-.4 1-1 1z',
            },
        },
        {
            elem: 'path',
            attrs: {
                d:
                    'M9.2 5h1.5v7H9.2V5zm.8 11c-.6 0-1-.4-1-1s.4-1 1-1 1 .4 1 1-.4 1-1 1z',
                'data-icon-path': 'inner-path',
                opacity: '0',
            },
        },
    ],
    name: 'warning--filled',
    size: 20,
};

module.exports = _20;
|
package cn.lts.mobile.util;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import com.alibaba.fastjson.JSONObject;
import cn.lts.common.constant.WeiXinConstants;
/**
 * Uploads media files to the WeChat (Weixin) media upload API by building
 * multipart/form-data requests by hand over HttpURLConnection.
 */
public class WXUpload {

    /**
     * Uploads a local file to the WeChat server.
     *
     * @param accessToken WeChat API access token (substituted into the URL)
     * @param type        media type expected by the API (substituted into the URL)
     * @param file        local file to upload
     * @return the parsed JSON response, or null when the request failed
     */
    public static JSONObject uploadByFile(String accessToken, String type, File file){
        JSONObject jsonObject = null;
        String last_wechat_url = WeiXinConstants.UPLOAD_WEBCHAT_URL.replace("ACCESS_TOKEN", accessToken).replace("TYPE", type);
        // Multipart boundary delimiter.
        String boundary = "--------------------------";
        try {
            URL url = new URL(last_wechat_url);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            // The following two lines are required to send a POST request.
            conn.setDoOutput(true);
            conn.setDoInput(true);
            conn.setUseCaches(false);
            conn.setRequestMethod("POST");
            conn.setRequestProperty("connection", "Keep-Alive");
            conn.setRequestProperty("user-agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1)");
            // NOTE(review): "Charsert" looks like a typo for "Charset"; the
            // header name is sent as-is -- confirm whether the server cares
            // before changing it.
            conn.setRequestProperty("Charsert", "UTF-8");
            conn.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary);
            OutputStream out = new DataOutputStream(conn.getOutputStream());
            byte[] end_data = ("\r\n--" + boundary + "--\r\n").getBytes();// closing multipart boundary
            // Multipart part header for the "media" form field.
            StringBuilder sb = new StringBuilder();
            sb.append("--");
            sb.append(boundary);
            sb.append("\r\n");
            sb.append("Content-Disposition: form-data;name=\"media\";filename=\""+ file.getName() + "\"\r\n");
            sb.append("Content-Type:application/octet-stream\r\n\r\n");
            byte[] data = sb.toString().getBytes();
            out.write(data);
            // Stream the file content into the request body.
            DataInputStream in = new DataInputStream(new FileInputStream(file));
            int bytes = 0;
            byte[] bufferOut = new byte[1024*8];
            while ((bytes = in.read(bufferOut)) != -1) {
                out.write(bufferOut, 0, bytes);
            }
            out.write("\r\n".getBytes()); // separator required between parts when sending several files
            in.close();
            out.write(end_data);
            out.flush();
            out.close();
            // Read the URL response with a BufferedReader.
            BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
            String line = null;
            StringBuffer buffer = new StringBuffer();
            while ((line = reader.readLine()) != null) {
                buffer.append(line);
            }
            // Parse the response body as JSON.
            jsonObject = JSONObject.parseObject(buffer.toString());
            System.out.println(jsonObject);
        } catch (Exception e) {
            // NOTE(review): the streams opened above are not closed in a
            // finally block, so they leak when an exception interrupts the
            // transfer.
            System.out.println("发送POST请求出现异常!" + e);
            e.printStackTrace();
        }
        return jsonObject;
    }

    /**
     * Downloads a file from a URL and re-uploads it to the WeChat server.
     *
     * @param accessToken WeChat API access token (substituted into the URL)
     * @param type        media type expected by the API (substituted into the URL)
     * @param fileUrl     URL of the source media file
     * @return the parsed JSON response, or null when the request failed
     */
    public static JSONObject uploadByUrl(String accessToken, String type, String fileUrl) {
        JSONObject jsonObject = null;
        String last_wechat_url = WeiXinConstants.UPLOAD_WEBCHAT_URL.replace("ACCESS_TOKEN", accessToken).replace("TYPE", type);
        // Multipart boundary delimiter.
        String boundary = "--------------------------";
        try {
            URL uploadUrl = new URL(last_wechat_url);
            HttpURLConnection uploadConn = (HttpURLConnection) uploadUrl.openConnection();
            uploadConn.setDoOutput(true);
            uploadConn.setDoInput(true);
            uploadConn.setRequestMethod("POST");
            // Set the Content-Type request header.
            uploadConn.setRequestProperty("Content-Type", "multipart/form-data;boundary=" + boundary);
            // Output stream of the upload request (writes data to the WeChat server).
            OutputStream outputStream = uploadConn.getOutputStream();
            URL mediaUrl = new URL(fileUrl);
            HttpURLConnection meidaConn = (HttpURLConnection) mediaUrl.openConnection();
            meidaConn.setDoOutput(true);
            meidaConn.setRequestMethod("GET");
            // Read the content type from the media response headers.
            String contentType = meidaConn.getHeaderField("Content-Type");
            String filename=getFileName(fileUrl,contentType);
            // Start of the multipart request body.
            outputStream.write(("--" + boundary + "\r\n").getBytes());
            outputStream.write(String.format("Content-Disposition: form-data; name=\"media\"; filename=\"%s\"\r\n", filename).getBytes());
            outputStream.write(String.format("Content-Type: %s\r\n\r\n", contentType).getBytes());
            // Input stream of the media file (reads the file).
            BufferedInputStream bis = new BufferedInputStream(meidaConn.getInputStream());
            byte[] buf = new byte[1024 * 8];
            int size = 0;
            while ((size = bis.read(buf)) != -1) {
                // Copy the media file into the output stream (towards the WeChat server).
                outputStream.write(buf, 0, size);
            }
            // End of the multipart request body.
            outputStream.write(("\r\n--" + boundary + "--\r\n").getBytes());
            outputStream.close();
            bis.close();
            meidaConn.disconnect();
            // Input stream of the upload response (reads data from the WeChat server).
            InputStream inputStream = uploadConn.getInputStream();
            InputStreamReader inputStreamReader = new InputStreamReader(inputStream, "utf-8");
            BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
            StringBuffer buffer = new StringBuffer();
            String str = null;
            while ((str = bufferedReader.readLine()) != null) {
                buffer.append(str);
            }
            bufferedReader.close();
            inputStreamReader.close();
            // Release resources.
            inputStream.close();
            inputStream = null;
            uploadConn.disconnect();
            // Parse the response body as JSON.
            jsonObject = JSONObject.parseObject(buffer.toString());
            System.out.println(jsonObject);
        } catch (Exception e) {
            // NOTE(review): streams are not closed in a finally block, so
            // they leak when an exception interrupts the transfer.
            System.out.println("上传文件失败!");
            e.printStackTrace();
        }
        return jsonObject;
    }

    /**
     * Derives a file name for the uploaded media: the last path segment of
     * the URL when it contains an extension, otherwise a generic name with
     * an extension guessed from the HTTP content type.
     *
     * @param fileUrl     source URL of the media file
     * @param contentType HTTP Content-Type of the media response (may be null)
     * @return a file name, or "" when nothing can be derived
     */
    public static String getFileName(String fileUrl,String contentType) {
        String filename="";
        if (fileUrl != null && !"".equals(fileUrl)) {
            if(fileUrl.contains(".")){
                // URL carries an extension: keep the last path segment.
                filename = fileUrl.substring(fileUrl.lastIndexOf("/") + 1);
            }else{
                if(contentType==null || "".equals(contentType)){
                    return "";
                }
                // Map the content type to a file extension.
                String fileExt="";
                if ("image/jpeg".equals(contentType)) {
                    fileExt = ".jpg";
                } else if ("audio/mpeg".equals(contentType)) {
                    fileExt = ".mp3";
                } else if ("audio/amr".equals(contentType)) {
                    fileExt = ".amr";
                } else if ("video/mp4".equals(contentType)) {
                    fileExt = ".mp4";
                } else if ("video/mpeg4".equals(contentType)) {
                    fileExt = ".mp4";
                } else if ("text/plain".equals(contentType)) {
                    fileExt = ".txt";
                } else if ("text/xml".equals(contentType)) {
                    fileExt = ".xml";
                } else if ("application/pdf".equals(contentType)) {
                    fileExt = ".pdf";
                } else if ("application/msword".equals(contentType)) {
                    fileExt = ".doc";
                } else if ("application/vnd.ms-powerpoint".equals(contentType)) {
                    fileExt = ".ppt";
                } else if ("application/vnd.ms-excel".equals(contentType)) {
                    fileExt = ".xls";
                }
                filename="Media文件"+fileExt;
            }
        }
        return filename;
    }
}
|
#!/bin/bash
# Builds the Dapr JS SDK into build/ and stages the files needed to publish
# the package from that directory.
echo "====================================================="
echo "==================== Dapr JS SDK ===================="
echo "====================================================="
echo "Executing in $(pwd)"
echo "Description: Build the package in build/"
echo "====================================================="

# Prepare build: start from a clean build/ directory.
echo "Preparing Build"
rm -rf build/
mkdir build/

# @todo: gRPC binding pulling and building?
# echo ""
# echo "Building Protobuf"
# ./build-grpc.sh

# Build Package: install deps, lint, then compile TypeScript into build/.
echo "Building Library"
npm install > /dev/null
npm run lint > /dev/null
tsc --outDir ./build/ > /dev/null

# Prepare Publish: the package manifest and readme ship alongside the JS.
echo "Preparing Publish"
cp package.json build/
cp README.md build/

# Copy Proto Files (compiled output still needs the .proto definitions).
cp -R ./src/proto/ ./build
// Test module for the FormBuilderPageManager widget: exercises page/tab
// creation, removal, titles, descriptions and the pagination<->tabs mode
// switch.
YUI.add('aui-form-builder-page-manager-tests', function(Y) {

    var suite = new Y.Test.Suite('aui-form-builder-page-manager');

    suite.add(new Y.Test.Case({
        name: 'Form Builder Pages Tests',

        init: function() {
            this._container = Y.one('#container');
        },

        setUp: function() {
            // Fresh header/tabs/pages mount points for every test.
            this._container.append('<div id="header"></div><div id="tabs"></div>' +
                '<div id="pages"></div>');
        },

        tearDown: function() {
            this._container.empty();
            this._pages && this._pages.destroy();
        },

        /**
         * Creates a new form builder pages instance with the given config.
         *
         * @method createFormBuilderPageManager
         * @param {Object} config
         * @return {Y.FormBuilderPageManager}
         */
        createFormBuilderPageManager: function(config) {
            this._pages = new Y.FormBuilderPageManager(config);
            return this._pages;
        },

        /**
         * Simulates a `valuechange` event for the given input.
         *
         * @method _simulateInputChange
         * @param {Node} input The input node to simulate the event for.
         * @param {String} text The text that should be set as the input's final value.
         * @param {Function} callback The function to be called when the simulation is
         *   done.
         * @protected
         */
        _simulateInputChange: function(input, text, callback) {
            input.simulate('keydown');
            input.set('value', text);
            input.simulate('keydown');
            this.wait(callback, Y.ValueChange.POLL_INTERVAL);
        },

        'should update quantity': function() {
            var pages,
                title;

            pages = this.createFormBuilderPageManager({
                activePageNumber: 10,
                pageHeader: '#header',
                pagesQuantity: 10,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });
            title = pages.get('pageHeader').one('.form-builder-page-header-title');
            Y.Assert.areEqual('Untitled page (10 of ' + 10 + ')', title.get('placeholder'));

            // Raising pagesQuantity must be reflected in the placeholder.
            pages.set('pagesQuantity', 20);
            Y.Assert.areEqual('Untitled page (10 of ' + 20 + ')', title.get('placeholder'));
        },

        'should add a new page on addPage button clicked': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            // 3 items = prev control + 1 page + next control.
            Y.Assert.areEqual(3, Y.one('.pagination-content').all('li').size());
            Y.one('.form-builder-page-manager-add-last-position').simulate('click');
            Y.Assert.areEqual(4, Y.one('.pagination-content').all('li').size());
        },

        'should show the popover with a list of options': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.Assert.isTrue(Y.one('.form-builder-page-manager-popover').hasClass('popover-hidden'));
            Y.one('.form-builder-switch-view').simulate('click');
            Y.Assert.isFalse(Y.one('.form-builder-page-manager-popover').hasClass('popover-hidden'));
        },

        'should remove the current page on removePage button clicked': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 2,
                pageHeader: '#header',
                pagesQuantity: 2,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.Assert.areEqual(4, Y.one('.pagination-content').all('li').size());
            Y.one('.form-builder-page-manager-delete-page').simulate('click');
            Y.Assert.areEqual(3, Y.one('.pagination-content').all('li').size());

            // Deleting the last remaining page keeps one page in place.
            Y.one('.form-builder-page-manager-delete-page').simulate('click');
            Y.Assert.areEqual(3, Y.one('.pagination-content').all('li').size());
        },

        'should add a new tab on addPage button clicked': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.Assert.areEqual(1, Y.one('.tabbable-content').all('.tab').size());
            Y.one('.form-builder-page-manager-add-last-position').simulate('click');
            Y.Assert.areEqual(2, Y.one('.tabbable-content').all('.tab').size());
        },

        'should remove the current tab on removePage button clicked': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 2,
                pageHeader: '#header',
                pagesQuantity: 2,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.Assert.areEqual(2, Y.one('.tabbable-content').all('.tab').size());
            Y.one('.form-builder-page-manager-delete-page').simulate('click');
            Y.Assert.areEqual(1, Y.one('.tabbable-content').all('.tab').size());

            // Deleting the last remaining tab keeps one tab in place.
            Y.one('.form-builder-page-manager-delete-page').simulate('click');
            Y.Assert.areEqual(1, Y.one('.tabbable-content').all('.tab').size());
        },

        'should update `title` attribute on title input change': function() {
            var pages,
                titleNode;

            pages = this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            titleNode = Y.one('.form-builder-page-header-title');
            Y.Assert.areEqual('Untitled page (1 of 1)', titleNode.get('placeholder'));
            this._simulateInputChange(titleNode, 'title', function() {
                Y.Assert.areEqual('title', pages.get('titles')[0]);
                // Clearing the input falls back to the placeholder.
                this._simulateInputChange(titleNode, '', function() {
                    Y.Assert.areEqual('Untitled page (1 of 1)', titleNode.get('placeholder'));
                });
            });
        },

        'should update tab title on title of the current page change': function() {
            var pages,
                titleNode;

            pages = this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            // Switch to tabs mode so the tab labels are visible.
            Y.one('.form-builder-page-manager-switch-mode').simulate('click');

            titleNode = Y.one('.form-builder-page-header-title');
            Y.Assert.areEqual('Untitled page (1 of 1)', titleNode.get('placeholder'));
            this._simulateInputChange(titleNode, 'title', function() {
                Y.Assert.areEqual('1.title', Y.one('.tab-label').text());
                this._simulateInputChange(titleNode, '', function() {
                    Y.Assert.areEqual('1.Untitled page (1 of 1)', Y.one('.tab-label').text());
                });
            });
        },

        'should show/hide the tabview/pagination on switch button clicked': function() {
            var pages;

            pages = this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            // Default mode shows pagination, hides tabs; each click toggles.
            Y.Assert.isFalse(Y.one('.pagination-content').hasClass('hide'));
            Y.Assert.isTrue(Y.one('.tabbable-content').hasClass('hide'));

            Y.one('.form-builder-page-manager-switch-mode').simulate('click');
            Y.Assert.isTrue(Y.one('.pagination-content').hasClass('hide'));
            Y.Assert.isFalse(Y.one('.tabbable-content').hasClass('hide'));

            Y.one('.form-builder-page-manager-switch-mode').simulate('click');
            Y.Assert.isFalse(Y.one('.pagination-content').hasClass('hide'));
            Y.Assert.isTrue(Y.one('.tabbable-content').hasClass('hide'));
        },

        'should show the right title on page change': function() {
            var titleNode;

            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 2,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs',
                titles: ['Title', '']
            });

            titleNode = Y.one('.form-builder-page-header-title');
            Y.Assert.areEqual('Title', titleNode.get('value'));

            Y.one('.pagination-control').simulate('click');
            Y.Assert.areEqual('Untitled page (2 of 2)', titleNode.get('placeholder'));
        },

        'should update `descriptions` attribute on title input change': function() {
            var descriptionNode,
                pages;

            pages = this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            descriptionNode = Y.one('.form-builder-page-header-description');
            Y.Assert.areEqual('An aditional info about this page', descriptionNode.get('placeholder'));
            this._simulateInputChange(descriptionNode, 'descriptions', function() {
                Y.Assert.areEqual('descriptions', pages.get('descriptions')[0]);
                this._simulateInputChange(descriptionNode, '', function() {
                    Y.Assert.areEqual('An aditional info about this page', descriptionNode.get('placeholder'));
                });
            });
        },

        'should initialize with pagination tabs if the default option was replaced': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs',
                mode: 'tabs'
            });

            Y.Assert.isFalse(Y.one('.tabbable-content').hasClass('hide'));
            Y.Assert.isTrue(Y.one('.pagination-content').hasClass('hide'));
        },

        'should not mode attribute accept values not equal to `pagination` or `tabs`': function() {
            var pages;

            pages = this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs',
                mode: 'numbers'
            });

            Y.Assert.areNotEqual(pages.get('mode'), 'numbers');
        },

        'should the active tab match with current active page after switch page mode from pagination to tabs': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 2,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.all('.pagination-control').item(1).simulate('click');
            Y.one('.form-builder-page-manager-switch-mode').simulate('click');
            Y.Assert.isTrue(Y.all('.nav.nav-tabs .tab').item(1).hasClass('active'));
        },

        'should the active page match with current active tab after page mode change from tabs to pagination': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 2,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs',
                mode: 'tabs'
            });

            Y.all('.nav.nav-tabs .tab').item(1).simulate('click');
            Y.one('.form-builder-page-manager-switch-mode').simulate('click');
            Y.Assert.isTrue(Y.all('.pagination.pagination-content li').item(2).hasClass('active'));
        },

        'should show `Reset page` button if the page quantity is equal to one': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.Assert.areEqual('Reset page', Y.one('.form-builder-page-manager-delete-page').text());
        },

        'should show `Delete current page` button if the page quantity is greater than one': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 2,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.Assert.areEqual('Delete current page', Y.one('.form-builder-page-manager-delete-page').text());
        },

        'should show `Reset page` button dynamically after deleting the second page': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 2,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.one('.form-builder-page-manager-delete-page').simulate('click');
            Y.Assert.areEqual('Reset page', Y.one('.form-builder-page-manager-delete-page').text());
        },

        'should show `Delete current page` button dynamically after adding a second page': function() {
            this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 1,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            Y.one('.form-builder-page-manager-add-last-position').simulate('click');
            Y.Assert.areEqual('Delete current page', Y.one('.form-builder-page-manager-delete-page').text());
        },

        'should disable Pagination and TabView when `disablePaginations` is called': function() {
            var pages = this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 3,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            pages.disablePaginations();
            Y.Assert.areEqual(Y.all('.pagination-content li.disabled').size(), 5);
            Y.Assert.areEqual(Y.all('.tabbable-content li.disabled').size(), 3);
        },

        'should enable Pagination and TabView when `disablePaginations` is called': function() {
            var pages = this.createFormBuilderPageManager({
                activePageNumber: 1,
                pageHeader: '#header',
                pagesQuantity: 3,
                paginationContainer: '#pages',
                tabviewContainer: '#tabs'
            });

            pages.enablePaginations();
            Y.Assert.areEqual(Y.all('.pagination-content li.disabled').size(), 0);
            Y.Assert.areEqual(Y.all('.tabbable-content li.disabled').size(), 0);
        }
    }));

    Y.Test.Runner.add(suite);
}, '', {
    requires: ['aui-form-builder-page-manager', 'node-event-simulate', 'test'],

    // Only run these tests on non-IE browsers or IE versions newer than 8.
    test: function(Y) {
        return Y.UA.ie === 0 || Y.UA.ie > 8;
    }
});
|
/*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package pvtdatastorage
import (
"bytes"
"encoding/binary"
"math"
"github.com/Yunpeng-J/fabric-protos-go/ledger/rwset"
"github.com/Yunpeng-J/HLF-2.2/core/ledger/internal/version"
"github.com/golang/protobuf/proto"
"github.com/pkg/errors"
"github.com/willf/bitset"
)
var (
pendingCommitKey = []byte{0}
lastCommittedBlkkey = []byte{1}
pvtDataKeyPrefix = []byte{2}
expiryKeyPrefix = []byte{3}
elgPrioritizedMissingDataGroup = []byte{4}
inelgMissingDataGroup = []byte{5}
collElgKeyPrefix = []byte{6}
lastUpdatedOldBlocksKey = []byte{7}
elgDeprioritizedMissingDataGroup = []byte{8}
nilByte = byte(0)
emptyValue = []byte{}
)
func getDataKeysForRangeScanByBlockNum(blockNum uint64) ([]byte, []byte) {
startKey := append(pvtDataKeyPrefix, version.NewHeight(blockNum, 0).ToBytes()...)
endKey := append(pvtDataKeyPrefix, version.NewHeight(blockNum+1, 0).ToBytes()...)
return startKey, endKey
}
func getExpiryKeysForRangeScan(minBlkNum, maxBlkNum uint64) ([]byte, []byte) {
startKey := append(expiryKeyPrefix, version.NewHeight(minBlkNum, 0).ToBytes()...)
endKey := append(expiryKeyPrefix, version.NewHeight(maxBlkNum+1, 0).ToBytes()...)
return startKey, endKey
}
func encodeLastCommittedBlockVal(blockNum uint64) []byte {
return proto.EncodeVarint(blockNum)
}
func decodeLastCommittedBlockVal(blockNumBytes []byte) uint64 {
s, _ := proto.DecodeVarint(blockNumBytes)
return s
}
func encodeDataKey(key *dataKey) []byte {
dataKeyBytes := append(pvtDataKeyPrefix, version.NewHeight(key.blkNum, key.txNum).ToBytes()...)
dataKeyBytes = append(dataKeyBytes, []byte(key.ns)...)
dataKeyBytes = append(dataKeyBytes, nilByte)
return append(dataKeyBytes, []byte(key.coll)...)
}
func encodeDataValue(collData *rwset.CollectionPvtReadWriteSet) ([]byte, error) {
return proto.Marshal(collData)
}
func encodeExpiryKey(expiryKey *expiryKey) []byte {
// reusing version encoding scheme here
return append(expiryKeyPrefix, version.NewHeight(expiryKey.expiringBlk, expiryKey.committingBlk).ToBytes()...)
}
func encodeExpiryValue(expiryData *ExpiryData) ([]byte, error) {
return proto.Marshal(expiryData)
}
func decodeExpiryKey(expiryKeyBytes []byte) (*expiryKey, error) {
height, _, err := version.NewHeightFromBytes(expiryKeyBytes[1:])
if err != nil {
return nil, err
}
return &expiryKey{expiringBlk: height.BlockNum, committingBlk: height.TxNum}, nil
}
func decodeExpiryValue(expiryValueBytes []byte) (*ExpiryData, error) {
expiryData := &ExpiryData{}
err := proto.Unmarshal(expiryValueBytes, expiryData)
return expiryData, err
}
func decodeDatakey(datakeyBytes []byte) (*dataKey, error) {
v, n, err := version.NewHeightFromBytes(datakeyBytes[1:])
if err != nil {
return nil, err
}
blkNum := v.BlockNum
tranNum := v.TxNum
remainingBytes := datakeyBytes[n+1:]
nilByteIndex := bytes.IndexByte(remainingBytes, nilByte)
ns := string(remainingBytes[:nilByteIndex])
coll := string(remainingBytes[nilByteIndex+1:])
return &dataKey{nsCollBlk{ns, coll, blkNum}, tranNum}, nil
}
func decodeDataValue(datavalueBytes []byte) (*rwset.CollectionPvtReadWriteSet, error) {
collPvtdata := &rwset.CollectionPvtReadWriteSet{}
err := proto.Unmarshal(datavalueBytes, collPvtdata)
return collPvtdata, err
}
// encodeElgPrioMissingDataKey builds the key for an eligible, prioritized
// missing-data entry:
// elgPrioritizedMissingDataGroup | reverseOrderVarUint64(blkNum) | ns | nilByte | coll.
//
// When the missing-pvtdata reconciler asks for missing data info, it is
// desirable to return the missing pvtdata associated with the most recent
// blocks first, so that missing private data in the state DB can be fixed
// sooner and the "private data matching public hash version is not
// available" error during endorsements is reduced. To give priority to the
// most recent block, a reverse-order-preserving encoding is used for the
// block number, which makes newer blocks sort first and simplifies the
// implementation of GetMissingPvtDataInfoForMostRecentBlocks().
func encodeElgPrioMissingDataKey(key *missingDataKey) []byte {
	encKey := append(elgPrioritizedMissingDataGroup, encodeReverseOrderVarUint64(key.blkNum)...)
	encKey = append(encKey, []byte(key.ns)...)
	encKey = append(encKey, nilByte)
	return append(encKey, []byte(key.coll)...)
}

// encodeElgDeprioMissingDataKey is identical to encodeElgPrioMissingDataKey
// except that the entry is written under the deprioritized group prefix.
func encodeElgDeprioMissingDataKey(key *missingDataKey) []byte {
	encKey := append(elgDeprioritizedMissingDataGroup, encodeReverseOrderVarUint64(key.blkNum)...)
	encKey = append(encKey, []byte(key.ns)...)
	encKey = append(encKey, nilByte)
	return append(encKey, []byte(key.coll)...)
}
// decodeElgMissingDataKey is the inverse of the eligible missing-data key
// encoders (both prioritized and deprioritized groups share this layout).
// The reverse-order block number is consumed first, so the split on nilByte
// only sees ns | nilByte | coll.
// NOTE(review): bytes.Split assumes ns and coll themselves contain no nil
// byte; otherwise the parts would be mis-assigned — confirm that constraint.
func decodeElgMissingDataKey(keyBytes []byte) *missingDataKey {
	key := &missingDataKey{nsCollBlk: nsCollBlk{}}
	blkNum, numBytesConsumed := decodeReverseOrderVarUint64(keyBytes[1:])
	splittedKey := bytes.Split(keyBytes[numBytesConsumed+1:], []byte{nilByte})
	key.ns = string(splittedKey[0])
	key.coll = string(splittedKey[1])
	key.blkNum = blkNum
	return key
}
// encodeInelgMissingDataKey builds the key for an ineligible missing-data
// entry. Unlike the eligible variants, the block number goes LAST:
// inelgMissingDataGroup | ns | nilByte | coll | nilByte | reverseOrderVarUint64(blkNum).
// This layout supports range scans per (ns, coll) across blocks.
func encodeInelgMissingDataKey(key *missingDataKey) []byte {
	encKey := append(inelgMissingDataGroup, []byte(key.ns)...)
	encKey = append(encKey, nilByte)
	encKey = append(encKey, []byte(key.coll)...)
	encKey = append(encKey, nilByte)
	return append(encKey, []byte(encodeReverseOrderVarUint64(key.blkNum))...)
}

// decodeInelgMissingDataKey is the inverse of encodeInelgMissingDataKey.
func decodeInelgMissingDataKey(keyBytes []byte) *missingDataKey {
	key := &missingDataKey{nsCollBlk: nsCollBlk{}}
	// SplitN with limit 3 keeps the block-number tail intact:
	// the encoded bytes for blknum may themselves contain nil bytes
	splittedKey := bytes.SplitN(keyBytes[1:], []byte{nilByte}, 3)
	key.ns = string(splittedKey[0])
	key.coll = string(splittedKey[1])
	key.blkNum, _ = decodeReverseOrderVarUint64(splittedKey[2])
	return key
}
// encodeMissingDataValue serializes the per-block bitmap of transactions
// with missing private data.
func encodeMissingDataValue(bitmap *bitset.BitSet) ([]byte, error) {
	return bitmap.MarshalBinary()
}

// decodeMissingDataValue is the inverse of encodeMissingDataValue.
func decodeMissingDataValue(bitmapBytes []byte) (*bitset.BitSet, error) {
	bitmap := &bitset.BitSet{}
	if err := bitmap.UnmarshalBinary(bitmapBytes); err != nil {
		return nil, err
	}
	return bitmap, nil
}
// encodeCollElgKey builds the key under which a collection-eligibility
// change at the given block is recorded; the reverse-order encoding makes
// newer blocks sort first.
func encodeCollElgKey(blkNum uint64) []byte {
	return append(collElgKeyPrefix, encodeReverseOrderVarUint64(blkNum)...)
}

// decodeCollElgKey extracts the block number from a collection-eligibility key.
func decodeCollElgKey(b []byte) uint64 {
	blkNum, _ := decodeReverseOrderVarUint64(b[1:])
	return blkNum
}

// encodeCollElgVal serializes the collection-eligibility info via protobuf.
func encodeCollElgVal(m *CollElgInfo) ([]byte, error) {
	return proto.Marshal(m)
}

// decodeCollElgVal is the inverse of encodeCollElgVal; unmarshal failures
// are wrapped with a stack trace.
func decodeCollElgVal(b []byte) (*CollElgInfo, error) {
	m := &CollElgInfo{}
	if err := proto.Unmarshal(b, m); err != nil {
		return nil, errors.WithStack(err)
	}
	return m, nil
}
// createRangeScanKeysForElgMissingData returns (startKey, endKey) bounds for
// scanning eligible missing-data entries in the given group from blkNum down
// to block 0. Because block numbers are reverse-order encoded, the key for
// blkNum sorts BEFORE the key for 0.
func createRangeScanKeysForElgMissingData(blkNum uint64, group []byte) ([]byte, []byte) {
	startKey := append(group, encodeReverseOrderVarUint64(blkNum)...)
	endKey := append(group, encodeReverseOrderVarUint64(0)...)
	return startKey, endKey
}

// createRangeScanKeysForInelgMissingData returns (startKey, endKey) bounds
// for scanning ineligible missing-data entries of one (ns, coll) pair from
// maxBlkNum down to block 0 (block number is the key suffix for this group).
func createRangeScanKeysForInelgMissingData(maxBlkNum uint64, ns, coll string) ([]byte, []byte) {
	startKey := encodeInelgMissingDataKey(
		&missingDataKey{
			nsCollBlk: nsCollBlk{
				ns:     ns,
				coll:   coll,
				blkNum: maxBlkNum,
			},
		},
	)
	endKey := encodeInelgMissingDataKey(
		&missingDataKey{
			nsCollBlk: nsCollBlk{
				ns:     ns,
				coll:   coll,
				blkNum: 0,
			},
		},
	)
	return startKey, endKey
}

// createRangeScanKeysForCollElg returns bounds covering ALL
// collection-eligibility entries (MaxUint64 reverse-encodes to the smallest
// key, 0 to the largest).
func createRangeScanKeysForCollElg() (startKey, endKey []byte) {
	return encodeCollElgKey(math.MaxUint64),
		encodeCollElgKey(0)
}
// datakeyRange returns (startKey, endKey) bounds covering every data key of
// a single block, from txNum 0 through MaxUint64.
func datakeyRange(blockNum uint64) ([]byte, []byte) {
	startKey := append(pvtDataKeyPrefix, version.NewHeight(blockNum, 0).ToBytes()...)
	endKey := append(pvtDataKeyPrefix, version.NewHeight(blockNum, math.MaxUint64).ToBytes()...)
	return startKey, endKey
}

// eligibleMissingdatakeyRange returns bounds covering the eligible
// prioritized missing-data entries of exactly one block: with reverse-order
// encoding, blkNum's prefix sorts immediately before blkNum-1's.
// NOTE(review): blkNum == 0 would wrap blkNum-1 to MaxUint64; callers
// presumably always pass blkNum >= 1 — confirm.
func eligibleMissingdatakeyRange(blkNum uint64) ([]byte, []byte) {
	startKey := append(elgPrioritizedMissingDataGroup, encodeReverseOrderVarUint64(blkNum)...)
	endKey := append(elgPrioritizedMissingDataGroup, encodeReverseOrderVarUint64(blkNum-1)...)
	return startKey, endKey
}
// encodeReverseOrderVarUint64 returns a byte representation of a uint64 such
// that the number is first subtracted from MaxUint64 and then all leading
// 0xff bytes are trimmed and replaced by a single count byte holding the
// number of trimmed bytes. This reduces the encoded size, and under plain
// byte-order comparison it guarantees
// encodeReverseOrderVarUint64(A) > encodeReverseOrderVarUint64(B) whenever
// B > A (i.e. larger numbers sort earlier).
func encodeReverseOrderVarUint64(number uint64) []byte {
	bytes := make([]byte, 8)
	binary.BigEndian.PutUint64(bytes, math.MaxUint64-number)
	// count leading 0xff bytes (these are implied by the count byte below)
	numFFBytes := 0
	for _, b := range bytes {
		if b != 0xff {
			break
		}
		numFFBytes++
	}
	size := 8 - numFFBytes
	encodedBytes := make([]byte, size+1)
	// first byte = number of trimmed 0xff bytes (always fits in one varint byte)
	encodedBytes[0] = proto.EncodeVarint(uint64(numFFBytes))[0]
	copy(encodedBytes[1:], bytes[numFFBytes:])
	return encodedBytes
}
// decodeReverseOrderVarUint64 decodes a number encoded by
// encodeReverseOrderVarUint64 and also returns how many bytes were consumed
// (needed when the encoding is embedded inside a larger key).
func decodeReverseOrderVarUint64(bytes []byte) (uint64, int) {
	// first byte holds the count of trimmed 0xff bytes
	s, _ := proto.DecodeVarint(bytes)
	numFFBytes := int(s)
	decodedBytes := make([]byte, 8)
	realBytesNum := 8 - numFFBytes
	copy(decodedBytes[numFFBytes:], bytes[1:realBytesNum+1])
	numBytesConsumed := realBytesNum + 1
	// restore the trimmed leading 0xff bytes
	for i := 0; i < numFFBytes; i++ {
		decodedBytes[i] = 0xff
	}
	return (math.MaxUint64 - binary.BigEndian.Uint64(decodedBytes)), numBytesConsumed
}
|
'use strict';

/**
 * hasOwnProperty reference (guards against objects with a shadowed method).
 */
var has = Object.prototype.hasOwnProperty;

/**
 * Split reference.
 */
var split = String.prototype.split;

/**
 * Walk `paths` down a (possibly nested) object and return the value found.
 * Only own properties are followed; any missing step, or a nullish input,
 * yields `undefined`.
 *
 * @name locate
 * @api private
 * @param {String[]} paths
 * @param {Object} obj
 * @return {*} The property if found; otherwise `undefined`.
 * @example
 * locate(['address', 'coordinates', 'x'], {
 *   address: { coordinates: { x: 10, y: 20 } }
 * });
 * //=> 10
 */
var locate = function locate(paths, obj) {
  if (obj == null) {
    return undefined;
  }
  var current = obj;
  for (var index = 0; index < paths.length; index += 1) {
    // bail out as soon as a step is nullish or not an own property
    if (current == null || !has.call(current, paths[index])) {
      return undefined;
    }
    current = current[paths[index]];
  }
  return current;
};
/**
 * Find a path on an object given a separator.
 *
 * @name retrieveOn
 * @api public
 * @param {string} separator
 * @param {string} str
 * @param {object} obj
 * @return {*} The property at the path if found; otherwise `undefined`.
 * @example
 * retrieveOn('/', 'address/coordinates/x', {
 *   address: { coordinates: { x: 10, y: 20 } }
 * });
 * //=> 10
 */
var retrieveOn = function retrieveOn(separator, str, obj) {
  var segments = split.call(str, separator);
  return locate(segments, obj);
};
/**
 * Retrieve a property on an object located at a dot-delimited path.
 *
 * @name retrieve
 * @api public
 * @param {string} str The dot-delimited path to search for.
 * @param {Object} obj The object to search.
 * @return {*} The property if found; otherwise `undefined`.
 * @example
 * retrieve('address.coordinates.y', {
 *   address: { coordinates: { x: 10, y: 20 } }
 * });
 * //=> 20
 */
var retrieve = function retrieve(str, obj) {
  return retrieveOn('.', str, obj);
};

/**
 * Exports: `retrieve` is the module itself, with `retrieveOn` attached
 * as `.on` for custom separators.
 */
retrieve.on = retrieveOn;
module.exports = retrieve;
|
'use strict';

const SolutionExplorerService = require('./dist/index').SolutionExplorerService;

/**
 * Registers the SolutionExplorerService in the given IoC container under the
 * key 'SolutionExplorer', declaring 'SolutionExplorer.Repository' as its
 * dependency.
 */
function registerInContainer(container) {
  const registration = container.register('SolutionExplorer', SolutionExplorerService);
  registration.dependencies('SolutionExplorer.Repository');
}

module.exports.registerInContainer = registerInContainer;
|
#!/bin/bash
# Argument parsing
# Pass "jacoco" as the first (and only recognized) argument to enable JaCoCo
# code-coverage instrumentation of the test JVMs further below.
if [ "$1" = "jacoco" ]
then
    JACOCO_ENABLED=true
else
    JACOCO_ENABLED=false
fi
# Clean out any old sandbox, make a new one
OUTDIR=sandbox/single
MKDIR=mkdir
SEP=:
# On Cygwin use the .exe tool name and the Windows classpath separator ';'
case "`uname`" in
CYGWIN* )
    MKDIR=mkdir.exe
    SEP=";"
    ;;
esac
rm -fr $OUTDIR
$MKDIR -p $OUTDIR
# cleanup aggregates the per-JVM exit statuses written to $OUTDIR/status.*
# (one file per test JVM). After stripping whitespace, "00000" means all five
# JVMs exited 0; anything else dumps all the logs and fails the build.
function cleanup () {
    RC="`paste $OUTDIR/status.* | sed 's/[[:blank:]]//g'`"
    if [ "$RC" != "00000" ]; then
        cat $OUTDIR/out.*
        echo h2o-algos junit tests FAILED
        exit 1
    else
        echo h2o-algos junit tests PASSED
        exit 0
    fi
}
# Report results (and clean exit code) even if the run is interrupted.
trap cleanup SIGTERM SIGINT
# Find java command
if [ -z "$TEST_JAVA_HOME" ]; then
    # Use default
    JAVA_CMD="java"
else
    # Use test java home
    JAVA_CMD="$TEST_JAVA_HOME/bin/java"
    # Increase XMX since JAVA_HOME can point to java6
    JAVA6_REGEXP=".*1\.6.*"
    if [[ $TEST_JAVA_HOME =~ $JAVA6_REGEXP ]]; then
        JAVA_CMD="${JAVA_CMD}"
    fi
fi
# Gradle puts files:
#   build/classes/main - Main h2o core classes
#   build/classes/test - Test h2o core classes
#   build/resources/main - Main resources (e.g. page.html)
MAX_MEM=${H2O_JVM_XMX:-2200m}
# Check if coverage should be run; JaCoCo needs a much larger heap.
if [ $JACOCO_ENABLED = true ]
then
    AGENT="../jacoco/jacocoagent.jar"
    COVERAGE="-javaagent:$AGENT=destfile=build/jacoco/h2o-algos.exec"
    MAX_MEM=${H2O_JVM_XMX:-8g}
else
    COVERAGE=""
fi
# Full JVM invocation shared by all five workers; recorded for debugging.
JVM="nice $JAVA_CMD $COVERAGE -ea -Xmx${MAX_MEM} -Xms${MAX_MEM} -cp ${JVM_CLASSPATH}"
echo "$JVM" > $OUTDIR/jvm_cmd.txt
# Ahhh... but the makefile runs the tests skipping the jar'ing step when possible.
# Also, sometimes see test files in the main-class directory, so put the test
# classpath before the main classpath.
#JVM="nice java -ea -cp build/classes/test${SEP}build/classes/main${SEP}../h2o-core/build/classes/test${SEP}../h2o-core/build/classes/main${SEP}../lib/*"
# Tests
# Must run first, before the cloud locks (because it tests cloud locking)
JUNIT_TESTS_BOOT="hex.AAA_PreCloudLock"
JUNIT_TESTS_BIG="hex.word2vec.Word2VecTest"
# Runner
# Default JUnit runner is org.junit.runner.JUnitCore
JUNIT_RUNNER="water.junit.H2OTestRunner"
# find all java in the src/test directory
# Cut the "./water/MRThrow.java" down to "water/MRThrow.java"
# Cut the "water/MRThrow.java" down to "water/MRThrow"
# Slash/dot "water/MRThrow" becomes "water.MRThrow"
# On this h2o-algos testMultiNode.sh only, force the tests.txt to be in the same order for all machines.
# If sorted, the result of the cd/grep varies by machine.
# If randomness is desired, replace sort with the unix 'shuf'
# Use /usr/bin/sort because of cygwin on windows.
# Windows has sort.exe which you don't want.  Fails?  (is it a lineend issue)
(cd src/test/java; /usr/bin/find . -name '*.java' | cut -c3- | sed 's/.....$//' | sed -e 's/\//./g') | grep -v $JUNIT_TESTS_BOOT | grep -v $JUNIT_TESTS_BIG | /usr/bin/sort | grep -v AAA_PreCloudLock > $OUTDIR/tests.txt
# Output the comma-separated list of ignored/dooonly tests
# Ignored tests trump do-only tests
echo $IGNORE > $OUTDIR/tests.ignore.txt
echo $DOONLY > $OUTDIR/tests.doonly.txt
# Launch 4 helper JVMs.  All output redir'd at the OS level to sandbox files.
CLUSTER_NAME=junit_cluster_$$
CLUSTER_BASEPORT=44000
# If coverage is being run, then pass a system variable flag so that timeout limits are increased.
if [ $JACOCO_ENABLED = true ]
then
    JACOCO_FLAG="-Dtest.jacocoEnabled=true"
else
    JACOCO_FLAG=""
fi
# Launch last driver JVM.  All output redir'd at the OS level to sandbox files.
# The test list is round-robin partitioned across the 5 JVMs via awk NR%5;
# each JVM records its exit code in status.N and its output in out.N.
echo Running h2o-algos junit tests...
($JVM -Ddoonly.tests=$DOONLY -Dbuild.id=$BUILD_ID -Dignore.tests=$IGNORE -Djob.name=$JOB_NAME -Dgit.commit=$GIT_COMMIT -Dgit.branch=$GIT_BRANCH -Dai.h2o.name=$CLUSTER_NAME.1 -Dai.h2o.baseport=$CLUSTER_BASEPORT -Dai.h2o.ga_opt_out=yes $JACOCO_FLAG $JUNIT_RUNNER `cat $OUTDIR/tests.txt | awk 'NR%5==0'` 2>&1 ; echo $? > $OUTDIR/status.1) 1> $OUTDIR/out.1 2>&1 & PID_1=$!
($JVM -Ddoonly.tests=$DOONLY -Dbuild.id=$BUILD_ID -Dignore.tests=$IGNORE -Djob.name=$JOB_NAME -Dgit.commit=$GIT_COMMIT -Dgit.branch=$GIT_BRANCH -Dai.h2o.name=$CLUSTER_NAME.2 -Dai.h2o.baseport=$CLUSTER_BASEPORT -Dai.h2o.ga_opt_out=yes $JACOCO_FLAG $JUNIT_RUNNER `cat $OUTDIR/tests.txt | awk 'NR%5==1'` 2>&1 ; echo $? > $OUTDIR/status.2) 1> $OUTDIR/out.2 2>&1 & PID_2=$!
($JVM -Ddoonly.tests=$DOONLY -Dbuild.id=$BUILD_ID -Dignore.tests=$IGNORE -Djob.name=$JOB_NAME -Dgit.commit=$GIT_COMMIT -Dgit.branch=$GIT_BRANCH -Dai.h2o.name=$CLUSTER_NAME.3 -Dai.h2o.baseport=$CLUSTER_BASEPORT -Dai.h2o.ga_opt_out=yes $JACOCO_FLAG $JUNIT_RUNNER `cat $OUTDIR/tests.txt | awk 'NR%5==2'` 2>&1 ; echo $? > $OUTDIR/status.3) 1> $OUTDIR/out.3 2>&1 & PID_3=$!
($JVM -Ddoonly.tests=$DOONLY -Dbuild.id=$BUILD_ID -Dignore.tests=$IGNORE -Djob.name=$JOB_NAME -Dgit.commit=$GIT_COMMIT -Dgit.branch=$GIT_BRANCH -Dai.h2o.name=$CLUSTER_NAME.4 -Dai.h2o.baseport=$CLUSTER_BASEPORT -Dai.h2o.ga_opt_out=yes $JACOCO_FLAG $JUNIT_RUNNER `cat $OUTDIR/tests.txt | awk 'NR%5==3'` 2>&1 ; echo $? > $OUTDIR/status.4) 1> $OUTDIR/out.4 2>&1 & PID_4=$!
($JVM -Ddoonly.tests=$DOONLY -Dbuild.id=$BUILD_ID -Dignore.tests=$IGNORE -Djob.name=$JOB_NAME -Dgit.commit=$GIT_COMMIT -Dgit.branch=$GIT_BRANCH -Dai.h2o.name=$CLUSTER_NAME.5 -Dai.h2o.baseport=$CLUSTER_BASEPORT -Dai.h2o.ga_opt_out=yes $JACOCO_FLAG $JUNIT_RUNNER `cat $OUTDIR/tests.txt | awk 'NR%5==4'` 2>&1 ; echo $? > $OUTDIR/status.5) 1> $OUTDIR/out.5 2>&1 & PID_5=$!
wait ${PID_1} ${PID_2} ${PID_3} ${PID_4} ${PID_5} 1> /dev/null 2>&1
# Append the 10 slowest tests to out.0 for quick inspection.
grep EXECUTION $OUTDIR/out.* | sed -e "s/.*TEST \(.*\) EXECUTION TIME: \(.*\) (Wall.*/\2 \1/" | sort -gr | head -n 10 >> $OUTDIR/out.0
cleanup
|
#!/bin/sh
# Split each pathway database's source XML into per-record files, then parse
# the Molecules / Interactions / Pathways of each database.
#
# printf is used instead of `echo "... \n"`: POSIX does not specify whether
# echo interprets backslash escapes, so the old form printed a literal "\n"
# on some shells instead of a blank line.
printf '%s\n\n' "Generate Interaction Molecule and Pathway files for Reactome"
perl splitXML.pl Reactome Reactome.xml
printf '%s\n\n' "Generate Interaction Molecule and Pathway files for NCI-Nature"
perl splitXML.pl NCI_Nature NCI-Nature_Curated.xml
printf '%s\n\n' "Generate Interaction Molecule and Pathway files for BioCarta"
perl splitXML.pl BioCarta BioCarta.xml
perl MoleculeParser.pl NCI_Nature Molecules.xml
perl InteractionParser.pl NCI_Nature Interactions.xml
perl PathwayParser.pl NCI_Nature Pathways.xml
perl MoleculeParser.pl BioCarta Molecules.xml
perl InteractionParser.pl BioCarta Interactions.xml
perl PathwayParser.pl BioCarta Pathways.xml
perl MoleculeParser.pl Reactome Molecules.xml
perl InteractionParser.pl Reactome Interactions.xml
perl PathwayParser.pl Reactome Pathways.xml
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# NOTE(review): the FLUTTER_ROOT / FLUTTER_APPLICATION_PATH /
# FLUTTER_FRAMEWORK_DIR values below are machine-specific absolute paths
# emitted by the Flutter tool on the developer's machine; they are
# regenerated on each build and are not portable.
export "FLUTTER_ROOT=/home/karan/Downloads/flutter_linux_v1.9.1+hotfix.4-stable/flutter"
export "FLUTTER_APPLICATION_PATH=/media/karan/DATA/flutter folk/flutter_keyboard_visibility-master/example"
export "FLUTTER_TARGET=lib/main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build/ios"
export "FLUTTER_FRAMEWORK_DIR=/home/karan/Downloads/flutter_linux_v1.9.1+hotfix.4-stable/flutter/bin/cache/artifacts/engine/ios"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
|
# For each of the 100 simulated datasets, align the unaligned GAPDH sequences
# with three tools and write each result into the matching tool subdirectory:
#   - Clustal Omega in automatic mode
#   - MAFFT local-pair and global modes (max 100 refinement iterations)
#   - MUSCLE (max 100 iterations)
for i in {1..100};
do
    input=sim_$i/human_gapdh_Unaligned.FASTA
    clustalo --auto -i $input -o sim_$i/clustal/nucleotide/auto.fasta
    mafft --localpair --maxiterate 100 $input > sim_$i/mafft/nucleotide/local_max100.fasta
    mafft --maxiterate 100 $input > sim_$i/mafft/nucleotide/global_max100.fasta
    muscle -maxiters 100 -in $input -out sim_$i/muscle/nucleotide/max100.fasta
done
|
#!/bin/bash
# Remove build artifacts and editor/bytecode droppings from the project tree.
# cd into the script's own directory so the relative paths below are safe no
# matter where the script is invoked from; quoting protects paths containing
# spaces, and the || exit aborts rather than deleting from the wrong place.
cd "$(dirname "$0")" || exit 1
rm -rf LambdaTool.egg-info/ build/ dist/
# -exec ... + instead of piping to xargs: handles whitespace in paths and
# does nothing (instead of misbehaving) when no files match; -prune stops
# find from descending into directories it is about to delete.
find . -name .ropeproject -type d -prune -exec rm -rf {} +
find . -name "*.pyc" -type f -exec rm -f {} +
|
<reponame>zentrillion/azure-iot-samples-node<filename>iot-hub/Quickstarts/read-d2c-messages/node_modules/azure-event-hubs/typings/lib/auth/aad.d.ts
import { ApplicationTokenCredentials, DeviceTokenCredentials, UserTokenCredentials, MSITokenCredentials } from "ms-rest-azure";
import { TokenInfo, TokenProvider } from "./token";
/**
 * Defines the AAD (Azure ActiveDirectory) TokenProvider.
 * @class AadTokenProvider
 */
export declare class AadTokenProvider implements TokenProvider {
    /**
     * @property {(ApplicationTokenCredentials | UserTokenCredentials | DeviceTokenCredentials | MSITokenCredentials)} credentials - The credentials object after successful authentication with AAD.
     */
    credentials: ApplicationTokenCredentials | UserTokenCredentials | DeviceTokenCredentials | MSITokenCredentials;
    /**
     * @property {number} tokenRenewalMarginInSeconds - The number of seconds within which it is
     * good to renew the token. A constant set to 270 seconds (4.5 minutes). Adal has a set window of 5 minutes
     * when it refreshes the token from its token cache.
     */
    readonly tokenRenewalMarginInSeconds: number;
    /**
     * @property {number} tokenValidTimeInSeconds - The number of seconds for which the
     * token is valid. A constant set to 3599 seconds (~1 hour). Adal has a set valid time of
     * 1 hour (3600 seconds) when it refreshes the access token.
     */
    readonly tokenValidTimeInSeconds: number;
    /**
     * Creates a token provider backed by an already-authenticated AAD
     * credentials object.
     * @param credentials - The credentials obtained from a successful AAD authentication.
     */
    constructor(credentials: ApplicationTokenCredentials | UserTokenCredentials | DeviceTokenCredentials | MSITokenCredentials);
    /**
     * Gets the jwt token for the specified audience
     * @param {string} [audience] - The audience for which the token is desired. If not
     * provided then the Endpoint from the connection string will be applied.
     */
    getToken(audience?: string): Promise<TokenInfo>;
}
|
<filename>provision.go
// Go library for the OnePlatform Provisioning API
// http://docs.exosite.com/provision/
package goonep
import (
// "fmt"
"io/ioutil"
"log"
"net/http"
"regexp"
"strconv"
"strings"
"time"
// "net/http/httputil"
)
// VendorToken is the vendor-level auth token used by default for
// provisioning lookups made through ProvModel.Find.
var VendorToken = ""

// URL path fragments for the OnePlatform Provisioning API endpoints.
var PROVISION_BASE = "/provision"
var PROVISION_ACTIVATE = PROVISION_BASE + "/activate"
var PROVISION_DOWNLOAD = PROVISION_BASE + "/download"
var PROVISION_MANAGE = PROVISION_BASE + "/manage"
var PROVISION_MANAGE_MODEL = PROVISION_MANAGE + "/model/"
var PROVISION_MANAGE_CONTENT = PROVISION_MANAGE + "/content/"
var PROVISION_REGISTER = PROVISION_BASE + "/register"

// Pool is a cache of previously fetched models keyed by serial number,
// consulted by ProvModel.Find before hitting the API.
// NOTE(review): Models is nil until initialized by a caller; reading a nil
// map is safe, but nothing in this file populates the cache — confirm it is
// filled elsewhere.
var Pool struct {
	Models map[string]*ProvModel
}
// ProvContent and ProvGroup are marker types grouping the content- and
// group-management API helpers.
type ProvContent struct{}
type ProvGroup struct{}

// ProvModel represents one provisioned device model / serial-number record
// as returned by the manage/model endpoint.
type ProvModel struct {
	RawData      string // raw CSV response line the record was parsed from
	ActiveStatus string // first CSV field of the response
	Rid          string // second CSV field: the 40-char resource ID
	SN           string // serial number used to fetch this record
	ExtraField   string // response remainder after the first two CSV fields
	TimeStamp    int64  // unix time the record was fetched (set by Find)
	// managebycik selects CIK-header auth instead of token auth in ProvCall.
	managebycik       bool
	managebysharecode bool
	url               string
}
// GetPath returns the URL path segment (relative to the /provision prefix)
// for the model-management endpoint.
func (m *ProvModel) GetPath() string {
	return "manage/model"
}
// Find returns the model identified by serial number id, consulting the Pool
// cache first and falling back to a provisioning API lookup. A zero-value
// ProvModel is returned when id is empty, the HTTP call fails, or the
// platform reports the serial number as not found.
func (m *ProvModel) Find(modelName, id string) ProvModel {
	if Pool.Models[id] != nil {
		return *Pool.Models[id]
	}
	fetchedModel := ProvModel{}
	if len(id) <= 0 {
		// %s, not %d: id is a string (the old %d verb printed garbage)
		log.Printf("Try find a non-sense ID: %s ", id)
		return ProvModel{}
	}
	var headers = http.Header{}
	result, err := ProvCall(m.GetPath()+"/"+modelName+"/"+id, VendorToken, "", "GET", false, headers)
	if err != nil {
		log.Printf("Finding model(id: %s) met some error %v", id, err)
		return fetchedModel
	}
	// on the success path ProvCall always returns the response body as bytes
	rawData := strings.Trim(string(result.([]uint8)), "\r\n")
	if rawData == "HTTP/1.1 404 Not Found" {
		return fetchedModel
	}
	fetchedModel.Parse(rawData)
	fetchedModel.SN = id
	fetchedModel.TimeStamp = time.Now().Unix()
	return fetchedModel
}
// Parse populates the model's fields from one raw CSV response line of the
// form "<activeStatus>,<rid>,<extra...>". On input with fewer than three
// comma-separated fields only RawData and ExtraField are set.
func (m *ProvModel) Parse(RawData string) {
	if len(RawData) <= 0 {
		return
	}
	m.RawData = RawData
	// strip the first two CSV fields; what remains (unquoted) is the extra payload
	extraFieldFetcher := regexp.MustCompile("([a-zA-Z0-9]+,){2}")
	m.ExtraField = strings.Trim(extraFieldFetcher.ReplaceAllString(RawData, ""), "\"")
	efSlices := strings.Split(RawData, ",")
	if len(efSlices) <= 2 {
		return
	}
	m.ActiveStatus = efSlices[0]
	m.Rid = efSlices[1]
}
// Validate reports whether the model carries a well-formed resource ID:
// OnePlatform RIDs are 40-character hex strings, so any other length is
// treated as invalid.
func (m *ProvModel) Validate() bool {
	// direct boolean result instead of the old if/return-true-false ladder
	return len(m.Rid) == 40
}
// Bytes returns the raw response line this model was parsed from.
func (m *ProvModel) Bytes() []byte {
	return []byte(m.RawData)
}

// ProvShare is a marker type grouping the share-management API helpers.
type ProvShare struct{}
// Provision mirrors the Provisioning API surface as a namespace-like value:
// Manage groups the content/group/model/share helpers, Admin the auth
// helpers, and Register the vendor-registration helpers.
var Provision struct {
	Manage struct {
		Content ProvContent
		Group   ProvGroup
		Model   ProvModel
		Share   ProvShare
	}
	Admin struct {
		Auth ProvModel
	}
	Register ProvModel
}
// ProvRestModel is the intended CRUD contract for provisioning resources.
// NOTE(review): the methods take *interface{} (a pointer to an interface),
// which is almost never what callers want in Go, and no type in this file
// implements the interface — confirm whether it is used anywhere.
type ProvRestModel interface {
	// GetPath retrieves the URL path for each different model
	GetPath() string
	Create(attr *interface{}) Response
	Find(id string) Response
	All() Response
	Update(attr *interface{}) Response
	Delete(attr *interface{}) Response
}
// ProvCall performs one HTTP request against the OnePlatform Provisioning
// API and returns the raw response body.
//
// path is appended to the fixed API host; key is sent as a CIK
// (X-Exosite-CIK) when managebycik is true, otherwise as a token
// (X-Exosite-Token); data becomes the request body. On success the returned
// interface{} holds the body as []byte; on transport failure the
// (possibly nil) *http.Response and the error are returned.
func ProvCall(path, key, data, method string, managebycik bool, extra_headers http.Header) (interface{}, error) {
	client := &http.Client{}
	serverUrl := "https://m2.exosite.com"
	req, err := http.NewRequest(method, serverUrl+path, strings.NewReader(data))
	if err != nil {
		// previously this error was discarded, which made the code below
		// dereference a nil request on a malformed method/URL
		return nil, err
	}
	req.Header = extra_headers
	if managebycik {
		req.Header.Add("X-Exosite-CIK", key)
	} else {
		req.Header.Add("X-Exosite-Token", key)
	}
	if method == "POST" {
		req.Header.Add("Content-Type", "application/x-www-form-urlencoded; charset=utf-8")
	}
	req.Header.Add("Accept", "text/plain, text/csv, application/x-www-form-urlencoded")
	resp, err := client.Do(req)
	if err != nil {
		return resp, err
	}
	defer resp.Body.Close()
	body, readErr := ioutil.ReadAll(resp.Body)
	if readErr != nil {
		return body, readErr
	}
	return body, nil
}
// Content_create implements POST to /provision/manage/content/<MODEL>/,
// creating a content entry with the given id and metadata; when protect is
// true the entry is created as protected.
func Content_create(provModel ProvModel, key, model, contentid, meta string, protect bool) (interface{}, error) {
	var data = "id=" + contentid + "&meta=" + meta
	if protect { // idiomatic: was `protect != false`
		data = data + "&protected=true"
	}
	var path = PROVISION_MANAGE_CONTENT + model + "/"
	var headers = http.Header{}
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}
// Content_download implements GET to /provision/download, fetching a content
// blob by vendor, model and content id using the device's CIK.
func Content_download(provModel ProvModel, cik, vendor, model, contentid string) (interface{}, error) {
	var data = "vendor=" + vendor + "&model=" + model + "&id=" + contentid
	var headers = http.Header{}
	// NOTE(review): the Accept value "*" (not "*/*") is sent verbatim —
	// confirm the platform accepts this form.
	headers.Add("Accept", "*")
	return ProvCall(PROVISION_DOWNLOAD, cik, data, "GET", provModel.managebycik, headers)
}

// Content_info implements GET to /provision/manage/content/<MODEL>/<CONTENT_ID>
// when vendor is empty, or GET to /provision/download with info=true otherwise.
func Content_info(provModel ProvModel, key, model, contentid, vendor string) (interface{}, error) {
	var headers = http.Header{}
	if vendor == "" {
		var path = PROVISION_MANAGE_CONTENT + model + "/" + contentid
		return ProvCall(path, key, "", "GET", provModel.managebycik, headers)
	} else {
		var data = "vendor=" + vendor + "&model=" + model + "&info=true"
		return ProvCall(PROVISION_DOWNLOAD, key, data, "GET", provModel.managebycik, headers)
	}
}

// Content_list implements GET to /provision/manage/content/<MODEL>/.
func Content_list(provModel ProvModel, key, model string) (interface{}, error) {
	var path = PROVISION_MANAGE_CONTENT + model + "/"
	var headers = http.Header{}
	return ProvCall(path, key, "", "GET", provModel.managebycik, headers)
}

// Content_remove implements DELETE to /provision/manage/content/<MODEL>/<CONTENT_ID>.
func Content_remove(provModel ProvModel, key, model, contentid string) (interface{}, error) {
	var headers = http.Header{}
	var path = PROVISION_MANAGE_CONTENT + model + "/" + contentid
	return ProvCall(path, key, "", "DELETE", provModel.managebycik, headers)
}

// Content_upload implements POST to /provision/manage/content/<MODEL>/<CONTENT_ID>,
// sending data with the given MIME type as the request body.
func Content_upload(provModel ProvModel, key, model, contentid, data, mimetype string) (interface{}, error) {
	var headers = http.Header{}
	headers.Add("Content-Type", mimetype)
	var path = PROVISION_MANAGE_CONTENT + model + "/" + contentid
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}
// Model_create implements POST to /provision/manage/model/. sharecode is
// sent as "code" when the model is managed by share code, otherwise as
// "rid". Each of aliases/comments/historical, when false, appends the
// corresponding "no…" option.
func Model_create(provModel ProvModel, key, model, sharecode string, aliases, comments, historical bool) (interface{}, error) {
	var headers = http.Header{}
	var data = "model=" + model
	if provModel.managebysharecode {
		data = data + "&code=" + sharecode
	} else {
		data = data + "&rid=" + sharecode
	}
	// idiomatic negation instead of `== false` comparisons
	if !aliases {
		data = data + "&options[]=noaliases"
	}
	if !comments {
		data = data + "&options[]=nocomments"
	}
	if !historical {
		data = data + "&options[]=nohistorical"
	}
	return ProvCall(PROVISION_MANAGE_MODEL, key, data, "POST", provModel.managebycik, headers)
}
// Model_info implements GET to /provision/manage/model/<MODEL>.
func Model_info(provModel ProvModel, key, model string) (interface{}, error) {
	var headers = http.Header{}
	return ProvCall(PROVISION_MANAGE_MODEL+model, key, "", "GET", provModel.managebycik, headers)
}

// Model_list implements GET to /provision/manage/model/.
func Model_list(provModel ProvModel, key string) (interface{}, error) {
	var headers = http.Header{}
	return ProvCall(PROVISION_MANAGE_MODEL, key, "", "GET", provModel.managebycik, headers)
}

// Model_remove implements DELETE to /provision/manage/model/<MODEL>.
func Model_remove(provModel ProvModel, key, model string) (interface{}, error) {
	var headers = http.Header{}
	var data = "delete=true&model=" + model + "&confirm=true"
	var path = PROVISION_MANAGE_MODEL + model
	return ProvCall(path, key, data, "DELETE", provModel.managebycik, headers)
}

// Model_update implements PUT to /provision/manage/model/<MODEL>, pointing
// the model at a new clone RID.
// NOTE(review): the aliases, comments and historical parameters are accepted
// but never sent in the request body — confirm against the provisioning API
// whether option flags should be included on update.
func Model_update(provModel ProvModel, key, model, clonerid string, aliases, comments, historical bool) (interface{}, error) {
	var headers = http.Header{}
	var data = "rid=" + clonerid
	var path = PROVISION_MANAGE_MODEL + model
	return ProvCall(path, key, data, "PUT", provModel.managebycik, headers)
}
// Serialnumber_activate implements POST to /provision/activate; no auth key
// is sent because activation identifies the device by vendor/model/sn.
func Serialnumber_activate(provModel ProvModel, model, serialnumber, vendor string) (interface{}, error) {
	var headers = http.Header{}
	var data = "vendor=" + vendor + "&model=" + model + "&sn=" + serialnumber
	return ProvCall(PROVISION_ACTIVATE, "", data, "POST", provModel.managebycik, headers)
}

// Serialnumber_add implements POST to /provision/manage/model/<MODEL>/,
// registering a single serial number.
func Serialnumber_add(provModel ProvModel, key, model, sn string) (interface{}, error) {
	var headers = http.Header{}
	var data = "add=true&sn=" + sn
	var path = PROVISION_MANAGE_MODEL + model + "/"
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}

// Serialnumber_add_batch implements POST to /provision/manage/model/<MODEL>/,
// registering several serial numbers in one request via repeated sn[] fields.
func Serialnumber_add_batch(provModel ProvModel, key, model string, sns []string) (interface{}, error) {
	var headers = http.Header{}
	var data = "add=true"
	for i := range sns {
		data = data + "&sn[]=" + sns[i]
	}
	var path = PROVISION_MANAGE_MODEL + model + "/"
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}
// Serialnumber_disable implements POST to /provision/manage/model/<MODEL>/<SN>.
func Serialnumber_disable(provModel ProvModel, key, model, serialnumber string) (interface{}, error) {
	var headers = http.Header{}
	var data = "disable=true"
	var path = PROVISION_MANAGE_MODEL + model + "/" + serialnumber
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}

// Serialnumber_enable implements POST to /provision/manage/model/<MODEL>/<SN>,
// enabling the serial number under the given owner resource.
func Serialnumber_enable(provModel ProvModel, key, model, serialnumber, owner string) (interface{}, error) {
	var headers = http.Header{}
	var data = "enable=true&owner=" + owner
	var path = PROVISION_MANAGE_MODEL + model + "/" + serialnumber
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}

// Serialnumber_info implements GET to /provision/manage/model/<MODEL>/<SN>.
func Serialnumber_info(provModel ProvModel, key, model, serialnumber string) (interface{}, error) {
	var headers = http.Header{}
	var path = PROVISION_MANAGE_MODEL + model + "/" + serialnumber
	return ProvCall(path, key, "", "GET", provModel.managebycik, headers)
}

// Serialnumber_list implements GET to /provision/manage/model/<MODEL>/ with
// offset/limit paging parameters.
func Serialnumber_list(provModel ProvModel, key, model string, offset, limit int) (interface{}, error) {
	var headers = http.Header{}
	var data = "offset=" + strconv.Itoa(offset) + "&limit=" + strconv.Itoa(limit)
	var path = PROVISION_MANAGE_MODEL + model + "/"
	return ProvCall(path, key, data, "GET", provModel.managebycik, headers)
}

// Serialnumber_reenable implements POST to /provision/manage/model/<MODEL>/<SN>.
func Serialnumber_reenable(provModel ProvModel, key, model, serialnumber string) (interface{}, error) {
	var headers = http.Header{}
	var data = "enable=true"
	var path = PROVISION_MANAGE_MODEL + model + "/" + serialnumber
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}

// Serialnumber_remap implements POST to /provision/manage/model/<MODEL>/<SN>,
// moving an activation from oldsn to serialnumber.
// NOTE(review): the body sends "enable=true&oldsn=…" — confirm against the
// provisioning API that remap is expressed via the enable operation.
func Serialnumber_remap(provModel ProvModel, key, model, serialnumber, oldsn string) (interface{}, error) {
	var headers = http.Header{}
	var data = "enable=true&oldsn=" + oldsn
	var path = PROVISION_MANAGE_MODEL + model + "/" + serialnumber
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}

// Serialnumber_remove implements DELETE to /provision/manage/model/<MODEL>/<SN>.
func Serialnumber_remove(provModel ProvModel, key, model, serialnumber string) (interface{}, error) {
	var headers = http.Header{}
	var path = PROVISION_MANAGE_MODEL + model + "/" + serialnumber
	return ProvCall(path, key, "", "DELETE", provModel.managebycik, headers)
}
// Serialnumber_remove_batch implements POST to /provision/manage/model/<MODEL>/,
// removing several serial numbers in one request via repeated sn[] fields.
func Serialnumber_remove_batch(provModel ProvModel, key, model string, sns []string) (interface{}, error) {
	var headers = http.Header{}
	var data = "remove=true"
	for i := range sns {
		data = data + "&sn[]=" + sns[i]
	}
	var path = PROVISION_MANAGE_MODEL + model + "/"
	return ProvCall(path, key, data, "POST", provModel.managebycik, headers)
}

// Vendor_register implements POST to /provision/register.
func Vendor_register(provModel ProvModel, key, vendor string) (interface{}, error) {
	var headers = http.Header{}
	var data = "vendor=" + vendor
	return ProvCall(PROVISION_REGISTER, key, data, "POST", provModel.managebycik, headers)
}

// Vendor_show implements GET to /provision/register (always token auth).
func Vendor_show(key string) (interface{}, error) {
	var headers = http.Header{}
	return ProvCall(PROVISION_REGISTER, key, "", "GET", false, headers)
}

// Vendor_unregister implements POST to /provision/register with delete=true
// (always token auth).
func Vendor_unregister(key, vendor string) (interface{}, error) {
	var headers = http.Header{}
	var data = "delete=true&vendor=" + vendor
	return ProvCall(PROVISION_REGISTER, key, data, "POST", false, headers)
}
|
<gh_stars>1-10
import { Message, PrismaClient } from "@prisma/client"
import { RepositoryError, UnknownRepositoryError } from "../../../../domain/repository/RepositoryError"
import { ChangeEventHandler } from "../../../ChangeEventHandler"
import { IMessageCommandRepository } from "../../../../domain/repository/command/Message"
import { MessageEntity } from "../../../../domain/entity/Message"
import { MessageId } from "../../../../domain/types"
import { prisma } from "../client"
// Compare the persisted columns of two Message rows and report whether any
// of them differ. createdAt is compared by timestamp so that two distinct
// Date instances representing the same instant count as equal (and two
// missing timestamps also count as equal).
export function has_changed(a: Message, b: Message) {
    const sameCreatedAt = a.createdAt?.getTime() === b.createdAt?.getTime()
    const samePlainColumns =
        a.channelId === b.channelId &&
        a.userId === b.userId &&
        a.text === b.text &&
        a.textStyle === b.textStyle &&
        a.favoriteCount === b.favoriteCount &&
        a.likeCount === b.likeCount &&
        a.replyCount === b.replyCount &&
        a.threadId === b.threadId &&
        a.deleted === b.deleted
    return !(sameCreatedAt && samePlainColumns)
}
// Command-side repository for Message rows. Writes go through Prisma; after
// any observable change the inherited emitChanges() is invoked with the
// message id so read models / caches can react.
export class MessageCommandRepository extends ChangeEventHandler implements IMessageCommandRepository {
    // Prisma client used for all queries; either an injected transaction
    // client or the shared module-level client.
    private _prisma: PrismaClient
    constructor(transaction?: PrismaClient) {
        super(MessageCommandRepository)
        // when a transaction client is supplied, all operations of this
        // repository instance take part in that transaction
        if (transaction) {
            this._prisma = transaction
        } else {
            this._prisma = prisma
        }
    }
    // Inserts a new message row and returns its generated id.
    // Throws RepositoryError on a Prisma failure (original stack preserved).
    async add(message: MessageEntity): Promise<MessageId> {
        if (message instanceof MessageEntity !== true) {
            throw new RepositoryError("`message` must be an instance of MessageEntity")
        }
        try {
            const result = await this._prisma.message.create({
                data: {
                    text: message.text,
                    textStyle: message.textStyle,
                    channelId: message.channelId,
                    userId: message.userId,
                    favoriteCount: message.favoriteCount,
                    likeCount: message.likeCount,
                    replyCount: message.replyCount,
                    threadId: message.threadId,
                },
            })
            return result.id
        } catch (error) {
            if (error instanceof Error) {
                throw new RepositoryError(error.message, error.stack)
            } else {
                throw new UnknownRepositoryError()
            }
        }
    }
    // Updates an existing row and returns true when any persisted column
    // actually changed (per has_changed); change events are only emitted in
    // that case. Returns false for a no-op update.
    async update(message: MessageEntity): Promise<boolean> {
        if (message instanceof MessageEntity !== true) {
            throw new RepositoryError("`message` must be an instance of MessageEntity")
        }
        try {
            // fetch the pre-update row so the post-update comparison can
            // decide whether to emit a change event
            const origMessage = await this._prisma.message.findUnique({
                where: {
                    id: message.id,
                },
            })
            if (origMessage == null) {
                throw new RepositoryError(`Message not found (id=${message.id})`)
            }
            const updatedMessage = await this._prisma.message.update({
                where: {
                    id: message.id,
                },
                data: {
                    text: message.text,
                    textStyle: message.textStyle,
                    channelId: message.channelId,
                    userId: message.userId,
                    favoriteCount: message.favoriteCount,
                    likeCount: message.likeCount,
                    replyCount: message.replyCount,
                    threadId: message.threadId,
                    deleted: message.deleted,
                },
            })
            if (has_changed(origMessage, updatedMessage)) {
                await this.emitChanges(message.id)
                return true
            }
            return false
        } catch (error) {
            if (error instanceof Error) {
                throw new RepositoryError(error.message, error.stack)
            } else {
                throw new UnknownRepositoryError()
            }
        }
    }
    // Hard-deletes the row and emits a change event; always returns true on
    // success and wraps any Prisma failure in RepositoryError.
    async delete(message: MessageEntity): Promise<boolean> {
        if (message instanceof MessageEntity !== true) {
            throw new RepositoryError("`message` must be an instance of MessageEntity")
        }
        try {
            await this._prisma.message.delete({
                where: {
                    id: message.id,
                },
            })
            await this.emitChanges(message.id)
            return true
        } catch (error) {
            if (error instanceof Error) {
                throw new RepositoryError(error.message, error.stack)
            } else {
                throw new UnknownRepositoryError()
            }
        }
    }
}
|
<reponame>lionelpa/openvalidation
/*
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package util;
import io.openvalidation.common.data.DataPropertyBase;
import io.openvalidation.common.data.DataPropertyType;
import java.util.function.Function;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
/** Hamcrest matcher asserting that a {@link DataPropertyBase} has a given {@link DataPropertyType}. */
public class DataPropertyBaseTypeMatcher extends BaseMatcher<DataPropertyBase> {

  /** Human-readable description used when the match fails. */
  private final String description;

  /** The property type every matched object must report. */
  private final DataPropertyType expectedType;

  public DataPropertyBaseTypeMatcher(DataPropertyType propertyType) {
    this.description = "a DataPropertyBase object with type " + propertyType.toString();
    this.expectedType = propertyType;
  }

  @Override
  public boolean matches(Object argument) {
    // Enum constants are singletons, so reference comparison is correct here.
    return ((DataPropertyBase) argument).getType() == expectedType;
  }

  @Override
  public void describeTo(Description description) {
    description.appendText(this.description);
  }
}
|
def product_of_list(nums):
    """Return the product of every number in *nums* (1 for an empty sequence)."""
    total = 1
    for factor in nums:
        total = total * factor
    return total
if __name__ == '__main__':
    # Tiny smoke test: 1 * 2 * 3 == 6.
    sample = [1, 2, 3]
    print(product_of_list(sample))
// Copyright 2004-present Facebook. All Rights Reserved.
package com.facebook.cipher.jni;
import com.facebook.crypto.keychain.KeyChain;
import com.facebook.crypto.proguard.annotations.DoNotStrip;
import com.facebook.jni.HybridData;
/**
 * JNI wrapper around Conceal's native Cipher object in C++.
 * Holds a {@link HybridData} handle that keeps the native peer alive;
 * encrypt/decrypt streams are created via the native factory methods below.
 */
public class CipherHybrid {
  // load native
  @DoNotStrip
  private final HybridData mHybridData;  // owns the C++ peer; freed when GC'd

  /**
   * Creates the native cipher.
   *
   * @param configId identifies the crypto configuration on the native side
   * @param keyChain supplies key material to the native implementation
   */
  public CipherHybrid(byte configId, KeyChain keyChain) {
    mHybridData = initHybrid(configId, keyChain);
  }

  private CipherHybrid(HybridData hybridData) {
    // to be created from C++
    mHybridData = hybridData;
  }

  private static native HybridData initHybrid(byte configId, KeyChain keyChain);

  // Factory methods for native encrypt/decrypt streams over the given
  // entity bytes (offset/count select the relevant slice).
  public native EncryptHybrid createEncrypt(byte[] entity, int offset, int count);
  public native DecryptHybrid createDecrypt(byte[] entity, int offset, int count);
}
|
const express = require('express');
const app = express();

// In-memory catalogue served by the API (no persistence).
const bookList = [
  { title: 'Harry Potter', author: 'J.K Rowling' },
  { title: 'The Alchemist', author: 'Paul Coelho' },
  { title: 'The Lord of the Rings', author: 'J.R.R Tolkien' },
];

// GET /api/books -> { books: [...] }
app.get('/api/books', (request, response) => {
  response.send({ books: bookList });
});

// Start listening; honour the platform-assigned port when present.
const port = process.env.PORT || 3000;
app.listen(port, () => console.log(`Listening on port ${port}...`));
<gh_stars>0
# Controller for order-fulfillment views over items.
# NOTE(review): class name spells "Fullfillments" (sic) — renaming would
# break routes, so it is documented here instead.
class OrderFullfillmentsController < ApplicationController
  # Load @item before the actions that display a single record.
  before_action :find_item, only: [:show, :edit]

  # Lists every item.
  def index
    @items = Item.all
  end

  # Shows one item; @item is set by the before_action.
  def show
  end

  # Edit form; loads the order being fulfilled.
  # NOTE(review): params[:id] is used both as an Item id (find_item) and an
  # Order id here — confirm these really share ids.
  def edit
    @order = Order.find(params[:id])
  end

  # Not yet implemented.
  def update
  end

  # Not yet implemented.
  def destroy
  end

  private

  # find_by returns nil (instead of raising) when the item is missing.
  def find_item
    @item = Item.find_by(id: params[:id])
  end

  # Strong parameters: only the shipping status may be mass-assigned.
  def item_params
    params.require(:order).permit(:shipping_status)
  end
end
|
class TS_Texts:
    """Time-series text container (stub): validates timestamps before lookup.

    NOTE(review): relies on ``datetime`` and ``sptemp`` being imported at
    module level — neither import is visible in this excerpt; confirm.
    """

    # __init__()
    # Error message reused by value() when the timestamp has the wrong type.
    init_error1 = "ts must be of type datetime.datetime or sptemp.zeit.Time_Period!"

    # value()
    def value(self, ts):
        """Validate *ts* and (eventually) return the value stored at it.

        :param ts: a ``datetime.datetime`` instant or ``sptemp.zeit.Time_Period``
        :raises TypeError: if *ts* is neither accepted type
        """
        if not isinstance(ts, (datetime.datetime, sptemp.zeit.Time_Period)):
            raise TypeError(self.init_error1)
        # Implement logic to retrieve the value associated with the timestamp 'ts' from the time series data
        # ...
#!/bin/bash
# Few-shot fine-tuning of a cross-modal retrieval model (xFlickrCO, Indonesian)
# using machine-translated validation data. Sweeps three learning rates and
# logs each run separately.

# Experiment configuration: task id, shot count, language, model variant.
TASK=8
SHOT=25
LANG=id
MODEL=ctrl_muniter
MODEL_CONFIG=ctrl_muniter_base
TASKS_CONFIG=iglue_fewshot_tasks_boxes36.dtu
TRTASK=RetrievalxFlickrCO${LANG}_${SHOT}
# Training annotations/features and machine-translated validation annotations.
TEXT_TR=/home/projects/ku_00062/data/xFlickrCO/annotations/${LANG}/train_${SHOT}.jsonl
FEAT_TR=/home/projects/ku_00062/data/xFlickrCO/features/xflickrco-few_boxes36.lmdb
TEXT_TE=/home/projects/ku_00062/data/flickr30k/annotations_machine-translate/valid-${LANG}_gmt.jsonl
# Zero-shot checkpoint used as the starting point for few-shot training.
PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/zero_shot/xflickrco/${MODEL}/RetrievalFlickr30k_${MODEL_CONFIG}/pytorch_model_best.bin

# Remember where we started so per-lr logs land next to this script.
here=$(pwd)
source /home/projects/ku_00062/envs/iglue/bin/activate

cd ../../../../../../volta
# Learning-rate sweep; each run writes to its own checkpoint/log directories.
for lr in 1e-4 5e-5 1e-5; do
    OUTPUT_DIR=/home/projects/ku_00062/checkpoints/iglue/few_shot.mt/xflickrco/${TRTASK}/${MODEL}/${lr}
    LOGGING_DIR=/home/projects/ku_00062/logs/iglue/few_shot.mt/xflickrco/${TRTASK}/${lr}/${MODEL_CONFIG}
    python train_task.py \
        --bert_model /home/projects/ku_00062/huggingface/bert-base-multilingual-cased --config_file config/${MODEL_CONFIG}.json \
        --from_pretrained ${PRETRAINED} \
        --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --num_epoch 20 \
        --train_split train_${LANG}_${SHOT} --train_annotations_jsonpath $TEXT_TR --train_features_lmdbpath $FEAT_TR \
        --val_split dev_${LANG}_gmt --val_annotations_jsonpath $TEXT_TE \
        --lr $lr --batch_size 8 --gradient_accumulation_steps 1 --num_workers 0 --save_every_num_epochs 5 --eval_batch_size 16 --max_val_batches 130 \
        --adam_epsilon 1e-6 --adam_betas 0.9 0.999 --adam_correct_bias --weight_decay 0.0001 --warmup_proportion 0.1 --clip_grad_norm 1.0 \
        --output_dir ${OUTPUT_DIR} \
        --logdir ${LOGGING_DIR} \
        &> ${here}/train.${lr}.log
done

deactivate
|
'use strict';

/**
 * Test double for a LayoutManager: exposes the same fields/methods with
 * window-derived defaults. match() always reports success; mTeardown()
 * resets every field between tests.
 */
var MockLayoutManager = {
  width: window.innerWidth,
  fullscreenHeight: window.innerHeight,
  usualHeight: window.innerHeight,
  keyboardEnabled: false,
  // Always claim the layout matches so tests never branch on layout state.
  match: function() {
    return true;
  },
  // Restore every field to its initial (window-derived) value.
  mTeardown: function mlm_mTeardown() {
    this.width = window.innerWidth;
    this.fullscreenHeight = window.innerHeight;
    this.usualHeight = window.innerHeight;
    this.keyboardEnabled = false;
  }
};
|
<filename>Spring-Framework-v5.3.13/spring-context-debug/src/main/java/com/kapcb/ccc/model/DebugBean.java
package com.kapcb.ccc.model;
/**
* <a>Title: Bean </a>
* <a>Author: Kapcb <a>
* <a>Description: Bean <a>
*
* @author Kapcb
* @version 1.0
* @date 2021/12/11 23:21
* @since 1.0
*/
public interface DebugBean {
    /**
     * Performs the bean's demo action; implementations decide what is said.
     */
    void say();
}
|
<reponame>ch1huizong/learning
#!/usr/bin/env python
# -*- coding:UTF-8 -*-
import re
# Verbose regex that extracts a personal name plus e-mail address from a
# "Name <user@host>" style string, tolerating missing angle brackets but
# rejecting mismatched ones via the lookahead.
# Fixed: converted from Python 2 print statements to Python 3, and made the
# pattern a raw string so backslash escapes are not mangled.
address = re.compile(r'''
    ((?P<name>
        ([\w.,]+\s+)*[\w.,]+)
        \s+
    )  # the name is required (no longer optional)

    # Lookahead: the rest of the string must be either fully wrapped in
    # angle brackets or not wrapped at all (mismatched brackets fail).
    (?= (<.*>$)        # wrapped
        |
        ([^<].*[^>]$)  # not wrapped
    )
    <?  # optional opening bracket

    # The address itself.
    (?P<email>
        [\w\d.+-]+        # username
        @
        ([\w\d.]+\.)+     # domain name prefix
        (com|org|edu)     # limited set of top-level domains
    )
    >?  # optional closing bracket
    ''',
    re.UNICODE | re.VERBOSE)

candidates = [
    'First Last <<EMAIL>>',
    'No Brackets <EMAIL>',
    'Open Bracket <<EMAIL>',
    'Close Bracket <EMAIL>>',
]

for candidate in candidates:
    print('Candidate:', candidate)
    match = address.search(candidate)
    if match:
        print('  Name:', match.groupdict()['name'])
        print('  Email:', match.groupdict()['email'])
        print()
    else:
        print('  No match')
|
#!/bin/bash
# Point QSPEC at the q spec file shipped inside the active conda environment.
export QSPEC=${CONDA_PREFIX}/q/spec.q
|
package org.f5n.aoc2020;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.f5n.aoc2020.utils.*;
import org.f5n.aoc2020.days.*;
@RestController
public class PerfController {
protected final Logger logger = LoggerFactory.getLogger(getClass());
private static DayController dc = new DayController();
@RequestMapping("/perf/{day}/{num}")
public Map<String, Long> perf(@PathVariable int day, @PathVariable int num,
@RequestParam(name = "part", defaultValue = "0") int part) {
long start = System.currentTimeMillis();
Map<String, Long> rv = new HashMap<>();
if (day < 1 || day > 25 ) {
rv.put("error", 1L);
return rv;
}
if (part < 1 || part > 2) {
part = 0;
}
String sDay = "" + (day > 10 ? day : "0" + day);
Method dayMethod;
try {
dayMethod = DayController.class.getMethod("day" + sDay, int.class);
logger.info("Calling DC.day" + sDay + "(" + part + ")");
for (int i = 0; i < num; ++i) {
dayMethod.invoke(dc, part);
}
} catch (NoSuchMethodException ex) {
logger.warn(ex.toString());
rv.put("error", 2L);
return rv;
} catch (IllegalAccessException ex) {
logger.warn(ex.toString());
rv.put("error", 2L);
return rv;
} catch (InvocationTargetException ex) {
logger.warn(ex.toString());
rv.put("error", 2L);
return rv;
}
Long end = System.currentTimeMillis() - start;
rv.put("day", (long) day);
rv.put("num", (long) num);
rv.put("timeMs", end);
rv.put("avgNs", (end * 1000 / num));
return rv;
}
} |
package io.opensphere.mantle.data.impl;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.MutableTreeNode;
import javax.swing.tree.TreeNode;
import org.apache.log4j.Logger;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.collections.CollectionUtilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.collections.StreamUtilities;
import io.opensphere.core.util.lang.Pair;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.MapVisualizationType;
/**
* The Class DataGroupInfoGroupByUtility.
*/
@SuppressWarnings("PMD.GodClass")
public final class DataGroupInfoGroupByUtility
{
    /** The Constant myDGIComparator. */
    public static final DGIComparator ourDGIComparator = new DGIComparator();
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(DataGroupInfoGroupByUtility.class);
    /** The Constant ourDTINameAndTypeComparator. */
    private static final DTIDisplayNameAndFeatureTypeComparator ourDTINameAndTypeComparator = new DTIDisplayNameAndFeatureTypeComparator();
    /**
     * Creates the group by tree for the collection of data group info. The tree
     * will be composed of root nodes provided by the given Categorizer
     * parameter, each DataGroupInfo in the passed in collection will first be
     * filtered
     *
     * @param builder the {@link GroupByTreeBuilder}
     * @param userObjGen the {@link NodeUserObjectGenerator}
     * @param dataGroups the {@link Collection} of {@link DataGroupInfo} to be
     *            considered for inclusion.
     * @return the tree node
     */
    public static TreeNode createGroupByTree(GroupByTreeBuilder builder, NodeUserObjectGenerator userObjGen,
            Collection<DataGroupInfo> dataGroups)
    {
        Utilities.checkNull(dataGroups, "dataGroups");
        Utilities.checkNull(builder, "builder");
        Utilities.checkNull(builder.getGroupCategorizer(), "builder.getGroupCategorizer()");
        DefaultMutableTreeNode result = new DefaultMutableTreeNode();
        NodeUserObjectGenerator uoGen = userObjGen == null ? new DefaultNodeUserObjectGenerator() : userObjGen;
        TreeOptions treeOptions = builder.getTreeOptions();
        // NOTE(review): this mutates the CALLER's collection, permanently
        // removing the group whose id is "Area" — confirm callers expect that.
        dataGroups.removeIf(e -> e.getId().equals("Area"));
        if (treeOptions == null || treeOptions.isFlattenTree())
        {
            createFlattenedTree(builder, uoGen, dataGroups, result);
        }
        else
        {
            createFullTree(builder.getDataCategoryFilter(), builder.getGroupFilter(), uoGen, dataGroups, result);
        }
        return result;
    }
    /**
     * Adds a data group node to the passed in cat node.
     *
     * @param dgi The data group to add the node for.
     * @param treeOptions The tree options.
     * @param catNode The node to add to.
     * @param userObjGen Generates the user object to give to the node.
     * @param subNodeCount keeps track of sub node count.
     * @return The new sub not count.
     */
    private static int addDataGroupNode(DataGroupInfo dgi, TreeOptions treeOptions, DefaultMutableTreeNode catNode,
            NodeUserObjectGenerator userObjGen, int subNodeCount)
    {
        int newSubNodeCount = subNodeCount;
        DefaultMutableTreeNode dgiNode;
        // A flattenable leaf group with exactly one member collapses into a
        // single (group, type) node instead of a group node with a child.
        if (dgi.getChildren().isEmpty() && dgi.getMembers(false).size() == 1 && dgi.isFlattenable())
        {
            dgiNode = new DefaultMutableTreeNode(userObjGen.createNodeUserObject(dgi, dgi.getMembers(false).iterator().next()));
        }
        else
        {
            dgiNode = new DefaultMutableTreeNode(userObjGen.createNodeUserObject(dgi));
        }
        catNode.add(dgiNode);
        if (dgi.isFlattenable())
        {
            newSubNodeCount++;
        }
        // Optionally expose each member type as its own child node.
        if (treeOptions != null && (dgi.numMembers(false) > 1 || !dgi.isFlattenable())
                && treeOptions.isSubNodesForMultiMemberGroups())
        {
            getSortedMemberList(dgi.getMembers(false)).stream()
                    .map(dti -> new DefaultMutableTreeNode(userObjGen.createNodeUserObject(dgi, dti))).forEach(dgiNode::add);
        }
        // Recurse into child groups, accumulating the sub-node count.
        for (DataGroupInfo child : dgi.getChildren())
        {
            newSubNodeCount = addDataGroupNode(child, treeOptions, dgiNode, userObjGen, newSubNodeCount);
        }
        return newSubNodeCount;
    }
    /**
     * Adds the to group if passes filter.
     *
     * @param dataCategoryFilter the data category filter
     * @param groupFilter the group filter
     * @param addToList the add to list
     * @param dgi the dgi
     */
    private static void addToGroupIfPassesFilter(Predicate<DataGroupInfo> dataCategoryFilter,
            Predicate<DataGroupInfo> groupFilter, List<DataGroupInfo> addToList, DataGroupInfo dgi)
    {
        // A null filter means "accept everything".
        if ((dataCategoryFilter == null || dataCategoryFilter.test(dgi)) && (groupFilter == null || groupFilter.test(dgi)))
        {
            addToList.add(dgi);
        }
        // Descend only through flattenable groups.
        if (dgi.hasChildren() && dgi.isFlattenable())
        {
            dgi.getChildren().forEach(child -> addToGroupIfPassesFilter(dataCategoryFilter, groupFilter, addToList, child));
        }
    }
    /**
     * Determines if the group or any of it's descendants passes the filter.
     *
     * NOTE(review): groups that have children are judged solely by their
     * descendants — the group's own filter result is never consulted.
     *
     * @param groupFilter the group filter
     * @param dgi the dgi
     * @return whether the group or any of it's descendants passes the filter
     */
    private static boolean anyGroupPassesFilter(Predicate<DataGroupInfo> groupFilter, DataGroupInfo dgi)
    {
        boolean passes = false;
        if (dgi.hasChildren())
        {
            passes = dgi.getChildren().stream().filter(c -> anyGroupPassesFilter(groupFilter, c)).findFirst().isPresent();
        }
        else
        {
            passes = groupFilter == null || groupFilter.test(dgi);
        }
        return passes;
    }
    /**
     * Creates the flattened tree.
     *
     * @param builder the {@link GroupByTreeBuilder}
     * @param userObjGen the {@link NodeUserObjectGenerator}
     * @param dataGroups the {@link Collection} of {@link DataGroupInfo} to be
     *            considered for inclusion.
     * @param parentNode the parent node
     */
    private static void createFlattenedTree(GroupByTreeBuilder builder, NodeUserObjectGenerator userObjGen,
            Collection<DataGroupInfo> dataGroups, DefaultMutableTreeNode parentNode)
    {
        TreeOptions treeOptions = builder.getTreeOptions();
        if (treeOptions == null || !treeOptions.isBuildWithTypesInsteadOfGroups())
        {
            // Category -> groups mode.
            Map<String, List<DataGroupInfo>> catToGroupsMap = createGroupBy(builder.getDataCategoryFilter(),
                    builder.getGroupFilter(), builder.getGroupComparator(), builder.getGroupCategorizer(), dataGroups);
            List<String> categories = builder.getGroupCategorizer().getAllCategories();
            for (String cat : categories)
            {
                GroupByNodeUserObject catNodeUserObj = userObjGen.createNodeUserObject(cat);
                DefaultMutableTreeNode catNode = new DefaultMutableTreeNode(catNodeUserObj);
                List<DataGroupInfo> dgiList = catToGroupsMap.get(cat);
                int subNodeCount = 0;
                if (dgiList != null && !dgiList.isEmpty())
                {
                    for (DataGroupInfo dgi : dgiList)
                    {
                        subNodeCount = addDataGroupNode(dgi, treeOptions, catNode, userObjGen, subNodeCount);
                    }
                }
                catNodeUserObj.setLabel(catNodeUserObj.getLabel());
                catNodeUserObj.setCategoryCount(subNodeCount);
                // Collapse a category whose single child carries the same
                // label: hoist the grandchildren up into the category node.
                if (catNode.getChildCount() == 1)
                {
                    TreeNode onlyChild = catNode.getChildAt(0);
                    if (onlyChild.toString().equals(catNode.toString()) && onlyChild instanceof DefaultMutableTreeNode)
                    {
                        catNode.remove(0);
                        // No i++ on purpose: DefaultMutableTreeNode.add()
                        // re-parents the node, shrinking onlyChild's child
                        // count, so index 0 always points at the next child.
                        // NOTE(review): a child that is not a MutableTreeNode
                        // would make this loop spin forever — confirm all
                        // children are DefaultMutableTreeNodes.
                        for (int i = 0; i < onlyChild.getChildCount();)
                        {
                            Object nextElement = onlyChild.getChildAt(i);
                            if (nextElement instanceof MutableTreeNode)
                            {
                                catNode.add((MutableTreeNode)nextElement);
                            }
                        }
                    }
                }
                // Empty categories are omitted from the tree.
                if (catNode.getChildCount() > 0)
                {
                    parentNode.add(catNode);
                }
            }
        }
        else
        {
            // Category -> (group, type) pairs mode.
            Map<String, List<Pair<DataGroupInfo, DataTypeInfo>>> catToTypeMap = createGroupByDataType(
                    builder.getDataCategoryFilter(), builder.getGroupFilter(), builder.getTypeComparator(),
                    builder.getGroupCategorizer(), dataGroups);
            List<String> categories = builder.getGroupCategorizer().getAllCategories();
            for (String cat : categories)
            {
                GroupByNodeUserObject catNodeUserObj = userObjGen.createNodeUserObject(cat);
                DefaultMutableTreeNode catNode = new DefaultMutableTreeNode(catNodeUserObj);
                int subNodeCount = 0;
                parentNode.add(catNode);
                List<Pair<DataGroupInfo, DataTypeInfo>> typeList = catToTypeMap.get(cat);
                if (typeList != null && !typeList.isEmpty())
                {
                    for (Pair<DataGroupInfo, DataTypeInfo> pair : typeList)
                    {
                        DefaultMutableTreeNode dgiNode = new DefaultMutableTreeNode(
                                userObjGen.createNodeUserObject(pair.getFirstObject(), pair.getSecondObject()));
                        catNode.add(dgiNode);
                        subNodeCount++;
                    }
                }
                catNodeUserObj.setLabel(catNodeUserObj.getLabel());
                catNodeUserObj.setCategoryCount(subNodeCount);
            }
        }
    }
    /**
     * Creates the fully structured tree.
     *
     * @param dataCategoryFilter the data category filter
     * @param groupFilter the group filter ( if null "all" are selected).
     * @param userObjGen the {@link NodeUserObjectGenerator}
     * @param dataGroups the {@link Collection} of {@link DataGroupInfo} to be
     *            considered for inclusion.
     * @param parentNode the parent node
     */
    private static void createFullTree(final Predicate<DataGroupInfo> dataCategoryFilter,
            final Predicate<DataGroupInfo> groupFilter, NodeUserObjectGenerator userObjGen, Collection<DataGroupInfo> dataGroups,
            DefaultMutableTreeNode parentNode)
    {
        if (CollectionUtilities.hasContent(dataGroups))
        {
            // Honor a parent group's request to keep its children in their
            // original order instead of sorting alphabetically.
            boolean preserveChildOrder = false;
            if (parentNode.getUserObject() instanceof GroupByNodeUserObject)
            {
                GroupByNodeUserObject userObject = (GroupByNodeUserObject)parentNode.getUserObject();
                preserveChildOrder = userObject.getDataGroupInfo().isPreserveChildOrder();
            }
            if (preserveChildOrder)
            {
                for (DataGroupInfo group : dataGroups)
                {
                    DefaultMutableTreeNode node = new DefaultMutableTreeNode(userObjGen.createNodeUserObject(group));
                    parentNode.add(node);
                    createFullTree(dataCategoryFilter, groupFilter, userObjGen, group.getChildren(), node);
                }
            }
            else
            {
                // Add the groups with children that have descendants that pass
                // the group filter
                List<DataGroupInfo> folderGroups = StreamUtilities.filter(dataGroups,
                        group -> group.hasChildren() && anyGroupPassesFilter(groupFilter, group));
                Collections.sort(folderGroups, DefaultDataGroupInfo.CASE_INSENSITIVE_DISPLAY_NAME_COMPARATOR);
                for (DataGroupInfo group : folderGroups)
                {
                    DefaultMutableTreeNode node = new DefaultMutableTreeNode(userObjGen.createNodeUserObject(group));
                    parentNode.add(node);
                    createFullTree(dataCategoryFilter, groupFilter, userObjGen, group.getChildren(), node);
                }
                // Add the groups with members that pass the data category and
                // group filters
                List<DataGroupInfo> memberGroups = StreamUtilities.filter(dataGroups,
                        group -> group.hasMembers(false) && (dataCategoryFilter == null || dataCategoryFilter.test(group))
                                && (groupFilter == null || groupFilter.test(group)));
                Collections.sort(memberGroups, DefaultDataGroupInfo.CASE_INSENSITIVE_DISPLAY_NAME_COMPARATOR);
                memberGroups.stream().map(group -> new DefaultMutableTreeNode(userObjGen.createNodeUserObject(group)))
                        .forEach(parentNode::add);
            }
        }
    }
    /**
     * Groups the data into a map of categories to lists of data group info.
     * Sorts the resultant list by the provided comparator or by name if no
     * comparator is provided. Groups are selected from the data group
     * controller by the provided filter and categorized ( multiple categories
     * are allowed per group) by the GroupCatagorizer.
     *
     * @param dataCategoryFilter the data category filter
     * @param groupFilter the group filter ( if null "all" are selected).
     * @param groupComparator the group comparator a {@link Comparator} that
     *            helps sort the resultant lists. (if null sorted in natural
     *            name order).
     * @param categorizer the categorizer a {@link GroupCategorizer} that
     *            provides the categories for each entry.
     * @param collection the {@link Collection} of {@link DataGroupInfo} to use
     *            to create the group by.
     * @return the result {@link Map} of category to {@link List} of
     *         {@link DataGroupInfo}
     */
    private static Map<String, List<DataGroupInfo>> createGroupBy(Predicate<DataGroupInfo> dataCategoryFilter,
            Predicate<DataGroupInfo> groupFilter, Comparator<? super DataGroupInfo> groupComparator, GroupCategorizer categorizer,
            Collection<DataGroupInfo> collection)
    {
        Utilities.checkNull(collection, "collection");
        Utilities.checkNull(categorizer, "categorizer");
        Map<String, List<DataGroupInfo>> result = New.map();
        List<DataGroupInfo> dgiList = New.linkedList();
        collection.forEach(dgi -> addToGroupIfPassesFilter(dataCategoryFilter, groupFilter, dgiList, dgi));
        Set<String> categories = null;
        for (DataGroupInfo dgi : dgiList)
        {
            // A group may appear under several categories.
            categories = categorizer.getGroupCategories(dgi);
            if (categories != null && !categories.isEmpty())
            {
                for (String category : categories)
                {
                    List<DataGroupInfo> list = result.get(category);
                    if (list == null)
                    {
                        list = New.list();
                        result.put(category, list);
                    }
                    list.add(dgi);
                }
            }
        }
        // Sort each list by the provided comparator, or by display name if no
        // comparator is provided.
        for (Map.Entry<String, List<DataGroupInfo>> entry : result.entrySet())
        {
            if (groupComparator == null)
            {
                Collections.sort(entry.getValue(), DataGroupInfo.CASE_INSENSITIVE_DISPLAY_NAME_COMPARATOR);
            }
            else
            {
                Collections.sort(entry.getValue(), groupComparator);
            }
        }
        return result;
    }
    /**
     * Groups the data into a map of categories to lists of data type info.
     * Sorts the resultant list by the provided comparator or by name if no
     * comparator is provided. Groups are selected from the data group
     * controller by the provided filter and categorized ( multiple categories
     * are allowed per group) by the GroupCatagorizer.
     *
     * @param dataCategoryFilter the filter used to narrow the set of data
     *            categories.
     * @param groupFilter the group filter ( if null "all" are selected).
     * @param typeComparator the group comparator a {@link Comparator} that
     *            helps sort the resultant lists. (if null sorted in natural
     *            name order).
     * @param categorizer the categorizer a {@link GroupCategorizer} that
     *            provides the categories for each entry.
     * @param collection the {@link Collection} of {@link DataGroupInfo} to use
     *            to create the group by.
     * @return the result {@link Map} of category to {@link List} of
     *         {@link DataGroupInfo}
     */
    private static Map<String, List<Pair<DataGroupInfo, DataTypeInfo>>> createGroupByDataType(
            Predicate<DataGroupInfo> dataCategoryFilter, Predicate<DataGroupInfo> groupFilter,
            Comparator<? super DataTypeInfo> typeComparator, GroupCategorizer categorizer, Collection<DataGroupInfo> collection)
    {
        Utilities.checkNull(collection, "collection");
        Utilities.checkNull(categorizer, "categorizer");
        Map<String, List<Pair<DataGroupInfo, DataTypeInfo>>> result = New.map();
        List<DataGroupInfo> dgiList = New.linkedList();
        getSortedDGIList(collection).forEach(dgi -> addToGroupIfPassesFilter(dataCategoryFilter, groupFilter, dgiList, dgi));
        Set<String> categories = null;
        for (DataGroupInfo dgi : dgiList)
        {
            // Categorize each member type (recursively, getMembers(true)).
            for (DataTypeInfo dti : dgi.getMembers(true))
            {
                categories = categorizer.getTypeCategories(dti);
                if (categories != null && !categories.isEmpty())
                {
                    for (String category : categories)
                    {
                        List<Pair<DataGroupInfo, DataTypeInfo>> list = result.get(category);
                        if (list == null)
                        {
                            list = New.list();
                            result.put(category, list);
                        }
                        list.add(new Pair<>(dgi, dti));
                    }
                }
            }
        }
        // Sort each list by the provided comparator, or by display name if no
        // comparator is provided taking feature type into account.
        for (Map.Entry<String, List<Pair<DataGroupInfo, DataTypeInfo>>> entry : result.entrySet())
        {
            if (typeComparator == null)
            {
                Collections.sort(entry.getValue(), new Comparator<Pair<DataGroupInfo, DataTypeInfo>>()
                {
                    private final Comparator<DataTypeInfo> myDtiComp = DataTypeInfo.CASE_INSENSITIVE_DISPLAY_NAME_COMPARATOR;
                    @Override
                    public int compare(Pair<DataGroupInfo, DataTypeInfo> o1, Pair<DataGroupInfo, DataTypeInfo> o2)
                    {
                        // Order by visualization type first, then by name.
                        MapVisualizationType mvt1 = getMapVisType(o1.getSecondObject());
                        MapVisualizationType mvt2 = getMapVisType(o2.getSecondObject());
                        int comparedByType = Integer.compare(mvt1.ordinal(), mvt2.ordinal());
                        return comparedByType == 0 ? myDtiComp.compare(o1.getSecondObject(), o2.getSecondObject())
                                : comparedByType;
                    }
                });
            }
            else
            {
                final Comparator<? super DataTypeInfo> dtiComp = typeComparator;
                Collections.sort(entry.getValue(), (o1, o2) -> dtiComp.compare(o1.getSecondObject(), o2.getSecondObject()));
            }
        }
        return result;
    }
    /**
     * Gets the map vis type.
     *
     * @param dti the dti
     * @return the map vis type, or UNKNOWN when dti or its visualization
     *         info is null
     */
    private static MapVisualizationType getMapVisType(DataTypeInfo dti)
    {
        MapVisualizationType type = MapVisualizationType.UNKNOWN;
        if (dti != null && dti.getMapVisualizationInfo() != null)
        {
            type = dti.getMapVisualizationInfo().getVisualizationType();
        }
        return type;
    }
    /**
     * Gets the sorted dgi list.
     *
     * @param dgiCollection the dgi collection
     * @return the sorted dgi list
     */
    private static List<DataGroupInfo> getSortedDGIList(Collection<DataGroupInfo> dgiCollection)
    {
        List<DataGroupInfo> dgiList = null;
        if (CollectionUtilities.hasContent(dgiCollection))
        {
            dgiList = New.list(dgiCollection);
            try
            {
                Collections.sort(dgiList, ourDGIComparator);
            }
            catch (IllegalArgumentException e)
            {
                // A comparator-contract violation from sort(); fall back to a
                // plain name sort rather than failing the whole tree build.
                LOGGER.warn(e);
                Collections.sort(dgiList, DataGroupInfo.CASE_INSENSITIVE_DISPLAY_NAME_COMPARATOR);
            }
        }
        return dgiList == null ? Collections.<DataGroupInfo>emptyList() : dgiList;
    }
    /**
     * Gets the sorted member list.
     *
     * @param dtiCollection the dti collection
     * @return the sorted member list
     */
    private static List<DataTypeInfo> getSortedMemberList(Collection<DataTypeInfo> dtiCollection)
    {
        List<DataTypeInfo> dtiList = null;
        if (CollectionUtilities.hasContent(dtiCollection))
        {
            dtiList = New.list(dtiCollection);
            Collections.sort(dtiList, ourDTINameAndTypeComparator);
        }
        return dtiList == null ? Collections.<DataTypeInfo>emptyList() : dtiList;
    }
    /**
     * Instantiates a new data group info group by utility.
     */
    private DataGroupInfoGroupByUtility()
    {
    }
    /**
     * The Class DefaultNodeUserObjectGenerator.
     */
    public static class DefaultNodeUserObjectGenerator implements NodeUserObjectGenerator
    {
        @Override
        public GroupByNodeUserObject createNodeUserObject(DataGroupInfo dgi)
        {
            return new GroupByNodeUserObject(dgi);
        }
        @Override
        public GroupByNodeUserObject createNodeUserObject(DataGroupInfo dgi, DataTypeInfo dti)
        {
            return new GroupByNodeUserObject(dgi, dti);
        }
        @Override
        public GroupByNodeUserObject createNodeUserObject(String label)
        {
            return new GroupByNodeUserObject(label);
        }
    }
    /**
     * The Class TreeOptions.
     */
    public static class TreeOptions
    {
        /** The my build with types instead of groups. */
        private final boolean myBuildWithTypesInsteadOfGroups;
        /** The sub nodes for multi member groups. */
        private final boolean mySubNodesForMultiMemberGroups;
        /** Whether to flatten the tree. */
        private final boolean myFlattenTree;
        /**
         * Instantiates a new tree options.
         *
         * @param subNodesForMultiMemberGroups the sub nodes for multi member
         *            groups
         */
        public TreeOptions(boolean subNodesForMultiMemberGroups)
        {
            this(subNodesForMultiMemberGroups, false);
        }
        /**
         * Instantiates a new tree options.
         *
         * @param subNodesForMultiMemberGroups the sub nodes for multi member
         *            groups
         * @param buildWithTypesInsteadOfGroups the build with types instead of
         *            groups
         */
        public TreeOptions(boolean subNodesForMultiMemberGroups, boolean buildWithTypesInsteadOfGroups)
        {
            this(subNodesForMultiMemberGroups, buildWithTypesInsteadOfGroups, true);
        }
        /**
         * Instantiates a new tree options.
         *
         * @param subNodesForMultiMemberGroups the sub nodes for multi member
         *            groups
         * @param buildWithTypesInsteadOfGroups the build with types instead of
         *            groups
         * @param flattenTree whether to flatten the tree
         */
        public TreeOptions(boolean subNodesForMultiMemberGroups, boolean buildWithTypesInsteadOfGroups, boolean flattenTree)
        {
            mySubNodesForMultiMemberGroups = subNodesForMultiMemberGroups;
            myBuildWithTypesInsteadOfGroups = buildWithTypesInsteadOfGroups;
            myFlattenTree = flattenTree;
        }
        /**
         * Checks if is builds the with types instead of groups.
         *
         * @return true, if is builds the with types instead of groups
         */
        public boolean isBuildWithTypesInsteadOfGroups()
        {
            return myBuildWithTypesInsteadOfGroups;
        }
        /**
         * Returns whether to flatten the tree.
         *
         * @return whether to flatten the tree
         */
        public boolean isFlattenTree()
        {
            return myFlattenTree;
        }
        /**
         * Checks if is sub nodes for multi member groups.
         *
         * @return true, if is sub nodes for multi member groups
         */
        public boolean isSubNodesForMultiMemberGroups()
        {
            return mySubNodesForMultiMemberGroups;
        }
    }
    /**
     * Comparator for DataGroupInfo. This comparator handles
     * some different cases to try and keep the feature types ordered above tile
     * types for most views that are used. It does regular case insensitive
     * display name comparisons for most cases, however if the two items being
     * compared are both single member types without children we compare them
     * using their underlying data types, which takes feature type into account
     * then display name. Single-member leaf groups sort AFTER all other groups.
     */
    private static class DGIComparator implements Comparator<DataGroupInfo>
    {
        /** The Dti comp. */
        private final Comparator<DataGroupInfo> myDGIComp = DataGroupInfo.CASE_INSENSITIVE_DISPLAY_NAME_COMPARATOR;
        /** The DTI comp. */
        private final DTIDisplayNameAndFeatureTypeComparator myDTIComp = new DTIDisplayNameAndFeatureTypeComparator();
        @Override
        public int compare(DataGroupInfo o1, DataGroupInfo o2)
        {
            boolean o1IsSingleTypeDGI = !o1.hasChildren() && o1.numMembers(false) == 1;
            boolean o2IsSingleTypeDGI = !o2.hasChildren() && o2.numMembers(false) == 1;
            if (o1IsSingleTypeDGI && o2IsSingleTypeDGI)
            {
                // Both are single-type leaves: defer to the type comparator.
                return myDTIComp.compare(o1.getMembers(false).iterator().next(), o2.getMembers(false).iterator().next());
            }
            else if (o1IsSingleTypeDGI && !o2IsSingleTypeDGI)
            {
                return 1;
            }
            else if (!o1IsSingleTypeDGI && o2IsSingleTypeDGI)
            {
                return -1;
            }
            else
            {
                return myDGIComp.compare(o1, o2);
            }
        }
    }
    /**
     * The Class DTIDisplayNameAndFeatureTypeComparator. Orders by map
     * visualization type ordinal first, then by display name.
     */
    private static class DTIDisplayNameAndFeatureTypeComparator implements Comparator<DataTypeInfo>
    {
        /** The Dti comp. */
        private final Comparator<DataTypeInfo> myDtiComp = DataTypeInfo.CASE_INSENSITIVE_DISPLAY_NAME_COMPARATOR;
        @Override
        public int compare(DataTypeInfo o1, DataTypeInfo o2)
        {
            MapVisualizationType mvt1 = getMapVisType(o1);
            MapVisualizationType mvt2 = getMapVisType(o2);
            int comparedByType = Integer.compare(mvt1.ordinal(), mvt2.ordinal());
            return comparedByType == 0 ? myDtiComp.compare(o1, o2) : comparedByType;
        }
    }
}
|
#!/usr/bin/env bash
# Cross-compiles the Go package in this directory for every platform listed
# below, writing one binary per GOOS/GOARCH pair into ./dist.
set -euo pipefail -o posix
cd "$(dirname "$0")"
# List all possible platforms
# NOTE: Not all of these combinations have been tested - this are just a list of
# combinations where a build is possible
platforms=(
    darwin/386
    darwin/amd64
    freebsd/386
    freebsd/amd64
    freebsd/arm
    linux/386
    linux/amd64
    linux/arm
    linux/arm64
    linux/ppc64
    linux/ppc64le
    linux/mips
    linux/mipsle
    linux/mips64
    linux/mips64le
    linux/s390x
)
targetDir=dist
mkdir -p "$targetDir"
packageName="kubefs"
package="."
for platform in "${platforms[@]}"
do
    # Split "os/arch" into its two parts and export them for `go build`.
    platform_split=(${platform//\// })
    export GOOS=${platform_split[0]}
    export GOARCH=${platform_split[1]}
    out="$targetDir/$packageName-$GOOS-$GOARCH"
    # Windows binaries need the .exe suffix (no windows targets are listed
    # above today, but this keeps the loop correct if one is added).
    if [ $GOOS = "windows" ]; then
        out+='.exe'
    fi
    echo "Building for $platform..."
    go build -o "$out" "$package"
    echo
done
|
// main.rs
#[macro_use]
extern crate failure;
#[macro_use]
extern crate structopt;
extern crate colored;
extern crate notify;
extern crate warp;
use structopt::StructOpt;
use notify::{Watcher, RecursiveMode, watcher};
use std::sync::mpsc::channel;
use std::time::Duration;
use std::path::PathBuf;
use warp::{Filter, http::Response, http::StatusCode};
mod buildcmd;
mod error;
mod parse;
mod runcmd;
/// Command-line arguments for the watch-and-serve tool.
#[derive(StructOpt)]
struct Cli {
    /// Directory to watch (recursively) for file changes.
    #[structopt(parse(from_os_str))]
    directory: PathBuf,
}
/// Entry point: delegate to `run()` and turn any failure into a
/// non-zero process exit with a message on stderr.
fn main() {
    match run() {
        Ok(()) => {}
        Err(e) => {
            eprintln!("Error: {}", e);
            std::process::exit(1);
        }
    }
}
fn run() -> Result<(), failure::Error> {
let args = Cli::from_args();
let (tx, rx) = channel();
let mut watcher = watcher(tx, Duration::from_secs(1))?;
watcher.watch(&args.directory, RecursiveMode::Recursive)?;
let build_route = warp::path("build")
.and(warp::post())
.map(|| {
match buildcmd::execute_build_command() {
Ok(_) => Response::builder()
.status(StatusCode::OK)
.body("Build triggered successfully".into())
.unwrap(),
Err(e) => Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
.body(format!("Failed to trigger build: {}", e))
.unwrap(),
}
});
let routes = build_route.with(warp::log("build"));
tokio::spawn(warp::serve(routes).run(([127, 0, 0, 1], 3030));
loop {
match rx.recv() {
Ok(event) => {
if let notify::DebouncedEvent::Write(_) = event {
match buildcmd::execute_build_command() {
Ok(_) => println!("Build triggered due to file change"),
Err(e) => eprintln!("Failed to trigger build: {}", e),
}
}
}
Err(e) => eprintln!("Watcher error: {}", e),
}
}
} |
// AngularJS module + controller for the admin "manage states" screen:
// list, add, edit, delete and search states, with client-side sorting.
// Interpolation symbols are remapped to <% %> so they don't clash with the
// server-side template engine.
var myApp= angular
    .module("manageStates",['toaster', 'ngAnimate'],function($interpolateProvider) {
        $interpolateProvider.startSymbol('<%');
        $interpolateProvider.endSymbol('%>');
    })
    .controller("StateController", function ($scope,$http,toaster) {
        $scope.reverseSort=false;

        // District model (shared with district forms rendered on this page).
        $scope.district={
            id:0,
            state_id:0,
            district_name:'',
            operating_status:0,
        };

        // State model bound to the add/edit forms.
        // BUG FIX: the original assigned $scope.state twice in a row; the
        // first (narrower) initializer was dead code and has been removed.
        $scope.state={
            id:0,
            name:'',
            country_id:'',
            Capital:'',
            rto_state_code:'',
        };

        $scope.startRow=0;
        $scope.rowLimit='';

        // Initial load: fetch all countries and states, then show/hide page
        // sections depending on whether any states exist.
        // (A redundant second checkData() call was removed.)
        $http.get("/Admin/Location/State/get-all-states")
            .then(function(response) {
                $scope.countries = response.data.countries;
                $scope.states=response.data.states;
                $scope.checkData($scope.states);
                $scope.rowLimit=$scope.states.length;
            });

        // Populate the edit form with the chosen row's values. country_id
        // arrives as a country NAME here, so it is mapped back to the
        // numeric id via the countries list.
        $scope.showEditStateForm=function(id,name,country_id,Capital,rto_state_code)
        {
            $scope.warning='';
            document.getElementById('edit_form').style.display='block';
            for (var i = 0; i < $scope.countries.length; i++) {
                if($scope.countries[i].name==country_id)
                    $scope.country_id=$scope.countries[i].id;
            }
            $scope.state.id=id;
            $scope.state.name=name;
            $scope.state.country_id=$scope.country_id;
            $scope.state.Capital=Capital;
            $scope.state.rto_state_code=rto_state_code;
        };

        $scope.warning='';
        $scope.country_name='';

        // Toggle page sections based on whether any rows were returned.
        $scope.checkData=function(cities){
            if(cities[0])
            {
                document.getElementById('advanced_search').style.display='block';
                document.getElementById('page-city-content').style.display='block';
                document.getElementById('Error').style.display='none';
            }
            else
            {
                document.getElementById('advanced_search').style.display='none';
                document.getElementById('page-city-content').style.display='none';
                document.getElementById('Error').style.display='block';
            }
        };

        // Save the edit form. (Function name kept as-is — "Dtate" typo
        // included — because templates bind to it.)
        $scope.updateDtateDetail=function(){
            // Resolve the selected country id back to its name for the
            // duplicate check below.
            for (var i = 0; i < $scope.countries.length; i++) {
                if($scope.countries[i].id==$scope.state.country_id)
                {
                    $scope.country_name=$scope.countries[i].name;
                }
            }
            $scope.reverseSort=false;
            $scope.validate=true;
            // Reject duplicates: same name + same country on a different row.
            for (var i = 0; i < $scope.states.length; i++) {
                if(angular.lowercase($scope.states[i].name)==angular.lowercase($scope.state.name)
                    &&angular.lowercase($scope.states[i].country_id)==angular.lowercase($scope.country_name)
                    &&$scope.states[i].id!=$scope.state.id)
                {
                    $scope.warning='State is already exist! ';
                    toaster.pop('warning','','State is already exist!');
                    $scope.validate=false;
                }
            }
            if($scope.validate){
                if ( $scope.state.country_id&&$scope.validate) {
                    $scope.warning='';
                    document.getElementById('edit_form').style.display='none';
                    $http.post("/Admin/Location/State/update-state-detail",{
                        state:$scope.state,
                    })
                    .then(function(response) {
                        $scope.states = response.data.states;
                        $scope.checkData($scope.states);
                        toaster.pop('success','','Edited data of State saved!');
                    });
                }
                else{
                    $scope.warning="Please fill the all fields!";
                    toaster.pop('warning','','Please fill the all fields!');
                }
            }
        };

        // Delete after user confirmation; server returns the refreshed list.
        $scope.deleteState =function(id){
            if(confirm('Deleted data will never recovered! Do you want delete it?')){
                $http.post('/Admin/Location/State/delete-state',{
                    state:id,
                }).then(function(response){
                    $scope.states=response.data.states;
                    $scope.checkData($scope.states);
                    toaster.pop('success','','State successfully deleted');
                });
            }
        };

        // Reset the model and reveal the "add new state" form.
        $scope.showAddNewStateForm=function(){
            $scope.warning='';
            $scope.state={};
            document.getElementById('add_newstate_form').style.display='block';
        };

        // Create a new state after the same duplicate/empty-field checks
        // used when editing.
        $scope.addNewState=function(){
            for (var i = 0; i < $scope.countries.length; i++) {
                if($scope.countries[i].id==$scope.state.country_id)
                {
                    $scope.country_name=$scope.countries[i].name;
                }
            }
            $scope.reverseSort=false;
            $scope.validate=true;
            for (var i = 0; i < $scope.states.length; i++) {
                if(angular.lowercase($scope.states[i].name)==angular.lowercase($scope.state.name)
                    &&angular.lowercase($scope.states[i].country_id)==angular.lowercase($scope.country_name)
                    &&$scope.states[i].id!=$scope.state.id)
                {
                    $scope.warning='State is already exist! ';
                    toaster.pop('warning','','State is already exist! ');
                    $scope.validate=false;
                }
            }
            if($scope.validate){
                if($scope.state.country_id){
                    $scope.reverseSort=false;
                    $scope.warning='';
                    document.getElementById('add_newstate_form').style.display='none';
                    $http.post('/Admin/Location/State/add-state',{
                        state:$scope.state,
                    }).then(function(response){
                        $scope.states=response.data.states;
                        $scope.checkData($scope.states);
                        toaster.pop('success','','State added successfully! ');
                    });
                }
                else
                {
                    $scope.warning="Please fill the all fields!";
                    toaster.pop('warning','Empty fields','please fill the all fields! ');
                }
            }
        };

        // Server-side filter by country; keeps the current list when the
        // selected country has no states.
        $scope.SearchByCountry=function(){
            $http.post('/Admin/Location/State/search-state-by-country',{
                country:$scope.state.country_id,
            }).then(function(response){
                if(!response.data.states[0]){
                    toaster.pop('error','No state available from selected country','');
                }
                else{
                    $scope.states=response.data.states;
                }
            });
        };

        // ---------------- client-side sorting helpers ----------------
        $scope.reverseSort=false;
        $scope.sortColumn="id";
        // Clicking the active column toggles direction; a new column sorts
        // ascending.
        $scope.sortData=function(column){
            $scope.reverseSort=($scope.sortColumn==column) ? !$scope.reverseSort :false;
            $scope.sortColumn=column;
        };
        // CSS class for the sort-direction arrow in the column header.
        $scope.getSortClass=function(column){
            if($scope.sortColumn==column){
                return $scope.reverseSort ? 'arrow-down':'arrow-up' ;
            }
            else
            {
                return 'arrow-all';
            }
        };
    });
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;

import org.apache.commons.io.IOUtils;
import org.json.JSONArray;
import org.json.JSONObject;
public class NearbyRestaurantsSearch {
private static final String API_KEY = "Your_API_KEY_HERE";
public static void main(String[] args) {
try {
// Create the URL
String url = "https://api.mapbox.com/geocoding/v5/mapbox.places/" + args[0] + ".json?access_token=" + API_KEY;
// Create an HTTP connection
URLConnection connection = new URL(url).openConnection();
connection.connect();
// Parse the response to extract the restaurants
JSONObject response = new JSONObject(IOUtils.toString(connection.getInputStream()));
JSONArray restaurants = response.getJSONArray("features");
System.out.println("Found " + restaurants.length() + " restaurants");
for (int i = 0; i < restaurants.length(); i++) {
JSONObject restaurant = restaurants.getJSONObject(i);
System.out.println(restaurant.getString("text"));
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
} |
<filename>Bomberman/Source/Bomberman/BombermanGameModeBase.cpp<gh_stars>0
#include "BombermanGameModeBase.h"
// Sets default values
ABombermanGameModeBase::ABombermanGameModeBase()
{
    // Set this actor to call Tick() every frame. You can turn this off to improve performance if you don't need it.
    PrimaryActorTick.bCanEverTick = true;
}
// Called when the game starts or when spawned.
// Captures/creates the two PlayerControllers, positions the top-down camera
// over the center of the map, then generates the level grid and players.
void ABombermanGameModeBase::BeginPlay()
{
    Super::BeginPlay();
    UE_LOG(LogTemp, Warning, TEXT("Beginning Bomberman game..."));
    //Set up the PlayerControllers
    UWorld* world = GetWorld();
    if (world)
    {
        //Capture the PlayerControllers for Player 1 and Player 2
        playerControllers.Add(world->GetFirstPlayerController());
        playerControllers.Add(UGameplayStatics::CreatePlayer(this, -1, true)); //We have to create a second PlayerController ourselves
        // Position the Camera according to the map size (find the middle of the map),
        // Then zoom it out according to the map height
        // (Currently only works with ODD NUMBER map dimensions)
        FVector cameraLocation = FVector(TILE_HEIGHT * (mapSize.Y - 1) / 2, TILE_WIDTH * (mapSize.X - 1) / 2, mapSize.Y * TILE_ZOOM);
        playerControllers[0]->GetViewTarget()->SetActorLocation(cameraLocation); //Move the camera
    }
    //Generate a map grid with Y rows of tiles and X columns of tiles
    //Spawn the players and tile objects appropriately
    GenerateLevel(mapSize.X, mapSize.Y);
}
// Called every frame. Currently only forwards to the base class; the block
// below is retained (commented out) as on-screen tile-coordinate debugging.
void ABombermanGameModeBase::Tick(float DeltaTime)
{
    Super::Tick(DeltaTime);
    /*#if WITH_EDITOR
    //Debug information to display on-screen which tile each player is currently standing on
    FIntPoint p1Coords = GetTileCoords(player1->GetActorLocation());
    FIntPoint p2Coords = GetTileCoords(player2->GetActorLocation());
    GEngine->AddOnScreenDebugMessage(1, 2.0f, FColor::Green, FString::Printf(TEXT("Player 1 x: %i, y: %i"), p1Coords.X, p1Coords.Y));
    GEngine->AddOnScreenDebugMessage(2, 2.0f, FColor::Green, FString::Printf(TEXT("Player 2 x: %i, y: %i"), p2Coords.X, p2Coords.Y));
    #endif*/
}
//Spawn a bomb at a player's position and subtract a bomb from that player.
//No-op if the player has no bombs left or the player's tile is occupied.
void ABombermanGameModeBase::DropBomb(int32 playerID, int32 radius)
{
    UWorld* world = GetWorld();
    if (world)
    {
        FVector spawnLocation = FVector::ZeroVector;
        //Find out which tile the player dropping this bomb is currently on
        FIntPoint bombCoord = GetTileCoords(players[playerID]->GetActorLocation());
        //If the player has bombs remaining, and there's not already an object on this tile, spawn a bomb
        if (players[playerID]->GetNumBombs() > 0 && mapTiles[bombCoord.Y].rowTiles[bombCoord.X]->GetChildObject() == nullptr)
        {
            if (SpawnBomb) //Make sure the Blueprint has been properly linked
            {
                spawnLocation = mapTiles[bombCoord.Y].rowTiles[bombCoord.X]->GetActorLocation(); //Get the location at the CENTER of this tile
                //Create the new bomb, and assign all its necessary data. We get most of this information from the PLAYER
                ABomb* newBomb = world->SpawnActor<ABomb>(SpawnBomb, spawnLocation, FRotator::ZeroRotator);
                newBomb->SetTileCoord(bombCoord);
                newBomb->SetPlayerID(playerID);
                newBomb->SetExplosionRadius(radius);
                //Add the bomb as the child of the tile it's on. This allows it to get hit by chain explosions!!
                mapTiles[bombCoord.Y].rowTiles[bombCoord.X]->SetChildObject(newBomb);
                players[playerID]->DropBomb(); //Take one bomb away from the player who dropped this bomb
            }
        }
    }
}
/*
 * Trigger a bomb explosion from a specified point and refund the player's bomb.
 * Walks outward in each cardinal direction up to `radius` tiles, destroying
 * destructible objects (chaining into other bombs) and stopping at walls.
 *
 * This is not recursive, but it probably could be!
 */
void ABombermanGameModeBase::ExplodeBomb(int32 playerID, int32 radius, FIntPoint bombCoord)
{
    UWorld* world = GetWorld(); //Bombs shouldn't explode while the game is paused. This allows us to reset the game cleanly,
    if (world && !UGameplayStatics::IsGamePaused(world)) //without bombs exploding while we destroy and remake the level!
    {
        FVector location; //Temporary location variable we will use to spawn effects
        //Quick array of points representing all 4 cardinal directions:
        FIntPoint dir[4]{
            FIntPoint(0,1), //UP
            FIntPoint(1,0) , //RIGHT
            FIntPoint(0,-1) , //DOWN
            FIntPoint(-1,0) //LEFT
        };
        //The for loop represents each of the cardinal directions: UP, DOWN, LEFT, and RIGHT
        for (int i = 0; i < 4; ++i)
        {
            bool hitWall = false; //Has the explosion hit a wall?
            int dist = 1; //Distance away from the bomb in our current direction
            do {
                //First, get the adjacent tile's coordinates. I know this next line is unreadable, so bear with me...
                // mapTiles is a TArray containing each ROW of the map
                // rowTiles is a TArray of ATile* representing each tile in the row
                // bombCoord represents the location we are checking our explosion from
                // dir[i] represents a unit-length vector in a cardinal direction
                // we multiply dir[i] by dist to get a tile in our current direction, at our current check radius
                FIntPoint adjacentCoords = FIntPoint(bombCoord.X + dir[i].X * dist, bombCoord.Y + dir[i].Y * dist);
                ATile* adjacentTile = mapTiles[adjacentCoords.Y].rowTiles[adjacentCoords.X];
                //If that tile has an object on it, check if the object is breakable
                if (adjacentTile->GetChildObject() != nullptr)
                {
                    if (adjacentTile->GetChildObject()->GetDestructible())
                    {
                        //Destroy a breakable TileObject (if it's a bomb, this will trigger another explosion)
                        ATileObject* doomedObject = adjacentTile->GetChildObject();
                        adjacentTile->SetChildObject(nullptr); //The reason we SetChildObject(null) BEFORE Destroy is that powerups spawn when walls are destroyed.
                        doomedObject->Destroy(); //If we call SetChildObject(null) AFTER Destroy, the powerup has already been added to childObject, and overwrite its pointer.
                    }
                    else
                    {
                        //We have hit a wall (the only non-destructible TileObject, so don't continue checking in this direction
                        hitWall = true;
                        //Note:
                        //Because our level is surrounded by unbreakable walls,
                        //we don't need to error-check our coordinates
                        //(we'll never go below 0 or above the map width/height)
                    }
                }
                //Spawn an explosion effect on the tile we just checked, so long as it wasn't a wall
                if (!hitWall && world && SpawnExplosionEffect)
                {
                    location = adjacentTile->GetActorLocation();
                    world->SpawnActor<AActor>(SpawnExplosionEffect, location, FRotator::ZeroRotator);
                }
                ++dist; //increment the distance for our next check
                //If we haven't reached the explosion radius OR hit an unbreakable wall, repeat the process
            } while (dist <= radius && !hitWall);
        }
        if (SpawnExplosionEffect)
        {
            //Spawn an explosion effect on the site of the explosion (where the bomb was)
            location = mapTiles[bombCoord.Y].rowTiles[bombCoord.X]->GetActorLocation();
            world->SpawnActor<AActor>(SpawnExplosionEffect, location, FRotator::ZeroRotator);
        }
        //Finally, remove the bomb which just blew up from its tile so we can place another bomb there later
        mapTiles[bombCoord.Y].rowTiles[bombCoord.X]->SetChildObject(nullptr);
        players[playerID]->AddBomb(); //Also refund the appropriate player's bomb
    }
}
//Spawn a powerup at a breakable wall's position.
//Picks one of the configured powerup Blueprints at random and attaches the
//spawned actor to the tile so pickup/cleanup logic can find it.
void ABombermanGameModeBase::DropPowerup(FIntPoint powerupCoord)
{
    UWorld* world = GetWorld(); //Powerups shouldn't spawn while the game is paused. This allows us to reset the game cleanly,
    if (world && !UGameplayStatics::IsGamePaused(world)) //without breakables spawning new powerups as we destroy and remake the level!
    {
        int index = FMath::RandRange(0, SpawnPowerups.Num() - 1); //Randomly pick one of our available powerups
        if (SpawnPowerups[index])
        {
            //Get the location of the tile on which the Powerup will spawn
            FVector location = mapTiles[powerupCoord.Y].rowTiles[powerupCoord.X]->GetActorLocation();
            APowerup* newPowerup = world->SpawnActor<APowerup>(SpawnPowerups[index], location, FRotator::ZeroRotator);
            newPowerup->SetTileCoord(powerupCoord); //Set the map grid coordinates of the Powerup after it's spawned
            mapTiles[powerupCoord.Y].rowTiles[powerupCoord.X]->SetChildObject(newPowerup); //Then attach it to it's appropriate tile
        }
    }
}
/*
 * Clear a tile after its childObject has been destroyed.
 * This function exists purely so a tile knows it's empty after a Player
 * consumes its Powerup.
 */
void ABombermanGameModeBase::ClearTile(FIntPoint tileCoord)
{
    mapTiles[tileCoord.Y].rowTiles[tileCoord.X]->SetChildObject(nullptr);
}
/*
 * Randomly generate a new map grid based on a specified map size
 * The map grid can scale arbitrarily, but the wall pattern is based on the first level of the original Bomberman
 *
 * This function spawns:
 * > TILES
 * > WALLS
 * > BREAKABLE WALLS
 * > PLAYERS
 *
 * It also checks to make sure Players aren't "trapped" on spawn
 */
void ABombermanGameModeBase::GenerateLevel(int32 levelWidth, int32 levelHeight)
{
    UWorld* world = GetWorld();
    if (world)
    {
        FVector spawnLocation = FVector::ZeroVector;
        FRotator spawnRotator = FRotator::ZeroRotator;
        // Player1 spawns at playerSpawnOffset (starting from the bottom left of the map)
        // Player2 spawns starting from the top right of the map, so we need to calculate his start point
        FIntPoint player2SpawnOffset = FIntPoint(levelWidth - 1 - playerSpawnOffset.X, levelHeight - 1 - playerSpawnOffset.Y);
        /////////////////////////////
        //Build the map, tile by tile
        /////////////////////////////
        for (int32 i = 0; i < levelHeight; ++i)
        {
            FMapRow mapRow; //Build each row of the map grid...
            for (int32 j = 0; j < levelWidth; ++j)
            {
                spawnLocation.X = i * TILE_WIDTH; //Set the world X position of each tile
                spawnLocation.Y = j * TILE_HEIGHT; //Set the world Y position of each tile
                if (SpawnTile)
                {
                    mapRow.rowTiles.Add(world->SpawnActor<ATile>(SpawnTile, spawnLocation, spawnRotator)); //Spawn an empty tile
                }
                if (i == 0 || i == levelHeight - 1 || //If we're in the top or bottom rows of the level, spawn walls
                    j == 0 || j == levelWidth - 1 || //If we're on the left or right edges of the level, spawn walls
                    (i % 2 == 0 && j % 2 == 0)) //Also spawn walls at coordinates whose row AND column are EVEN-numbered (the classic pillar grid)
                {
                    if (SpawnWall)
                    {
                        mapRow.rowTiles[j]->SetChildObject(world->SpawnActor<ATileObject>(SpawnWall, spawnLocation, spawnRotator));
                        mapRow.rowTiles[j]->GetChildObject()->SetTileCoord(FIntPoint(j, i));
                    }
                }
                else if (i == playerSpawnOffset.Y && j == playerSpawnOffset.X) //Spawn Player 1 offset from the bottom left of the map
                {
                    if (SpawnP1)
                    {
                        //Spawn Player 1
                        players.Add(world->SpawnActor<ABombermanCharacter>(SpawnP1, spawnLocation, spawnRotator));
                        players[0]->SetPlayerID(0);
                        playerControllers[0]->Possess(players[0]); //Attach the first player to the first PlayerController
                    }
                }
                else if (i == player2SpawnOffset.Y && j == player2SpawnOffset.X) //Spawn Player 2 offset from the top right of the map
                {
                    if (SpawnP2)
                    {
                        if (playerControllers[1]) //Because we created this PlayerController ourselves, let's make sure it exists...
                        {
                            //Spawn Player 2
                            players.Add(world->SpawnActor<ABombermanCharacter>(SpawnP2, spawnLocation, spawnRotator));
                            players[1]->SetPlayerID(1);
                            playerControllers[1]->Possess(players[1]); //Attach the second player to the second PlayerController
                        }
                    }
                }
                //Safety-check to ensure neither player spawns trapped by walls
                else if (!( FMath::Abs(i - playerSpawnOffset.Y) == 1 && FMath::Abs(j - playerSpawnOffset.X) == 0) && //If we're not directly above or below Player 1
                    !(FMath::Abs(i - playerSpawnOffset.Y) == 0 && FMath::Abs(j - playerSpawnOffset.X) == 1) && //AND we're not directly left or right of Player 1
                    !(FMath::Abs(i - player2SpawnOffset.Y) == 1 && FMath::Abs(j - player2SpawnOffset.X) == 0) && //AND we're not directly above or below Player 2
                    !(FMath::Abs(i - player2SpawnOffset.Y) == 0 && FMath::Abs(j - player2SpawnOffset.X) == 1)) //AND we're not directly left or right of Player 2
                {
                    if (FMath::RandRange(0.0f, 1.0f) >= 0.5f) //Spawn destructible walls on 50% of the remaining tiles
                    {
                        if (SpawnBreakable)
                        {
                            mapRow.rowTiles[j]->SetChildObject(world->SpawnActor<ATileObject>(SpawnBreakable, spawnLocation, spawnRotator));
                            mapRow.rowTiles[j]->GetChildObject()->SetTileCoord(FIntPoint(j, i));
                        }
                    }
                }
            }
            mapTiles.Add(mapRow); //Store the completed map row
        }
    }
}
/*
 * Reset the game - somebody has won, and has chosen to play again.
 *
 * Iterate through all our storage containers, Destroying the entire map as we go.
 * We also destroy active Explosions, and the remaining Player(s), then
 * regenerate a fresh level.
 */
void ABombermanGameModeBase::ResetGame()
{
    //Remove the player pawns...
    while (players.Num() > 0)
    {
        players[0]->Destroy();
        players.RemoveAt(0);
    }
    UWorld* world = GetWorld();
    if (world)
    {
        //Search the scene for any active Explosions
        TArray<AActor*> explosions;
        UGameplayStatics::GetAllActorsOfClass(world, SpawnExplosionEffect, explosions);
        //Remove all remaining Explosions
        while (explosions.Num() > 0)
        {
            explosions[0]->Destroy();
            explosions.RemoveAt(0);
        }
    }
    //Remove each row of the map
    while (mapTiles.Num() > 0)
    {
        //Remove each tile from the row
        while (mapTiles[0].rowTiles.Num() > 0)
        {
            //If the tile has a child, destroy it
            if (mapTiles[0].rowTiles[0]->GetChildObject() != nullptr)
            {
                mapTiles[0].rowTiles[0]->GetChildObject()->Destroy();
            }
            mapTiles[0].rowTiles[0]->Destroy();
            mapTiles[0].rowTiles.RemoveAt(0);
        }
        mapTiles.RemoveAt(0);
    }
    //Generate a new level from scratch
    GenerateLevel(mapSize.X, mapSize.Y);
}
/*
 * Helper Function: Translate a world position into tile coordinates on the map grid.
 * Returns (column, row): the world Y axis maps to the X component and world X
 * maps to the Y component, matching how GenerateLevel lays out tiles.
 *
 * NOTE(review): the constant pairing here (Y with TILE_WIDTH, X with
 * TILE_HEIGHT) is the reverse of GenerateLevel's (X with TILE_WIDTH, Y with
 * TILE_HEIGHT); this is only consistent if TILE_WIDTH == TILE_HEIGHT — confirm.
 */
FIntPoint ABombermanGameModeBase::GetTileCoords(FVector pos)
{
    //Our Tile Blueprints have their origin at the center, so we need to compensate by half a tile
    pos.Y += TILE_WIDTH * 0.5f;
    pos.X += TILE_HEIGHT * 0.5f;
    //"Floor" the position to the bottom-left of the current tile
    pos.Y -= FGenericPlatformMath::Fmod(pos.Y, TILE_WIDTH);
    pos.X -= FGenericPlatformMath::Fmod(pos.X, TILE_HEIGHT);
    //Divide by TILE_WIDTH and TILE_HEIGHT to calculate the tile we're on
    return FIntPoint(pos.Y / TILE_WIDTH, pos.X / TILE_HEIGHT);
}
|
import turtle
def create_ISS(shape, lat, long):
    """Draw a turtle marker for the ISS at (lat, long) and start the GUI loop.

    NOTE(review): the coordinate mapping below assumes a screen whose origin
    and scale match (long+180)*2 by (90-lat)*2 — confirm against the world-map
    background actually used, and note turtle's default origin is the center
    of the window.
    """
    # Create a turtle object representing the ISS
    iss = turtle.Turtle()
    # Set the shape of the ISS turtle
    iss.shape(shape)
    # Set the heading of the ISS turtle to face north (90 degrees)
    iss.setheading(90)
    # Calculate the screen coordinates based on latitude and longitude
    # (Assuming latitude and longitude are in degrees)
    x = (long + 180) * 2  # Scale longitude to fit the screen width
    y = (90 - lat) * 2  # Scale latitude to fit the screen height
    # Position the ISS turtle at the calculated coordinates
    iss.penup()  # Lift the pen to move without drawing
    iss.goto(x, y)  # Move the turtle to the calculated coordinates
    iss.pendown()  # Lower the pen to start drawing
    # Call the function to simulate the ISS passing over Indianapolis
    # (defined later in this module; resolved at call time)
    pass_over_Indy()
    # Start the turtle graphics main loop to display the ISS movement
    # (blocks until the window is closed)
    turtle.mainloop()
# Function to simulate the ISS passing over Indianapolis
def pass_over_Indy():
    """Simulate the ISS passing over Indianapolis.

    Placeholder: the simulation is not implemented yet; calling this is a
    deliberate no-op.
    """
    return None
# @Time : 2020/11/30
# @Author : <NAME>
# @Email : <EMAIL>
# UPDATE:
# @Time : 2020/12/18
# @Author : <NAME>
# @Email : <EMAIL>
import math
import re
from collections import Counter
from typing import List, Optional
import numpy as np
from nltk import ngrams
from nltk.translate.bleu_score import sentence_bleu
from sklearn.metrics.pairwise import cosine_similarity
from crslab.evaluator.metrics.base import AverageMetric, SumMetric
# Precompiled patterns used by normalize_answer():
# English articles (whole words only).
re_art = re.compile(r'\b(a|an|the)\b')
# ASCII punctuation to strip (note: underscore and apostrophe are included).
re_punc = re.compile(r'[!"#$%&()*+,-./:;<=>?@\[\]\\^`{|}~_\']')
# Runs of whitespace, collapsed to a single space.
re_space = re.compile(r'\s+')
class PPLMetric(AverageMetric):
    """Perplexity: exponentiates the running average negative log-likelihood."""

    def value(self):
        avg_nll = super().value()
        return math.exp(avg_nll)
def normalize_answer(s):
    """Lowercase *s* and blank out punctuation, articles and whitespace runs.

    Each matched span is replaced by a single space, in the order:
    punctuation, then articles, then whitespace. Leading/trailing spaces are
    intentionally NOT stripped (the original left them in place).
    """
    normalized = s.lower()
    for pattern in (re_punc, re_art, re_space):
        normalized = pattern.sub(' ', normalized)
    return normalized
class ExactMatchMetric(AverageMetric):
    """1 if the guess exactly equals any reference answer, else 0."""

    @staticmethod
    def compute(guess: str, answers: List[str]) -> 'ExactMatchMetric':
        if guess is None or answers is None:
            return None
        # `in` performs the same == comparisons as an explicit loop.
        return ExactMatchMetric(1 if guess in answers else 0)
class F1Metric(AverageMetric):
    """
    Helper class which computes token-level F1 against the best reference.
    """

    @staticmethod
    def _prec_recall_f1_score(pred_items, gold_items):
        """Return token-level F1 for one prediction/gold pair.

        :param pred_items: iterable of predicted tokens
        :param gold_items: iterable of gold tokens
        :return: F1 score, or 0 when there is no token overlap
        """
        overlap = Counter(gold_items) & Counter(pred_items)
        num_same = sum(overlap.values())
        if num_same == 0:
            return 0
        precision = num_same / len(pred_items)
        recall = num_same / len(gold_items)
        return (2 * precision * recall) / (precision + recall)

    @staticmethod
    def compute(guess: str, answers: List[str]) -> 'F1Metric':
        if guess is None or answers is None:
            return AverageMetric(0, 0)
        guess_tokens = guess.split()
        best = max(
            F1Metric._prec_recall_f1_score(guess_tokens, answer.split())
            for answer in answers
        )
        return F1Metric(best, 1)
class BleuMetric(AverageMetric):
    """Single-order BLEU-k between a guess and a set of reference answers."""

    @staticmethod
    def compute(guess: str, answers: List[str], k: int) -> Optional['BleuMetric']:
        """
        Compute approximate BLEU score, putting all weight on k-grams only.
        """
        weights = tuple(1 if order == k - 1 else 0 for order in range(4))
        references = [answer.split(" ") for answer in answers]
        score = sentence_bleu(references, guess.split(" "), weights=weights)
        return BleuMetric(score)
class DistMetric(SumMetric):
    """Count of distinct k-grams in a sentence (distinct-k diversity)."""

    @staticmethod
    def compute(sent: str, k: int) -> 'DistMetric':
        distinct_grams = {gram for gram in ngrams(sent.split(), k)}
        return DistMetric(len(distinct_grams))
class EmbeddingAverage(AverageMetric):
    """Cosine similarity between length-normalized mean embeddings,
    taking the best match over all references."""

    @staticmethod
    def _avg_embedding(embedding):
        summed = np.sum(embedding, axis=0)
        # The epsilon guards against dividing a zero vector by zero.
        return summed / (np.linalg.norm(summed) + 1e-12)

    @staticmethod
    def compute(hyp_embedding, ref_embeddings) -> 'EmbeddingAverage':
        hyp_avg = EmbeddingAverage._avg_embedding(hyp_embedding).reshape(1, -1)
        ref_avgs = np.array([EmbeddingAverage._avg_embedding(emb) for emb in ref_embeddings])
        return EmbeddingAverage(float(cosine_similarity(hyp_avg, ref_avgs).max()))
class VectorExtrema(AverageMetric):
    """Vector-extrema embedding similarity (best match over references)."""

    @staticmethod
    def _extreme_embedding(embedding):
        # Per-dimension pooling: compare the column max (x) and min (y) and
        # appears to keep whichever has the larger magnitude, falling back to
        # the min in ambiguous/equal cases — the usual "vector extrema"
        # pooling. NOTE(review): confirm the boundary cases (y == 0, x == -y)
        # behave as intended.
        max_emb = np.max(embedding, axis=0)
        min_emb = np.min(embedding, axis=0)
        extreme_emb = np.fromiter(
            map(lambda x, y: x if ((x > y or x < -y) and y > 0) or ((x < y or x > -y) and y < 0) else y, max_emb,
                min_emb), dtype=float)
        return extreme_emb

    @staticmethod
    def compute(hyp_embedding, ref_embeddings) -> 'VectorExtrema':
        # Max cosine similarity between the hypothesis extrema vector and the
        # extrema vector of each reference.
        hyp_ext_emb = VectorExtrema._extreme_embedding(hyp_embedding).reshape(1, -1)
        ref_ext_embs = [VectorExtrema._extreme_embedding(emb) for emb in ref_embeddings]
        ref_ext_embs = np.asarray(ref_ext_embs)
        return VectorExtrema(float(cosine_similarity(hyp_ext_emb, ref_ext_embs).max()))
class GreedyMatch(AverageMetric):
    """Greedy-matching embedding similarity against the best reference."""

    @staticmethod
    def compute(hyp_embedding, ref_embeddings) -> 'GreedyMatch':
        hyp = np.asarray(hyp_embedding)
        best = 0
        for ref_embedding in ref_embeddings:
            sim = cosine_similarity(hyp, np.asarray(ref_embedding))
            # Average of forward and backward greedy token matches.
            score = (sim.max(axis=0).mean() + sim.max(axis=1).mean()) / 2
            best = max(best, score)
        return GreedyMatch(best)
|
import React, { Component } from 'react';
import kb from '../../../../api/knowledgebase';
import Explainer from '../../../ui/scaffold/Explainer/Explainer';
export default class MetricDescription extends Component {
render() {
return (
<div className='MetricDescription'>
<h3>
Metric: {this.props.metric || 'Select a Metric'}
<Explainer
intro={<p>{kb.metricsReference[this.props.metric]}</p>}
knowledgebase='CSVMetrics'
/>
</h3>
</div>
)
}
} |
class Child(Parent):
    """Demonstrates overriding a parent method.

    BUG FIX: the original defined ``speak`` twice — the first definition was
    missing ``self`` (a TypeError if it were ever called) and was immediately
    shadowed by the second, so it was dead code and has been removed.
    """

    def speak(self):
        """Print the child's greeting, overriding Parent.speak()."""
        print("Hi there!")
#!/bin/bash
# Lint, build and test, commit the tested state, then version-bump and
# publish the package, and finally publish the demo site.
set -e

npm run lint
npm run build:cli
npm run test:ci

# Current version (e.g. 1.2.3) parsed from `npm version` output.
CURRENT=$(npm version | grep xunit-viewer | cut -d"'" -f4)
git commit -am "tested $CURRENT"

# Bump (patch by default, or the increment passed as $1) and publish.
npm version ${1-patch}
npm publish

# BUG FIX: the original ran `$(echo npm version | ...)`, which piped the
# literal string "npm version" through grep and always produced an empty
# LATEST. Run the command itself, as done for CURRENT above.
LATEST=$(npm version | grep xunit-viewer | cut -d"'" -f4)

npm run release:demo
git add -A
git commit -am "release demo $LATEST"
git push
git push --tags
|
#!/bin/bash
# Collect code-quality stats (typescript strict-null errors, Angular
# directive and controller counts) and fail the build when any count exceeds
# its budget. On master, the numbers are also published as CI metrics.
echo -e "Collecting code stats (typescript errors & more)"

# Current budgets; lower these as the codebase is cleaned up.
ERROR_COUNT_LIMIT=1005
DIRECTIVES_LIMIT=172
CONTROLLERS_LIMIT=139

# Error count parsed from tsc's "Found N errors" summary line.
ERROR_COUNT="$(./node_modules/.bin/tsc --project tsconfig.json --noEmit --strictNullChecks true | grep -oP 'Found \K(\d+)')"
# Rough grep-based counts of directive registrations and *Ctrl classes.
DIRECTIVES="$(grep -r -o directive public/app/**/* | wc -l)"
CONTROLLERS="$(grep -r -oP 'class .*Ctrl' public/app/**/* | wc -l)"

if [ "$ERROR_COUNT" -gt $ERROR_COUNT_LIMIT ]; then
    echo -e "Typescript strict errors $ERROR_COUNT exceeded $ERROR_COUNT_LIMIT so failing build"
    exit 1
fi

if [ "$DIRECTIVES" -gt $DIRECTIVES_LIMIT ]; then
    echo -e "Directive count $DIRECTIVES exceeded $DIRECTIVES_LIMIT so failing build"
    exit 1
fi

if [ "$CONTROLLERS" -gt $CONTROLLERS_LIMIT ]; then
    echo -e "Controllers count $CONTROLLERS exceeded $CONTROLLERS_LIMIT so failing build"
    exit 1
fi

echo -e "Typescript errors: $ERROR_COUNT"
echo -e "Directives: $DIRECTIVES"
echo -e "Controllers: $CONTROLLERS"

# Publish the metrics only from the master branch.
if [ "${CIRCLE_BRANCH}" == "master" ]; then
    ./scripts/ci-metrics-publisher.sh \
        grafana.ci-code.strictErrors="$ERROR_COUNT" \
        grafana.ci-code.directives="$DIRECTIVES" \
        grafana.ci-code.controllers="$CONTROLLERS"
fi
|
import { Schema, model } from 'mongoose';

// Mongoose schema for user accounts.
const UserSchema = new Schema({
    email: String,
    password: String,
    name: String,
    title: String,
    gender: String,
    education: String,
    experience: String,
    isApproved: Boolean,
    isAdmin: Boolean,
    // Numeric experience level, constrained to the range 1..10.
    expNumber: { type: Number, min: 1, max: 10, required: true },
    // BUG FIX: the original used `default: Date.now()`, which evaluates once
    // at module load and stamps every document with the same timestamp.
    // Passing the function itself defers evaluation to document creation.
    createdAt: { type: Date, default: Date.now }
});

const User = model('User', UserSchema);

export default User;
|
#!/usr/bin/env bash
# Out-of-source cross-compile build for Raspberry Pi; requires RPI_TOOLS and
# BOOST_DIRECTORY to be exported. Optional args: $1 = cmake generator
# (default Ninja), $2 = build target (default all).
set -euf -o pipefail

# BUG FIX: with `set -u` in effect, referencing an unset variable aborts the
# script with "unbound variable" before the friendly check can run. The
# ${VAR:-} default-expansion makes the guards actually fire.
if [ -z "${RPI_TOOLS:-}" ]; then
    echo "RPI_TOOLS not specified, exiting..." >&2
    exit 1
fi
if [ -z "${BOOST_DIRECTORY:-}" ]; then
    echo "BOOST_DIRECTORY not specified, exiting..." >&2
    exit 1
fi

mkdir -p build-arm
cd build-arm
cmake .. -G "${1:-Ninja}" -DCMAKE_TOOLCHAIN_FILE=../toolchain-rpi.cmake
cmake --build ./ --target "${2:-all}"
|
<gh_stars>10-100
package resolver
import (
"fmt"
"github.com/miekg/dns"
"github.com/chrisruffalo/gudgeon/util"
)
// multiSource fans a DNS question out to an ordered list of underlying
// sources, answering from the first one that returns a usable response.
type multiSource struct {
	// name identifies this multisource (exposed via Name() as "ms:<name>").
	name string
	// sources are consulted in order by Answer.
	sources []Source
	// idx is not referenced by any method in this file.
	// NOTE(review): possibly leftover state — confirm before removing.
	idx int
}
// newMultiSource wraps the given sources in a multiSource with the given name.
func newMultiSource(name string, sources []Source) Source {
	return &multiSource{name: name, sources: sources}
}
// Load is a no-op: a multiSource has no specification of its own to load;
// each wrapped source is loaded independently.
func (s *multiSource) Load(specification string) {
}
// Answer asks each wrapped source in order and returns the first non-empty,
// error-free response, recording which source answered in the context.
// It fails only when every source comes up empty.
func (s *multiSource) Answer(rCon *RequestContext, context *ResolutionContext, request *dns.Msg) (*dns.Msg, error) {
	for _, src := range s.sources {
		response, err := src.Answer(rCon, context, request)
		if err != nil || util.IsEmptyResponse(response) {
			continue
		}
		if context != nil {
			context.SourceUsed = s.Name() + "(" + src.Name() + ")"
		}
		return response, nil
	}
	return nil, fmt.Errorf("No source in multisource: '%s' had a response", s.name)
}
// Name returns this source's configured name prefixed with "ms:".
func (s *multiSource) Name() string {
	const prefix = "ms:"
	return prefix + s.name
}
// Close closes every wrapped source in order.
func (s *multiSource) Close() {
	for _, src := range s.sources {
		src.Close()
	}
}
|
#!/bin/bash
# Download one or more data dumps (comma-separated URI list), extract any
# archives, convert everything to N-Triples, sort/deduplicate, and gzip the
# merged result into $folder$filename.nt.gz.
# Usage: <script> <comma-separated-uris> <useProxy:true|false> <filename>
data=$1
useProxy=$2;
filename=$3
folder='/tmp/';
echo "Parameters: $data $useProxy $filename";
tempPreProcess="/tmp/$filename";
if [ -f "$folder$filename.nt.gz" ]; then
    echo "Datadump available in: $folder$filename.nt.gz";
else
    # -p: do not fail when the working directory is left over from a previous run
    mkdir -p "$tempPreProcess";
    export IFS=","
    for f in $data; do
        echo "Downloading data dump from: $f";
        cd "$tempPreProcess";
        if [ $useProxy == "true" ]; then
            file=$(LANG=C wget "$f" -e use_proxy=yes -e http_proxy=webcache.iai.uni-bonn.de:3128 -P $tempPreProcess 2>&1 | sed -n "s/Saving to: '\(.*\)'/\1/p") ;
        else
            file=$(LANG=C wget "$f" -P $tempPreProcess 2>&1 | sed -n "s/Saving to: '\(.*\)'/\1/p") ;
        fi
        cd "$tempPreProcess";
        # rename unhelpfully-named downloads (html error pages, query-string URLs)
        if [[ $file == *".html"* ]]
        then
            mv "$file" "$(uuidgen)"
        fi
        if [[ $file == *"?"* ]]; then
            mv "$file" "$(uuidgen)"
        fi
        if [[ $file =~ \.tar$ ]];then
            echo "Extract .tar file";
            tar -xvf $file
            rm -rf $file ;
        elif [[ $file =~ \.tar.gz$ ]];then
            echo "Extract .tar.gz file";
            tar -xzvf $file
            rm -rf $file ;
        elif [[ $file =~ \.tar.bz$ ]];then
            echo "Extract .tar.bz file";
            # -j (bzip2), not -z (gzip): a .tar.bz archive is bzip2-compressed
            tar -xjvf $file
            rm -rf $file ;
        elif [[ $file =~ \.gz$ ]];then
            echo "Extract .gz file";
            gunzip $file
            rm -rf $file ;
        elif [[ $file =~ \.zip$ ]];then
            echo "Extract .zip file";
            echo $file
            unzip $file
            rm -rf $file ;
        elif [[ $file =~ \.bz2$ ]];then
            # (a second, identical .bz2 branch was removed: it was unreachable)
            echo "Extract .bz2 file";
            bzip2 -dk $file
            rm -rf $file ;
        elif [[ $file =~ \.tgz$ ]];then
            echo "Extract .tgz file";
            tar -xvzf $file
            rm -rf $file ;
        fi
    done
    cd "$tempPreProcess"
    IFS=$'\n';
    # flatten: move every extracted file up into the working directory
    for f in $(find $tempPreProcess -name '*.*'); do
        mv "$f" "$tempPreProcess"
    done
    for f in *; do
        if [[ $f =~ \.nt$ ]];then
            echo "Sorting Triples...";
            sort -u "$f" > "$tempPreProcess/$f-sorted".nt
        else
            if [[ $f =~ \.ttl$ ]]; then
                echo "Converting Turtle file using SERDI";
                serdi -b -f -i turtle -o ntriples -o ntriples "$f" > "$f".nt;
                # plain `sort -u`, not `sort -m`: the freshly converted file is
                # unsorted and -m only merges inputs that are already sorted
                sort -u "$f".nt > "$tempPreProcess/$f-sorted".nt;
            elif [[ $f =~ \.xml$ ]]; then
                mv "$f" "$f.rdf"
                echo "Converting file to NTriples and Sorting Triples...";
                rapper -i guess -o ntriples "$f.rdf" > "$f".nt;
                sort -u "$f".nt > "$tempPreProcess/$f-sorted".nt;
            else
                echo "Converting file to NTriples and Sorting Triples...";
                rapper -i guess -o ntriples "$f" > "$f".nt;
                sort -u "$f".nt > "$tempPreProcess/$f-sorted".nt;
            fi
        fi
    done
    # if we have just one file don't merge/sort
    cnt=$(ls | wc -l)
    if [ $cnt -gt 1 ]; then
        echo "Merging Sorted Files...";
        cat *-sorted.nt > "merged.nt";
        echo "Cleaning...";
        sort -u "merged.nt" > "cleaned.nt" ;
        echo "$folder$filename.nt";
        mv "cleaned.nt" "$folder$filename.nt";
    else
        mv "$tempPreProcess/$f-sorted".nt "$folder$filename.nt";
    fi
    echo "Gzipping NT file"
    gzip "$folder$filename.nt"
    #rm -rf "$tempPreProcess"
    echo "Datadump available in: $folder$filename.nt.gz";
fi
|
import { ComplexBase } from '@syncfusion/ej2-react-base';
import { IndexesModel } from '@syncfusion/ej2-charts';
/**
* `SelectedDataIndex` directive represent the selected data in react Chart.
* It must be contained in a Chart component(`ChartComponent`).
* ```tsx
* <ChartComponent>
* <SelectedDataIndexesDirective>
* <SelectedDataIndexDirective></SelectedDataIndexDirective>
* </SelectedDataIndexesDirective>
* </ChartComponent>
* ```
*/
export class SelectedDataIndexDirective extends ComplexBase<IndexesModel & { children?: React.ReactNode }, IndexesModel> {
    // Name under which the chart registers this directive's settings.
    public static moduleName: string = 'selectedDataIndex';
}
/**
 * Container directive: collects the child `SelectedDataIndexDirective`
 * entries into the chart's `selectedDataIndexes` property.
 */
export class SelectedDataIndexesDirective extends ComplexBase<{}, {}> {
    // Chart property populated by the children of this directive.
    public static propertyName: string = 'selectedDataIndexes';
    public static moduleName: string = 'selectedDataIndexes';
}
|
#!/usr/bin/env bash
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Environment setup for Vitis-AI Alveo: resolves VAI_ALVEO_ROOT, wires up
# library/python paths for either the packaged (conda) or developer flow,
# and verifies/enables XRT (and optionally XRM).
export VAI_ALVEO_ROOT="$( readlink -f "$( dirname "${BASH_SOURCE[0]}" )/.." )"
echo "------------------"
echo "Using VAI_ALVEO_ROOT"
echo "------------------"
echo $VAI_ALVEO_ROOT
echo ""
##############################
# Enable XILINX_XRM
##############################
echo "---------------------"
echo "Verifying XILINX_XRM"
echo "---------------------"
if [[ "$XDNN_XRM" -eq 1 ]]; then
    export LD_LIBRARY_PATH=/opt/xilinx/xrm/lib:$LD_LIBRARY_PATH
    echo "Using Xilinx XRM"
fi
echo "---------------------"
echo "Using LD_LIBRARY_PATH"
echo "---------------------"
echo $LD_LIBRARY_PATH
# Presence of the conda-packaged libxfdnn.so distinguishes the two flows.
LIBXDNN_PATH=${CONDA_PREFIX}/lib/libxfdnn.so
if [ -f $LIBXDNN_PATH ]; then
    echo "--------------------"
    echo "Vitis-AI Flow"
    echo "---------------------"
    LD_LIBRARY_PATH=${CONDA_PREFIX}/lib:$LD_LIBRARY_PATH
else
    echo "---------------------"
    echo "Developer Flow"
    echo "---------------------"
    PYTHONPATH=${VAI_ALVEO_ROOT}:${VAI_ALVEO_ROOT}/apps/yolo:${VAI_ALVEO_ROOT}/apps/yolo:${VAI_ALVEO_ROOT}/xfmlp/python:${PYTHONPATH}
    # -f: the script may be sourced repeatedly; do not fail if links exist
    ln -sf $VAI_ALVEO_ROOT/vai/dpuv1/tools/compile/bin/vai_c_tensorflow.py $CONDA_PREFIX/bin/vai_c_tensorflow
    ln -sf $CONDA_PREFIX/bin/decent_q $CONDA_PREFIX/bin/vai_q_tensorflow
    MLSUITE_ROOT=$VAI_ALVEO_ROOT
    export MLSUITE_ROOT
    LIBXDNN_PATH=${VAI_ALVEO_ROOT}/vai/dpuv1/rt/xdnn_cpp/lib/libxfdnn.so
fi
export LIBXDNN_PATH
export PYTHONPATH
export LD_LIBRARY_PATH
echo "-------------------"
echo "Using LIBXDNN_PATH"
echo "-------------------"
echo $LIBXDNN_PATH
echo ""
echo "-------------------"
echo "PYTHONPATH"
echo "-------------------"
echo $PYTHONPATH
echo ""
export OMP_NUM_THREADS=4
export MKL_NUM_THREADS=4
export XBLAS_NUM_PREP_THREADS=4
export XRT_INI_PATH=${VAI_ALVEO_ROOT}/overlaybins/xrt.ini
##############################
# Enable XILINX_XRT
##############################
echo "---------------------"
echo "Verifying XILINX_XRT"
echo "---------------------"
if [ -f /opt/xilinx/xrt/include/version.h ]; then
    # parse "major.minor.patch" out of the xrt_build_version header
    info_xrt=$(cat /opt/xilinx/xrt/include/version.h | grep xrt_build_version\\[ | sed 's/[^0-9.]*//g')
    major=$(echo ${info_xrt} | cut -f1 -d ".")
    minor=$(echo ${info_xrt} | cut -f2 -d ".")
    major_gt=$(expr ${major} \> 2)
    major_eq=$(expr ${major} = 2)
    minor_=$(expr ${minor} \>= 2)
    # check version: exactly 2.x with x >= 2 is supported
    if [ ${major_eq} -eq "1" ]; then
        if [ ${minor_} -eq "0" ]; then
            echo "Invalid XRT Version!"
            exit 1
        fi
    elif [ ${major_gt} -eq "0" ]; then
        echo "Invalid XRT Version!"
        exit 1
    fi
    # enable XILINX_XRT
    source /opt/xilinx/xrt/setup.sh
    export XILINX_XRT=/opt/xilinx/xrt
else
    echo "Xilinx XRT not found on machine!"
    # exit 1 (not -1): exit status must be 0-255; -1 is non-portable
    exit 1
fi
|
<gh_stars>0
export const ROLE_ADMIN = 'ROLE_ADMIN';
|
<filename>jena-3.0.1/jena-arq/src/main/java/org/apache/jena/sparql/resultset/XMLInput.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.resultset;
import java.io.InputStream;
import java.io.Reader;
import org.apache.jena.query.ResultSet;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.sparql.SystemARQ;
/**
* Code that reads an XML Result Set and builds the ARQ structure for the same.
*/
public class XMLInput {

    // ---- ResultSet entry points -------------------------------------

    /** Parse a SPARQL XML result set from a stream. */
    public static ResultSet fromXML(InputStream in) {
        return fromXML(in, null);
    }

    /** Parse a SPARQL XML result set from a stream, resolving terms against {@code model}. */
    public static ResultSet fromXML(InputStream in, Model model) {
        return make(in, model).getResultSet();
    }

    /** Parse a SPARQL XML result set from a character reader. */
    public static ResultSet fromXML(Reader in) {
        return fromXML(in, null);
    }

    /** Parse a SPARQL XML result set from a character reader, resolving terms against {@code model}. */
    public static ResultSet fromXML(Reader in, Model model) {
        return make(in, model).getResultSet();
    }

    /** Parse a SPARQL XML result set from a string. */
    public static ResultSet fromXML(String str) {
        return fromXML(str, null);
    }

    /** Parse a SPARQL XML result set from a string, resolving terms against {@code model}. */
    public static ResultSet fromXML(String str, Model model) {
        return make(str, model).getResultSet();
    }

    // ---- Boolean (ASK) results --------------------------------------

    /** Parse an ASK result from a stream. */
    public static boolean booleanFromXML(InputStream in) {
        return make(in, null).getBooleanResult();
    }

    /** Parse an ASK result from a string. */
    public static boolean booleanFromXML(String str) {
        return make(str, null).getBooleanResult();
    }

    // ---- Low level operations ---------------------------------------
    // All parsing funnels through these; the SAX implementation is used
    // when SystemARQ.UseSAX is set, StAX otherwise.

    public static SPARQLResult make(InputStream in) {
        return make(in, null);
    }

    public static SPARQLResult make(InputStream in, Model model) {
        return SystemARQ.UseSAX ? new XMLInputSAX(in, model) : new XMLInputStAX(in, model);
    }

    public static SPARQLResult make(Reader in) {
        return make(in, null);
    }

    public static SPARQLResult make(Reader in, Model model) {
        return SystemARQ.UseSAX ? new XMLInputSAX(in, model) : new XMLInputStAX(in, model);
    }

    public static SPARQLResult make(String str) {
        return make(str, null);
    }

    public static SPARQLResult make(String str, Model model) {
        return SystemARQ.UseSAX ? new XMLInputSAX(str, model) : new XMLInputStAX(str, model);
    }
}
|
#!/bin/bash
# Extract benchmark results (result_time / result_stholes) and render the
# comparison plot into model.pdf.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# quote $DIR so paths with spaces do not word-split
source "$DIR/../../conf.sh"
cd "$DIR"
# $PYTHON comes from conf.sh; left unquoted in case it carries flags
$PYTHON extract.py --file result_time.csv --queries 100
$PYTHON extract.py --file result_stholes.csv --queries 100
gnuplot 8_plot.gnuplot > model.pdf
|
def reverse_string(word):
    """Return *word* with its characters in reverse order.

    Uses slicing, which is O(n); the previous character-by-character loop
    rebuilt the string on every iteration (O(n^2)).
    """
    return word[::-1]
#! /bin/sh
# Run the steady third-of-a-ring demo driver, compare its output against the
# stored reference data with fpdiff.py, and append the outcome to the global
# validation log. Exit status comes from validate_ok_count.
# Get the OOPMH-LIB root directory from a makefile
OOMPH_ROOT_DIR=$(make -s --no-print-directory print-top_builddir)
#Set the number of tests to be checked
NUM_TESTS=1
# Setup validation directory
#---------------------------
rm -rf Validation
mkdir Validation
#######################################################################
# Validation for buckling third of a ring with displacement control
#------------------------------------------------------------------
cd Validation
mkdir RESLT
echo "Running steady third-of-a-ring validation "
../steady_third_ring > OUTPUT
echo "done"
echo " " >> validation.log
echo "Steady third-of-a-ring validation" >> validation.log
echo "---------------------------------" >> validation.log
echo " " >> validation.log
echo "Validation directory: " >> validation.log
echo " " >> validation.log
echo " " `pwd` >> validation.log
echo " " >> validation.log
# Concatenate selected result snapshots plus the trace into one file so a
# single fpdiff run covers them all.
cat RESLT/ring0.dat \
RESLT/ring5.dat \
RESLT/ring12.dat \
RESLT/ring20.dat \
RESLT/trace.dat\
> ring_results.dat
if test "$1" = "no_fpdiff"; then
echo "dummy [OK] -- Can't run fpdiff.py because we don't have python or validata" >> validation.log
else
../../../../bin/fpdiff.py ../validata/ring_results.dat.gz \
ring_results.dat >> validation.log
fi
# Append output to global validation log file
#--------------------------------------------
cat validation.log >> ../../../../validation.log
cd ..
#######################################################################
#Check that we get the correct number of OKs
# validate_ok_count will exit with status
# 0 if all tests has passed.
# 1 if some tests failed.
# 2 if there are more 'OK' than expected.
. $OOMPH_ROOT_DIR/bin/validate_ok_count
# Never get here
exit 10
|
#!/bin/sh
# Fetch mruby (if not already present), pin it to MRUBY_VERSION, and build
# the requested rake target against .travis_config.rb.
export MRUBY_CONFIG="$(pwd)/.travis_config.rb"

if [ -z "${MRUBY_VERSION}" ] ; then
    export MRUBY_VERSION="3.0.0"
fi

if [ ! -d "./mruby/src" ]; then
    git clone https://github.com/mruby/mruby.git
    cd mruby
    git fetch --tags
    # resolve the tag/branch to a concrete commit and check it out
    rev="$(git rev-parse "$MRUBY_VERSION")"
    git checkout "$rev"
    cd ..
fi

# build in a subshell so the caller's working directory is untouched
(cd mruby; rake $1)
|
<reponame>typekev/react-mk
import React from 'react';
import { act } from 'react-dom/test-utils';
import Enzyme, { mount } from 'enzyme';
import Adapter from '@wojtekmaj/enzyme-adapter-react-17';
import Keyboard, { type as typeFunction } from '../src/Keyboard';
Enzyme.configure({ adapter: new Adapter() });

describe('Keyboard component', () => {
  // Mock setter injected in place of React.useState's setter for every test.
  const setState = jest.fn<void, string[]>();
  beforeEach(() => {
    // Freeze state at its initial value and capture updates through setState.
    jest.spyOn<any, string>(React, 'useState').mockImplementation((init) => [init, setState]);
  });

  // Plain string child is passed through unchanged.
  it('renders a child with the value Test', async () => {
    let wrapper;
    const text = 'Test';
    await act(async () => {
      wrapper = mount(<Keyboard>{text}</Keyboard>);
    });
    expect(wrapper.prop('children')).toBe(text);
    wrapper.unmount();
  });

  // Render-prop child: typing a single string.
  // NOTE(review): the expect() inside setTimeout is scheduled but never
  // awaited/flushed before unmount, so a failure there would not fail the
  // test — consider jest fake timers or awaiting a real delay.
  it('renders a child with the value Test', async () => {
    let wrapper;
    const text = 'Test';
    await act(async () => {
      wrapper = mount(<Keyboard>{({ type }) => type(text)}</Keyboard>);
    });
    await act(async () => {
      setTimeout(() => expect(wrapper.prop('children')).toBe(text), 2000);
    });
    wrapper.unmount();
  });

  // Render-prop child: typing several strings in sequence.
  // NOTE(review): same un-awaited setTimeout caveat as above.
  it('renders a child with the value Test', async () => {
    let wrapper;
    const text = 'Test';
    await act(async () => {
      wrapper = mount(<Keyboard>{({ type }) => type(text, text, text)}</Keyboard>);
    });
    await act(async () => {
      setTimeout(() => expect(setState).toHaveBeenCalledWith(text), 2000);
    });
    wrapper.unmount();
  });

  // Empty-string child should still mount and unmount cleanly.
  // NOTE(review): this local setState shadows the outer mock and is never
  // wired into useState, so the scheduled assertion cannot observe calls.
  it('renders a an empty string', async () => {
    let wrapper;
    const setState = jest.fn<void, string[]>();
    const text = '';
    await act(async () => {
      wrapper = mount(<Keyboard>{text}</Keyboard>);
    });
    await act(async () => {
      setTimeout(() => expect(setState).toHaveBeenCalledWith(text), 2000);
    });
    wrapper.unmount();
  });

  // The exported type() helper returns one entry per string argument.
  it('returns an array of length 4', () => {
    const text = 'Test';
    expect(typeFunction(text, text, text, text).length).toBe(4);
  });
});
|
<reponame>taoluwork/MarksWill
var MarksWill = artifacts.require("./MarksWill.sol");

// Truffle tests for the MarksWill contract's withDraw(password1, password2)
// guard: correct passwords before the deadline succeed, wrong ones fail.
contract('MarksWill', function() {
  /* it("should assert true", function(done) {
  var voting = Voting.deployed();
  assert.isTrue(true);
  done();
  }); */
  // NOTE(review): Truffle's `deployed()` takes no arguments — these values
  // are presumably ignored here; verify the intended deadline/passwords are
  // set in the migration instead.
  const will = MarksWill.deployed(1123234112,22,33);

  it("When both passwords are correct and deadline is not reached", function(){
    return will.then(instance=>{
      //console.log(instance)
      // .call() runs withDraw read-only (no transaction) and returns its bool
      return instance.withDraw.call(22,33).then(result=>{
        // console.log(result);
        //expect(instance.validCandidate('a')).to.be.a(true);
        assert.isTrue(result);
      });
    });
  });

  it("When one or both pieces of password are incorrect", function(){
    return will.then(instance1=>{
      //console.log(instance1)
      return instance1.withDraw.call(11,23).then(result1=>{
        // console.log(result1);
        assert.isFalse(result1);
      });
    });
  });
});
|
<reponame>jiawei397/deno-oak-nest<filename>src/decorators/module_test.ts
import { assert, assertEquals } from "../../test_deps.ts";
import {
defineModuleMetadata,
getModuleMetadata,
isModule,
Module,
} from "./module.ts";
Deno.test("isModule", () => {
  // Non-objects and plain objects are not modules.
  assert(!isModule(true));
  assert(!isModule(false));
  assert(!isModule(""));
  assert(!isModule("string"));
  assert(!isModule({}));
  // An object carrying a `module` key qualifies.
  assert(isModule({
    module: "",
  }));
  // defineModuleMetadata marks an arbitrary object as a module.
  const module = {};
  defineModuleMetadata(module);
  assert(isModule(module));
});
Deno.test("Module", () => {
  class A {
  }
  class B {
  }
  // Exercise all three provider shapes: class, useValue, useFactory.
  const providers = [B, {
    provide: "b",
    useValue: "b",
  }, {
    provide: "c",
    useFactory: () => "d",
  }];
  @Module({
    imports: ["a", "b"],
    controllers: [A],
    providers,
  })
  class ModuleA {
  }
  // The decorator must mark the class and store each option under its key.
  assert(isModule(ModuleA));
  assertEquals(getModuleMetadata("imports", ModuleA), ["a", "b"]);
  assertEquals(getModuleMetadata("controllers", ModuleA), [A]);
  assertEquals(getModuleMetadata("providers", ModuleA), providers);
});
|
#!/bin/sh
# Rebuild the test database from scratch.
# -f: do not error when test.sqlite does not exist yet (first run).
rm -f test.sqlite
sqlite3 test.sqlite < create_tables.sql
<reponame>AtChina/XTION-PM<gh_stars>1-10
/**
* Author: changyingwei
* Create Date: 2015-03-24
 * Description: Front-end workflow help documentation
*/
module.exports = function() {
'use strict';
var util = require('util');
process.stdout.write(util.format('\x1b[36m%s', 'Usage\n\n'));
process.stdout.write(util.format('\x1b[33m%s', ' node main.js [help] [test] [init] [build] [deploy] [publish] [default]\n\n'));
process.stdout.write(util.format('\x1b[36m%s', 'The most commonly available tasks are:\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' node main.js help: '));
process.stdout.write(util.format('\x1b[37m%s', 'get all task info\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' node main.js doc: '));
process.stdout.write(util.format('\x1b[37m%s', 'get front-end framework doc\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' node main.js test: '));
process.stdout.write(util.format('\x1b[37m%s', 'run front-end auto test\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' node main.js init: '));
process.stdout.write(util.format('\x1b[37m%s', 'auto exec bower install and npm install\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' node main.js build: '));
process.stdout.write(util.format('\x1b[37m%s', 'auto build less, combine compress js and images\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' node main.js deploy: '));
process.stdout.write(util.format('\x1b[37m%s', 'auto exec publish task then deploy to remote server\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' node main.js publish: '));
process.stdout.write(util.format('\x1b[37m%s', 'auto package and compress file to zip\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' node main.js default: '));
process.stdout.write(util.format('\x1b[37m%s', 'default task that start web server,you can do this with '));
process.stdout.write(util.format('\x1b[32m%s', 'node main.js\n\n'));
process.stdout.write(util.format('\x1b[36m%s', 'There are some demo url that can help you quickly develop web app:\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' http://localhost:8083/demo/ : '));
process.stdout.write(util.format('\x1b[37m%s', 'front-end demo page in ./Client/tests/demo/ directory\n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' http://localhost:8083/*/*.html: '));
process.stdout.write(util.format('\x1b[37m%s', 'front-end test page in ./Client/tests/ directory \n\n'));
process.stdout.write(util.format('\x1b[32m%s', ' http://localhost:8083/layoutit/index.html : '));
process.stdout.write(util.format('\x1b[37m%s', 'xtion paas platform demo\n\n'));
};
|
package transcribe;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomStringUtils;
import com.amazonaws.SdkClientException;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.AccessControlList;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.CopyObjectRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.amazonaws.services.transcribe.AmazonTranscribe;
import com.amazonaws.services.transcribe.AmazonTranscribeClient;
import com.amazonaws.services.transcribe.AmazonTranscribeClientBuilder;
import com.amazonaws.services.transcribe.model.DeleteTranscriptionJobRequest;
import com.amazonaws.services.transcribe.model.GetTranscriptionJobRequest;
import com.amazonaws.services.transcribe.model.GetTranscriptionJobResult;
import com.amazonaws.services.transcribe.model.LanguageCode;
import com.amazonaws.services.transcribe.model.Media;
import com.amazonaws.services.transcribe.model.StartTranscriptionJobRequest;
import com.amazonaws.services.transcribe.model.StartTranscriptionJobResult;
import com.amazonaws.services.transcribe.model.TranscriptionJob;
import com.amazonaws.services.transcribe.model.TranscriptionJobStatus;
import com.fasterxml.jackson.databind.ObjectMapper;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
/**
 * Thin wrapper around AWS Transcribe and S3: starts a transcription job for a
 * media object already present in the configured bucket, polls the job to
 * completion, downloads the JSON transcript, and cleans up the source object
 * and the job.
 */
public class Transcribe{
    //@Autowired
    //private Environment env;
    // Target S3 bucket and region, taken from project-wide constants.
    private String bucketName = Constants.bucketName;
    private Regions regions = Constants.awsSeerviceRegion;

    /**
     * Builds an Amazon Transcribe client using the default credential chain.
     * NOTE(review): a fresh client is built on every call (same for
     * s3Client()); consider caching one instance if this path becomes hot.
     */
    public AmazonTranscribe transcribeClient() {
        System.out.println("Intialize Transcribe Client");
        AmazonTranscribe amazonTranscribe = AmazonTranscribeClientBuilder
                .standard()
                .withRegion(regions)
                .build();
        return amazonTranscribe;
    }

    /** Builds an Amazon S3 client using the default credential chain. */
    public AmazonS3 s3Client() {
        System.out.println("Intialize AWS S3 Client");
        AmazonS3 s3client = AmazonS3ClientBuilder
                .standard()
                .withRegion(regions)
                .build();
        return s3client;
    }

    /**
     * Deletes the object derived from {@code fileName} (spaces replaced by
     * underscores, lower-cased) from the bucket. Failures are logged only.
     */
    public void deleteFileFromAwsBucket(String fileName) {
        System.out.println("Delete File from AWS Bucket "+fileName);
        String key = fileName.replaceAll(" ", "_").toLowerCase();
        //String newKey = key + "-" + System.currentTimeMillis();
        //s3Client().copyObject(bucketName, key, bucketName, newKey);
        try {
            // brief delay before deleting — presumably to let a prior S3
            // operation settle; TODO confirm this is still needed
            Thread.sleep(1000);
            s3Client().deleteObject(bucketName, key);
        }catch(Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Ad-hoc test helper: downloads the object for {@code fileName}, creates
     * a local scratch file, re-uploads it to a second bucket and copies it
     * across buckets with owner-full-control ACLs.
     * NOTE(review): bucket/key names are hard-coded; looks like exploratory
     * code rather than production path.
     */
    public void testFileFromAwsBucket(String fileName) {
        System.out.println("Delete File from AWS Bucket "+fileName);
        String key = fileName.replaceAll(" ", "_").toLowerCase();
        //String newKey = key + "-" + System.currentTimeMillis();
        //s3Client().copyObject(bucketName, key, bucketName, newKey);
        S3Object s3object = null;
        S3ObjectInputStream inputStream = null;
        try {
            Thread.sleep(1000);
            s3object = s3Client().getObject(bucketName, key);
            inputStream = s3object.getObjectContent();
            File file = new File("/tmp/test2.wav");
            if (file.createNewFile()) {
                System.out.println("File created: " + file.getName());
            }
            //FileUtils.copyInputStreamToFile(inputStream, file);
            //System.out.println("Object Size : "+inputStream.read());
            PutObjectRequest put1 = new PutObjectRequest("sanjay-textract",
                    "test.wav",
                    file);
            s3Client().putObject(put1.withCannedAcl(CannedAccessControlList.BucketOwnerFullControl));
            CopyObjectRequest copy1 = new CopyObjectRequest("sanjay-transcribe","test.wav","sanjay-textract","test2.wav");
            s3Client().copyObject(copy1.withCannedAccessControlList(CannedAccessControlList.BucketOwnerFullControl));
        }catch(Exception e) {
            e.printStackTrace();
        }finally{
            // always release the S3 object and its stream
            try{
                if(s3object != null){
                    s3object.close();
                }
                if(inputStream != null){
                    inputStream.close();
                }
            }catch(Exception resExec){
                resExec.printStackTrace();
            }
        }
    }

    /**
     * Starts a transcription job for the S3 object at {@code key}.
     * The job name is the key (minus any folder prefix) plus a random
     * 10-letter suffix so repeated runs on the same file do not collide.
     *
     * @param key S3 object key of the media file (may contain one "/" prefix)
     * @return the StartTranscriptionJob result, carrying the job name
     */
    public StartTranscriptionJobResult startTranscriptionJob(String key) {
        System.out.println("Start Transcription Job By Key Before : "+key);
        Media media = new Media().withMediaFileUri(s3Client().getUrl(bucketName, key).toExternalForm());
        // override with the s3:// form expected by Transcribe
        media.setMediaFileUri("s3://"+bucketName+"/"+key);
        System.out.println("startTranscriptionJob :: media created... "+media.getMediaFileUri());
        // strip the folder prefix so the job name stays short
        key = key.substring(key.indexOf("/")+1);
        System.out.println("Start Transcription Job By Key After : "+key);
        int length = 10;
        boolean useLetters = true;
        boolean useNumbers = false;
        String generatedString = RandomStringUtils.random(length, useLetters, useNumbers);
        System.out.println("startTranscriptionJob :: bucketName "+bucketName);
        String jobName = key.concat(generatedString);
        System.out.println("jobName : "+jobName);
        StartTranscriptionJobRequest startTranscriptionJobRequest = new StartTranscriptionJobRequest()
                .withLanguageCode(LanguageCode.EnUS).withTranscriptionJobName(jobName).withMedia(media);
        System.out.println("Job request started...");
        StartTranscriptionJobResult startTranscriptionJobResult = transcribeClient()
                .startTranscriptionJob(startTranscriptionJobRequest);
        System.out.println("Job result...");
        return startTranscriptionJobResult;
    }

    /**
     * Polls the given job every 2 seconds until it completes or fails.
     *
     * @return the completed job result, or null if the job FAILED
     * NOTE(review): `resultFound` is never set, so the loop exits only via
     * the return statements; there is no overall timeout — a job stuck
     * IN_PROGRESS would poll forever.
     */
    public GetTranscriptionJobResult getTranscriptionJobResult(String jobName) {
        System.out.println("Get Transcription Job Result By Job Name : "+jobName);
        GetTranscriptionJobRequest getTranscriptionJobRequest = new GetTranscriptionJobRequest()
                .withTranscriptionJobName(jobName);
        Boolean resultFound = false;
        TranscriptionJob transcriptionJob = new TranscriptionJob();
        GetTranscriptionJobResult getTranscriptionJobResult = new GetTranscriptionJobResult();
        while (resultFound == false) {
            getTranscriptionJobResult = transcribeClient().getTranscriptionJob(getTranscriptionJobRequest);
            transcriptionJob = getTranscriptionJobResult.getTranscriptionJob();
            if (transcriptionJob.getTranscriptionJobStatus()
                    .equalsIgnoreCase(TranscriptionJobStatus.COMPLETED.name())) {
                return getTranscriptionJobResult;
            } else if (transcriptionJob.getTranscriptionJobStatus()
                    .equalsIgnoreCase(TranscriptionJobStatus.FAILED.name())) {
                return null;
            } else if (transcriptionJob.getTranscriptionJobStatus()
                    .equalsIgnoreCase(TranscriptionJobStatus.IN_PROGRESS.name())) {
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException e) {
                    System.out.println("Interrupted Exception {}"+e.getMessage());
                }
            }
        }
        return getTranscriptionJobResult;
    }

    /**
     * Downloads the transcript JSON from the signed URI Transcribe returned
     * and maps it onto TranscriptionResponseDTO.
     *
     * @return the parsed transcript, or null if the HTTP call or parsing failed
     */
    public TranscriptionResponseDTO downloadTranscriptionResponse(String uri){
        System.out.println("Download Transcription Result from Transcribe URi {}"+uri);
        OkHttpClient okHttpClient = new OkHttpClient()
                .newBuilder()
                .connectTimeout(60, TimeUnit.SECONDS)
                .writeTimeout(60, TimeUnit.SECONDS)
                .readTimeout(60, TimeUnit.SECONDS)
                .build();
        Request request = new Request.Builder().url(uri).build();
        Response response;
        try {
            response = okHttpClient.newCall(request).execute();
            String body = response.body().string();
            ObjectMapper objectMapper = new ObjectMapper();
            response.close();
            return objectMapper.readValue(body, TranscriptionResponseDTO.class);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Deletes the finished job. Without this, AWS keeps the job (and its
     * transcript) for 90 days before automatic deletion.
     */
    public void deleteTranscriptionJob(String jobName) {
        System.out.println("Delete Transcription Job from amazon Transcribe : "+jobName);
        DeleteTranscriptionJobRequest deleteTranscriptionJobRequest = new DeleteTranscriptionJobRequest()
                .withTranscriptionJobName(jobName);
        transcribeClient().deleteTranscriptionJob(deleteTranscriptionJobRequest);
    }

    /**
     * End-to-end flow: start a job for the (already uploaded) file, wait for
     * completion, delete the source object, download and parse the
     * transcript, then delete the job.
     *
     * @param fileName original file name; normalized to the S3 key
     * @return the parsed transcript DTO
     * NOTE(review): if the job FAILED, getTranscriptionJobResult returns
     * null and the getTranscript() call below would throw NPE — confirm
     * failure handling upstream.
     */
    public TranscriptionResponseDTO extractSpeechTextFromAudio(String fileName) {
        System.out.println("Request to extract Speech Text from Audio : "+fileName);
        // Create a key that is like name for file and will be used for creating unique name based id for transcription job
        String key = fileName.replaceAll(" ", "_").toLowerCase();
        // Start Transcription Job and get result
        //System.out.println("1...");
        StartTranscriptionJobResult startTranscriptionJobResult = startTranscriptionJob(key);
        // Get name of job started for the file
        //System.out.println("2...");
        String transcriptionJobName = startTranscriptionJobResult.getTranscriptionJob().getTranscriptionJobName();
        // Get result after the procesiing is complete
        //System.out.println("3...");
        GetTranscriptionJobResult getTranscriptionJobResult = getTranscriptionJobResult(transcriptionJobName);
        //delete file as processing is done
        //System.out.println("4...");
        deleteFileFromAwsBucket(key);
        // Url of result file for transcription
        //System.out.println("5...");
        String transcriptFileUriString = getTranscriptionJobResult.getTranscriptionJob().getTranscript().getTranscriptFileUri();
        // Get the transcription response by downloading the file
        //System.out.println("6...");
        TranscriptionResponseDTO transcriptionResponseDTO = downloadTranscriptionResponse(transcriptFileUriString);
        //Delete the transcription job after finishing or it will get deleted after 90 days automatically if you do not call
        deleteTranscriptionJob(transcriptionJobName);
        return transcriptionResponseDTO;
    }

    /** Uploads {@code data} as a string object under {@code newKey}; failures are logged only. */
    public void uploadResponseFileToAwsBucket(String newKey, String data) {
        System.out.println("uploadResponseFileToAwsBucket -> key : "+newKey);
        try {
            s3Client().putObject(bucketName, newKey, data);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
def hex_to_rgb(hex_code):
    """Convert a hex color string ("#RRGGBB" or "RRGGBB") to floats in [0, 1].

    Each pair of hex digits becomes one channel value divided by 255, so the
    result has one float per byte (e.g. 3 entries for "#RRGGBB").

    Raises ValueError if the digits are not valid hexadecimal or their count
    is odd (propagated from bytes.fromhex).
    """
    # strip a single leading "#" if present; `digits` avoids the ambiguous
    # single-letter name `l` (PEP 8 / E741)
    digits = hex_code[1:] if hex_code.startswith("#") else hex_code
    return [channel / 255.0 for channel in bytes.fromhex(digits)]
<gh_stars>1-10
package main.methodRemovedInSuperclass;
/**
 * Intentionally empty superclass — per the package name, this appears to be
 * a test fixture representing the state after a method has been removed
 * from the superclass.
 */
public abstract class SSMethodRemovedInSuperclass {
}
|
#!/bin/bash
# This publishes qiskit-superstaq to PyPI.
# Abort on the first failing command so a broken build is never uploaded.
set -e
cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$(git rev-parse --show-toplevel)"
python setup.py bdist_wheel
twine upload dist/* -u __token__ -p "$PYPI_API_KEY"
|
#!/usr/bin/env sh
# Build the bundles and stage them; fail fast so a broken build is not staged.
set -e
yarn run build
git add dist/Naja.js dist/Naja.js.map
|
#!/bin/bash -e
# Fetch and build the third-party dependencies (abseil, microlib, fmt).
# Quote the dirname expansion so paths containing spaces still work.
cd "$(dirname "$0")"
. d/buildaux/script_lib.sh
git_dep https://github.com/abseil/abseil-cpp.git abseil
git_dep https://github.com/tamaskenez/microlib.git microlib r1
git_dep https://github.com/fmtlib/fmt.git fmt
cmake_dep microlib --try-use-ide
cmake_dep abseil
cmake_dep fmt -DFMT_TEST=0 -DFMT_DOC=0
|
/* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* <p/>
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*/
package org.rzo.yajsw.script;
import groovy.lang.Binding;
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyObject;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.codehaus.groovy.control.CompilationFailedException;
import org.jboss.netty.logging.InternalLogger;
import org.rzo.yajsw.boot.WrapperLoader;
import org.rzo.yajsw.util.VFSUtils;
import org.rzo.yajsw.wrapper.WrappedJavaProcess;
import org.rzo.yajsw.wrapper.WrappedProcess;
/**
* The Class GroovyScript.
*/
public class GroovyScript extends AbstractScript
{
	/** Shared, thread-safe map available to all scripts in this JVM via the "context" binding. */
	public static Map context = Collections.synchronizedMap(new HashMap());
	/** The binding. Exposes variables (args, callCount, context, logger, ...) to the script. */
	final Binding binding;
	// Logger handed to the script; taken from the process when none is given.
	final InternalLogger _logger;
	// Currently loaded script instance; swapped when the file changes and reload is enabled.
	volatile GroovyObject _script;
	// Whether to re-check the script file for changes before each execution.
	final boolean _reload;
	// Script source encoding, or null for the platform default.
	final String _encoding;
	/**
	 * Instantiates a new groovy script: compiles/loads the script class,
	 * fetches its binding and seeds it with the standard variables
	 * (args, callCount, context, logger).
	 *
	 * @param script
	 *            the script file name
	 * @param id
	 *            identifier made available to the script
	 * @param process
	 *            the wrapped process this script is attached to (may be null)
	 * @param args
	 *            arguments exposed to the script as "args"
	 * @param timeout
	 *            script execution timeout
	 * @param logger
	 *            logger to expose; falls back to the process' wrapper logger
	 * @param encoding
	 *            script file encoding, null for the platform default
	 * @param reload
	 *            if true, the script file is re-checked and reloaded on change
	 * @throws IOException
	 * @throws CompilationFailedException
	 * @throws IllegalAccessException
	 * @throws InstantiationException
	 * @throws ClassNotFoundException
	 */
	public GroovyScript(final String script, final String id, final WrappedProcess process, final String[] args, final int timeout, final InternalLogger logger, String encoding, boolean reload) throws CompilationFailedException, IOException,
			InstantiationException, IllegalAccessException, ClassNotFoundException
	{
		super(script, id, process, args, timeout);
		_reload = reload;
		_encoding = encoding;
		// let's call some method on an instance
		_script = getScriptInstance(script, encoding);
		binding = (Binding) _script.invokeMethod("getBinding", null);
		binding.setVariable("args", args);
		binding.setVariable("callCount", 0);
		binding.setVariable("context", context);
		// prefer the process' wrapper logger when no explicit logger was given
		if (process != null && logger == null)
			_logger = process.getInternalWrapperLogger();
		else
			_logger = logger;
		binding.setVariable("logger", _logger);
	}
private void setGroovyClasspath(GroovyClassLoader loader)
{
ArrayList cp = WrapperLoader.getGroovyClasspath();
for (Iterator it = cp.listIterator(); it.hasNext(); )
loader.addURL((URL)it.next());
}
static GroovyClassLoader groovyClassLoader;
private GroovyObject getScriptInstance(String scriptFileName, String encoding) throws IOException, InstantiationException,
IllegalAccessException, ClassNotFoundException
{
FileObject fileObject = VFSUtils.resolveFile(".", scriptFileName);
FileName fileName = fileObject.getName();
long lastModified = fileObject.getContent().getLastModifiedTime();
String scriptName = StringUtils.removeEnd(fileName.getBaseName(), "." + fileName.getExtension()) + "_"
+ lastModified;
synchronized (GroovyScript.class)
{
if (groovyClassLoader == null)
{
groovyClassLoader = new GroovyClassLoader(getClass().getClassLoader());
setGroovyClasspath(groovyClassLoader);
}
try
{
Class clazz = Class.forName(scriptName, true, groovyClassLoader);
if (_script == null)
return (GroovyObject) clazz.newInstance();
else
return _script;
}
catch (ClassNotFoundException e)
{
if (_script != null)
log("script changed -> reloading");
InputStream in = null;
String scriptSrc = null;
try
{
in = fileObject.getContent().getInputStream();
if (encoding == null)
scriptSrc = IOUtils.toString(in);
else
scriptSrc = IOUtils.toString(in, encoding);
}
finally
{
if (in != null)
in.close();
}
return (GroovyObject) groovyClassLoader.parseClass(scriptSrc, scriptName + ".groovy").newInstance();
}
}
}
/*
* (non-Javadoc)
*
* @see org.rzo.yajsw.script.AbstractScript#execute(java.lang.String,
* java.lang.String, java.lang.String, java.lang.String, java.lang.String,
* java.lang.String, java.lang.Object)
*/
synchronized public Object execute(String line)
{
Object result = null;
if (_script == null)
{
System.out.println("cannot execute script " + _name);
return null;
}
if (_reload)
{
GroovyObject script = null;
try
{
script = getScriptInstance(_name, _encoding);
}
catch (Exception e)
{
e.printStackTrace();
}
if (script != null)
{
if (_script != script)
{
script.invokeMethod("setBinding", binding);
_script = script;
}
}
}
binding.setVariable("id", _id);
if (_process != null)
{
binding.setVariable("state", _process.getStringState());
binding.setVariable("count", _process.getRestartCount());
binding.setVariable("pid", _process.getAppPid());
binding.setVariable("exitCode", _process.getExitCode());
binding.setVariable("line", line);
binding.setVariable("process", _process);
}
try
{
result = _script.invokeMethod("run", new Object[]{});
}
catch (Throwable e)
{
if (_logger != null)
_logger.info("execption in script "+this._name, e);
else
e.printStackTrace();
}
binding.setVariable("callCount", ((Integer) binding.getVariable("callCount")).intValue() + 1);
return result;
}
public static void main(String[] args) throws Exception, IOException, InstantiationException, IllegalAccessException
{
WrappedJavaProcess w = new WrappedJavaProcess();
w.getLocalConfiguration().setProperty("wrapper.config", "conf/wrapper.helloworld.conf");
w.init();
GroovyScript script = new GroovyScript("./scripts/timeCondition.gv", "id", w, new String[]
{ "11", "12" }, 0, null, null, false);
script.execute();
script.execute();
script = new GroovyScript("./scripts/fileCondition.gv", "id", w, new String[]
{ "anchor.lck" }, 0, null, null, false);
script.execute();
script.execute();
script = new GroovyScript("./scripts/snmpTrap.gv", "id", w, new String[]
{ "192.168.0.1", "1", "msg" }, 0, null, null, false);
script.execute();
}
public Object execute()
{
return execute("");
}
public void executeWithTimeout()
{
executeWithTimeout("");
}
public void interrupt()
{
if (_future != null)
_future.cancel(true);
}
void log(String msg)
{
if (_logger != null)
_logger.info(msg);
else
System.out.println(msg);
}
public Object invoke(String method, Object ... x )
{
Object result = null;
try
{
result = _script.invokeMethod(method, x);
}
catch (Exception ex)
{
ex.printStackTrace();
}
return result;
}
}
|
<gh_stars>1-10
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <Python.h>
#include <folly/Executor.h>
#include <folly/Function.h>
#include <folly/ScopeGuard.h>
#include <folly/io/IOBuf.h>
#if PY_VERSION_HEX < 0x03040000
#define PyGILState_Check() (true)
#endif
namespace thrift {
namespace py3 {
struct PyBufferData {
folly::Executor* executor;
PyObject* py_object;
};
// Wraps `buf` (owned by `py_object`) in a folly::IOBuf without copying.
// The IOBuf takes a new reference on `py_object` and releases it when the
// buffer is freed; `executor` is used to drop that reference safely when the
// free callback runs without the GIL held.
//
// Marked `inline`: this file is a header (#pragma once); a non-inline
// definition would violate the one-definition rule as soon as the header is
// included from more than one translation unit.
inline std::unique_ptr<folly::IOBuf> iobuf_from_python(
    folly::Executor* executor,
    PyObject* py_object,
    void* buf,
    uint64_t length) {
  Py_INCREF(py_object); // keep the owner alive as long as the IOBuf exists
  auto* userData = new PyBufferData();
  userData->executor = executor;
  userData->py_object = py_object;
  return folly::IOBuf::takeOwnership(
      buf,
      length,
      [](void* buf, void* userData) {
        auto* py_data = (PyBufferData*)userData;
        auto* py_object = py_data->py_object;
        if (PyGILState_Check()) {
          // Fast path: this thread already holds the GIL.
          Py_DECREF(py_object);
        } else if (py_data->executor) {
          // Defer the DECREF to the asyncio executor, which will hold the GIL.
          py_data->executor->add(
              [py_object]() mutable { Py_DECREF(py_object); });
        } else {
          /*
            This is the last ditch effort. We don't have the GIL and we have no
            asyncio executor. In this case we will attempt to use the
            pendingCall interface to cpython. This is likely to fail under
            heavy load due to lock contention.
          */
          int ret = Py_AddPendingCall(
              [](void* userData) {
                Py_DECREF((PyObject*)userData);
                return 0;
              },
              (void*)py_object);
          if (ret != 0) {
            LOG(ERROR)
                << "an IOBuf was created from a non-asyncio thread, and all attempts "
                << "to free the underlying buffer has failed, memory has leaked!";
          } else {
            LOG(WARNING)
                << "an IOBuf was created from a non-asyncio thread, and we successful "
                << "handled cleanup but this is not a reliable interface, it will fail "
                << "under heavy load, do not create IOBufs from non-asyncio threads. ";
          }
        }
        delete py_data;
      },
      userData);
}
// Content (not pointer) equality of two IOBuf chains.
// `inline` because this is a header-only definition (ODR safety).
inline bool check_iobuf_equal(const folly::IOBuf* a, const folly::IOBuf* b) {
  return folly::IOBufEqualTo{}(a, b);
}
// Content-based ordering of two IOBuf chains.
// `inline` because this is a header-only definition (ODR safety).
inline bool check_iobuf_less(const folly::IOBuf* a, const folly::IOBuf* b) {
  return folly::IOBufLess{}(a, b);
}
} // namespace py3
} // namespace thrift
|
package update
import (
"fmt"
"io/ioutil"
"os"
"path"
"testing"
"github.com/experimental-platform/platconf/platconf"
"github.com/jarcoal/httpmock"
"github.com/stretchr/testify/assert"
)
// TestUpdateSetupPaths checks that setupPaths creates every directory the
// updater depends on, relative to the given root.
func TestUpdateSetupPaths(t *testing.T) {
	// Work in a throwaway directory that is removed when the test ends.
	workDir, err := ioutil.TempDir("", "")
	assert.Nil(t, err)
	defer os.RemoveAll(workDir)
	err = setupPaths(workDir)
	assert.Nil(t, err)
	// Each of these must exist as a directory afterwards.
	for _, sub := range []string{
		"/etc/systemd/journald.conf.d",
		"/etc/systemd/system",
		"/etc/systemd/system/docker.service.d",
		"/etc/systemd/system/scripts",
		"/etc/udev/rules.d",
		"/opt/bin",
	} {
		info, statErr := os.Stat(path.Join(workDir, sub))
		assert.Nil(t, statErr)
		assert.True(t, info.IsDir())
	}
}
// TestFetchReleaseJSON checks fetchReleaseJSONv2 against a mocked GitHub raw
// endpoint: 200 returns the body, 403 and unknown channels return errors.
func TestFetchReleaseJSON(t *testing.T) {
	const (
		payload       = "foobarteststring"
		okChannel     = "WhateverTheF"
		deniedChannel = "GoAwayChannel"
		urlFormat     = "https://raw.githubusercontent.com/protonet/builds/master/manifest-v2/%s.json"
	)
	httpmock.Activate()
	defer httpmock.DeactivateAndReset()
	httpmock.RegisterResponder("GET", fmt.Sprintf(urlFormat, okChannel), httpmock.NewStringResponder(200, payload))
	httpmock.RegisterResponder("GET", fmt.Sprintf(urlFormat, deniedChannel), httpmock.NewStringResponder(403, "Access denied."))
	// Accessible channel returns the payload unchanged.
	data, err := fetchReleaseJSONv2(okChannel)
	assert.Nil(t, err)
	assert.Equal(t, len(payload), len(data))
	// A 403 response and an unregistered URL both surface as errors.
	_, err = fetchReleaseJSONv2(deniedChannel)
	assert.NotNil(t, err)
	_, err = fetchReleaseJSONv2("noSuchChannel")
	assert.NotNil(t, err)
}
// TestFetchReleaseData verifies that fetchReleaseDataV2 parses a valid
// manifest-v2 document into platconf.ReleaseManifestV2 and that malformed
// JSON yields an error.
func TestFetchReleaseData(t *testing.T) {
	// Well-formed manifest served for the happy-path channel.
	testBody := `{
	"build": 12345,
	"codename": "Kaufman",
	"url": "https://www.example.com/",
	"published_at": "1990-12-31T23:59:60Z",
	"images": [
		{
			"name": "quay.io/experiementalplatform/geilerserver",
			"tag": "v1.2.3.4",
			"pre_download": true
		},
		{
			"name": "quay.io/protonet/rickroll",
			"tag": "latest",
			"pre_download": false
		}
	]
}`
	// Deliberately unparseable payload.
	testBrokenJSON := "213ewqsd"
	// Struct equivalent of testBody used for the EqualValues comparison.
	expectedJSON := platconf.ReleaseManifestV2{
		Build:           12345,
		Codename:        "Kaufman",
		ReleaseNotesURL: "https://www.example.com/",
		PublishedAt:     "1990-12-31T23:59:60Z",
		Images: []platconf.ReleaseManifestV2Image{
			platconf.ReleaseManifestV2Image{
				Name:        "quay.io/experiementalplatform/geilerserver",
				Tag:         "v1.2.3.4",
				PreDownload: true,
			},
			platconf.ReleaseManifestV2Image{
				Name:        "quay.io/protonet/rickroll",
				Tag:         "latest",
				PreDownload: false,
			},
		},
	}
	testChannel := "WhateverTheF"
	testChannelBrokenJSON := "SomeOtherChan"
	// Intercept HTTP calls so no real network traffic happens.
	httpmock.Activate()
	defer httpmock.DeactivateAndReset()
	mockURL1 := fmt.Sprintf("https://raw.githubusercontent.com/protonet/builds/master/manifest-v2/%s.json", testChannel)
	httpmock.RegisterResponder("GET", mockURL1, httpmock.NewStringResponder(200, testBody))
	mockURL2 := fmt.Sprintf("https://raw.githubusercontent.com/protonet/builds/master/manifest-v2/%s.json", testChannelBrokenJSON)
	httpmock.RegisterResponder("GET", mockURL2, httpmock.NewStringResponder(200, testBrokenJSON))
	manifest, err := fetchReleaseDataV2(testChannel)
	assert.Nil(t, err)
	assert.NotNil(t, manifest)
	assert.EqualValues(t, expectedJSON, *manifest)
	// Broken JSON must surface as an error, not a partial manifest.
	_, err = fetchReleaseDataV2(testChannelBrokenJSON)
	assert.NotNil(t, err)
}
|
#!/bin/sh
# Lint gate: abort on the first failing command so CI fails fast.
set -e
# Run luacheck static analysis over the project's Lua source directories.
luacheck package scripts targets
|
import {
andThen, head, ifElse, inc, pipe, prop, uncurryN,
} from 'ramda';
import useCollection from './useCollection';
import isNilOrEmpty from './internal/isNilOrEmpty';
import { docId, withoutId } from './internal/id';
/**
 * Handles the result of collection.insertOne in MongoDB Driver in a way that
 * it picks only the inserted document from the result: the driver reports the
 * written documents under `ops`, and the first entry is the one we inserted.
 *
 * @func
 * @since v0.1.0
 * @param {object} cursorResult Cursor result of insertOne function.
 * @return {object} Inserted document.
 * */
const handleInsertOneResult = (cursorResult) => head(prop('ops', cursorResult));
/**
 * Handles the result of collection.findOneAndUpdate in MongoDB Driver in a way
 * that it picks only the resulting document, which the driver exposes on the
 * `value` property of the cursor result.
 *
 * @func
 * @since v0.1.0
 * @param {object} cursorResult Cursor result of findOneAndUpdate function.
 * @return {object} Updated document.
 * */
const handleFindOneAndUpdateResult = (cursorResult) => prop('value', cursorResult);
/**
 * Takes a {@link MongoClientLike}, a database name, a collection name, and a doc to be upserted,
 * then returns `Promise` which resolves upserted `Document` in specified `Collection`.
 *
 * If `doc` has an _id field, then it will try to update it by finding the document with given id,
 * otherwise it will upsert the given `doc`.
 *
 * It is a curried function so it can be partially recomposed.
 * Since [Ramda](https://ramdajs.com/) is used for currying, you can also use [R.__](https://ramdajs.com/docs/#__)
 * placeholder to allow partial application of any combination of arguments of this particular function.
 *
 * @func upsert
 * @since v0.1.0
 * @param {MongoClientLike} client {@link MongoClient} instance
 * @param {string} databaseName Database name to get the collection from.
 * @param {string} collectionName Collection name to upsert the document in.
 * @param {object} doc The Document to be upserted.
 * @return {Promise<Object>} Upserted document
 * @example
 *
 * const client = createClient(...params);
 * const insertDocument = { name: '<NAME>', surname: 'some surname' };
 * upsert(client, 'databaseName', 'collectionName', insertDocument)
 * .then(console.log); // this should insert given document and returns inserted version
 *
 * const updateDocument = {
 *   _id: 'id of the document',
 *   name: 'some updated name',
 * };
 * upsert(client, 'databaseName', 'collectionName', updateDocument)
 * .then(console.log); // this should update given document and returns updated version
 *
 * // partial re-usability
 * const upsertCategory = upsert(client, 'someDb', 'categories');
 * upsertCategory({ name: 'some name' }).then(console.log);
 * upsertCategory({ _id: 'some id', approved: false }).then(console.log);
 *
 * const upsertInSomeDb = upsert(client, 'someDb')
 * upsertInSomeDb('categories', { name: 'some name' }).then(console.log);
 * upsertInSomeDb('comments', { _id: 'some id', approved: false }).then(console.log);
 */
const upsert = uncurryN(
  // arity = useCollection's parameters + 1 (the trailing doc argument)
  inc(useCollection.length),
  pipe(
    useCollection,
    uncurryN(
      2,
      (collectionPromise) => (doc) => andThen(
        ifElse(
          // branch on whether the doc carries an _id
          () => isNilOrEmpty(docId(doc)),
          // insert when id is nil
          (collection) => andThen(handleInsertOneResult, collection.insertOne(doc)),
          // else update the document by finding it with its id
          (collection) => andThen(
            handleFindOneAndUpdateResult,
            collection.findOneAndUpdate(
              { _id: docId(doc) },
              { $set: withoutId(doc) },
              { upsert: true, returnOriginal: false },
            ),
          ),
        ),
        collectionPromise,
      ),
    ),
  ),
);
export default upsert;
|
package com.bc.ui.treebuilder;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.tree.TreeNode;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
/**
* @author poshjosh
*/
public class ReadMe {
    /**
     * Demonstrates the tree-builder API: builds Swing JTrees from a DOM
     * Document, a directory tree and a Map, showing each one in a dialog.
     */
    public static void main(String [] args) {
        TreeBuilderFactory treeBuilderFactory = new TreeBuilderFactoryImpl();
        // For displaying each JTree
        //
        JScrollPane scrollPane = new JScrollPane();
        scrollPane.setBounds(0, 0, 300, 300);
        final JFileChooser chooser = new JFileChooser();
        chooser.setMultiSelectionEnabled(false);
        chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
        chooser.setDialogTitle("Select HTML/XML file");
        final int selection = chooser.showDialog(null, "View Document Structure");
        if(selection == JFileChooser.APPROVE_OPTION) {
            // DocumentTreeBuilder
            //
            // NOTE(review): loadDocument returns null on parse failure; build()
            // would then likely fail — consider a null check here.
            Document doc = loadDocument(chooser.getSelectedFile());
            DOMTreeBuilder domTreeBuilder = treeBuilderFactory.getDOMInstance();
            Filter<Node> nodeFilter = null; // May be null
            TreeNode docRootNode = domTreeBuilder.build(doc, nodeFilter);
            JTree documentTree = new JTree(docRootNode);
            // Display the JTree
            //
            scrollPane.setViewportView(documentTree);
            JOptionPane.showMessageDialog(null, scrollPane);
        }
        // FileTreeBuilder
        //
        File dir = new File(System.getProperty("user.home")+"/Documents");
        TreeBuilder<File> fileTreeBuilder = treeBuilderFactory.getInstance(TreeBuilderFactory.FILE);
        // This also works
        //FileTreeBuilder fileTreeBuilder = treeBuilderFactory.getFileInstance();
        // Accept only directories and Word documents.
        Filter<File> fileFilter = new Filter<File>() {
            @Override
            public boolean accept(File f) {
                return f.isDirectory() || f.getName().endsWith(".docx");
            }
        };
        TreeNode fileRootNode = fileTreeBuilder.build(dir, fileFilter);
        JTree fileTree = new JTree(fileRootNode);
        // Display the JTree
        //
        scrollPane.setViewportView(fileTree);
        JOptionPane.showMessageDialog(null, scrollPane);
        // MapTreeBuilder
        //
        MapTreeBuilder mapTreeBuilder = treeBuilderFactory.getMapInstance();
        final HashMap map = new HashMap();
        map.put("boolean", Boolean.TRUE);
        map.put("number", 100);
        map.put("List", new String[]{"1", "2", "3"});
        HashMap grandChildren = new HashMap();
        grandChildren.put("grandChild", "I am a grand child");
        map.put("hasChildren", grandChildren);
        Map.Entry rootEntry = mapTreeBuilder.createRootEntry(map);
        TreeNode mapRootNode = mapTreeBuilder.build(rootEntry, null);
        JTree mapTree = new JTree(mapRootNode);
        // Display the JTree
        //
        scrollPane.setViewportView(mapTree);
        JOptionPane.showMessageDialog(null, scrollPane);
    }
    /**
     * Parses the given HTML/XML file into a DOM Document.
     * @return the parsed Document, or null when parsing fails (the error is printed).
     */
    private static Document loadDocument(File file) {
        Document doc;
        try {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            DocumentBuilder docBuilder = factory.newDocumentBuilder();
            doc = docBuilder.parse(file);
        }catch (SAXException | IOException | ParserConfigurationException e) {
            e.printStackTrace();
            doc = null;
        }
        return doc;
    }
}
|
<gh_stars>0
/*
* Copyright 2014-2020 chronicle.software
*
* http://www.chronicle.software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.logger.jul;
import net.openhft.chronicle.core.OS;
import net.openhft.chronicle.core.util.Time;
import java.util.logging.LogManager;
class JulLoggerTestBase extends JulTestBase {

    /** Root directory (below java.io.tmpdir) used by the chronicle JUL tests. */
    static String basePath() {
        final String sep = System.getProperty("file.separator");
        String tmp = System.getProperty("java.io.tmpdir");
        if (!tmp.endsWith(sep)) {
            tmp += sep;
        }
        return tmp + "chronicle-jul-api";
    }

    /** Per-logger subdirectory below {@link #basePath()}. */
    static String basePath(String loggerName) {
        return basePath() + System.getProperty("file.separator") + loggerName;
    }

    /** Convenience overload: the configuration id is the test class' simple name. */
    static void setupLogger(Class<?> testName) {
        setupLogger(testName.getSimpleName());
    }

    /** Installs the chronicle LogManager and points it at the given properties file. */
    static void setupLogger(String id) {
        final String propertiesFile = id.endsWith(".properties") ? id : id + ".properties";
        System.setProperty(
                "java.util.logging.manager",
                ChronicleLoggerManager.class.getName());
        System.setProperty(
                "sun.util.logging.disableCallerCheck",
                "false");
        System.setProperty(
                "chronicle.logger.properties",
                propertiesFile);
        // Force JUL to re-read its configuration with the new manager in place.
        LogManager.getLogManager().reset();
    }
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2704-1
#
# Security announcement date: 2015-08-05 00:00:00 UTC
# Script generation date: 2017-01-01 21:04:43 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: i386
#
# Vulnerable packages fix on version:
# - swift:1.4.8-0ubuntu2.5
#
# Last versions recommended by security team:
# - swift:1.4.8-0ubuntu2.5
#
# CVE List:
# - CVE-2014-7960
# - CVE-2015-1856
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# --only-upgrade: patch swift if installed, never pull it in fresh.
sudo apt-get install --only-upgrade swift=1.4.8-0ubuntu2.5 -y
|
<reponame>zhihuilai/read-bigint
import { getFirstAndLast } from "./common";
// https://github.com/nodejs/node/blob/v13.9.0/lib/internal/buffer.js#L83-L101
/**
 * Reads an unsigned 64-bit little-endian integer from `buffer` at `offset`,
 * composing it from two 32-bit halves so only the final combination needs
 * BigInt arithmetic.
 */
export function readBigUInt64LE(buffer: Buffer, offset = 0): bigint {
  // Bounds-checked first and last bytes of the 8-byte window.
  const { first, last } = getFirstAndLast(buffer, offset);
  // Little-endian: bytes 0-3 form the low 32 bits ...
  const low32 =
    first +
    buffer[++offset] * 2 ** 8 +
    buffer[++offset] * 2 ** 16 +
    buffer[++offset] * 2 ** 24;
  // ... and bytes 4-7 form the high 32 bits.
  const high32 =
    buffer[++offset] +
    buffer[++offset] * 2 ** 8 +
    buffer[++offset] * 2 ** 16 +
    last * 2 ** 24;
  return BigInt(low32) + (BigInt(high32) << BigInt(32));
}
|
package text
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestIDs pins the numeric values of the message-ID constants so they cannot
// drift silently (clients depend on the concrete numbers).
func TestIDs(t *testing.T) {
	cases := []struct {
		expected int
		actual   int
	}{
		{1010000, int(InfoSelfServiceLogin)},
		{1020000, int(InfoSelfServiceLogout)},
		{1030000, int(InfoSelfServiceMFA)},
		{1040000, int(InfoSelfServiceRegistration)},
		{1050000, int(InfoSelfServiceSettings)},
		{1050001, int(InfoSelfServiceSettingsUpdateSuccess)},
		{1060000, int(InfoSelfServiceRecovery)},
		{1060001, int(InfoSelfServiceRecoverySuccessful)},
		{1060002, int(InfoSelfServiceRecoveryEmailSent)},
		{1070000, int(InfoSelfServiceVerification)},
		{4000000, int(ErrorValidation)},
		{4000001, int(ErrorValidationGeneric)},
		{4000002, int(ErrorValidationRequired)},
		{4010000, int(ErrorValidationLogin)},
		{4010001, int(ErrorValidationLoginRequestExpired)},
		{4040000, int(ErrorValidationRegistration)},
		{4040001, int(ErrorValidationRegistrationRequestExpired)},
		{4060000, int(ErrorValidationRecovery)},
		{4060001, int(ErrorValidationRecoveryRetrySuccess)},
		{4060002, int(ErrorValidationRecoveryStateFailure)},
		{4070000, int(ErrorValidationVerification)},
		{4070001, int(ErrorValidationVerificationTokenInvalidOrAlreadyUsed)},
		{5000000, int(ErrorSystem)},
	}
	for _, c := range cases {
		assert.Equal(t, c.expected, c.actual)
	}
}
|
#!/bin/sh
# Copyright (c) 2014-2016 The Pershyancoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Walks first-parent history from a given commit (default HEAD) back to a
# trusted root commit, verifying GPG signatures and, with --tree-checks,
# per-commit Tree-SHA512 hashes.
DIR=$(dirname "$0")
# If $0 was a relative path, make DIR absolute.
[ "/${DIR#/}" != "$DIR" ] && DIR=$(dirname "$(pwd)/$0")
echo "Using verify-commits data from ${DIR}"
# Trust anchors and policy files shipped alongside this script.
VERIFIED_ROOT=$(cat "${DIR}/trusted-git-root")
VERIFIED_SHA512_ROOT=$(cat "${DIR}/trusted-sha512-root-commit")
REVSIG_ALLOWED=$(cat "${DIR}/allow-revsig-commits")
# Prefer GNU sha512sum; otherwise fall back to `shasum -a 512` (e.g. macOS).
HAVE_GNU_SHA512=1
[ ! -x "$(which sha512sum)" ] && HAVE_GNU_SHA512=0
# First argument: commit to start from (defaults to HEAD).
if [ x"$1" = "x" ]; then
	CURRENT_COMMIT="HEAD"
else
	CURRENT_COMMIT="$1"
fi
if [ "${CURRENT_COMMIT#* }" != "$CURRENT_COMMIT" ]; then
	echo "Commit must not contain spaces?" > /dev/stderr
	exit 1
fi
# Second argument: enable per-commit tree-hash verification.
VERIFY_TREE=0
if [ x"$2" = "x--tree-checks" ]; then
	VERIFY_TREE=1
fi
NO_SHA1=1
PREV_COMMIT=""
INITIAL_COMMIT="${CURRENT_COMMIT}"
while true; do
	# Success: reached the trusted root with every commit verified.
	if [ "$CURRENT_COMMIT" = $VERIFIED_ROOT ]; then
		echo "There is a valid path from \"$INITIAL_COMMIT\" to $VERIFIED_ROOT where all commits are signed!"
		exit 0
	fi
	# Below the SHA512 root commit, tree checks stop and SHA1 is tolerated.
	if [ "$CURRENT_COMMIT" = $VERIFIED_SHA512_ROOT ]; then
		if [ "$VERIFY_TREE" = "1" ]; then
			echo "All Tree-SHA512s matched up to $VERIFIED_SHA512_ROOT" > /dev/stderr
		fi
		VERIFY_TREE=0
		NO_SHA1=0
	fi
	if [ "$NO_SHA1" = "1" ]; then
		export PERSHYANCOIN_VERIFY_COMMITS_ALLOW_SHA1=0
	else
		export PERSHYANCOIN_VERIFY_COMMITS_ALLOW_SHA1=1
	fi
	# Revoked-signature overrides are allowed only for whitelisted commits.
	if [ "${REVSIG_ALLOWED#*$CURRENT_COMMIT}" != "$REVSIG_ALLOWED" ]; then
		export PERSHYANCOIN_VERIFY_COMMITS_ALLOW_REVSIG=1
	else
		export PERSHYANCOIN_VERIFY_COMMITS_ALLOW_REVSIG=0
	fi
	if ! git -c "gpg.program=${DIR}/gpg.sh" verify-commit "$CURRENT_COMMIT" > /dev/null; then
		if [ "$PREV_COMMIT" != "" ]; then
			echo "No parent of $PREV_COMMIT was signed with a trusted key!" > /dev/stderr
			echo "Parents are:" > /dev/stderr
			PARENTS=$(git show -s --format=format:%P $PREV_COMMIT)
			for PARENT in $PARENTS; do
				git show -s $PARENT > /dev/stderr
			done
		else
			echo "$CURRENT_COMMIT was not signed with a trusted key!" > /dev/stderr
		fi
		exit 1
	fi
	# We always verify the top of the tree
	if [ "$VERIFY_TREE" = 1 -o "$PREV_COMMIT" = "" ]; then
		IFS_CACHE="$IFS"
		IFS='
'
		# Reject repositories containing symlinks (git mode 12xxxx).
		for LINE in $(git ls-tree --full-tree -r "$CURRENT_COMMIT"); do
			case "$LINE" in
				"12"*)
					echo "Repo contains symlinks" > /dev/stderr
					IFS="$IFS_CACHE"
					exit 1
					;;
			esac
		done
		IFS="$IFS_CACHE"
		# Hash every blob, in a locale-stable (LC_ALL=C) path order.
		FILE_HASHES=""
		for FILE in $(git ls-tree --full-tree -r --name-only "$CURRENT_COMMIT" | LC_ALL=C sort); do
			if [ "$HAVE_GNU_SHA512" = 1 ]; then
				HASH=$(git cat-file blob "$CURRENT_COMMIT":"$FILE" | sha512sum | { read FIRST _; echo $FIRST; } )
			else
				HASH=$(git cat-file blob "$CURRENT_COMMIT":"$FILE" | shasum -a 512 | { read FIRST _; echo $FIRST; } )
			fi
			[ "$FILE_HASHES" != "" ] && FILE_HASHES="$FILE_HASHES"'
'
			FILE_HASHES="$FILE_HASHES$HASH $FILE"
		done
		if [ "$HAVE_GNU_SHA512" = 1 ]; then
			TREE_HASH="$(echo "$FILE_HASHES" | sha512sum)"
		else
			TREE_HASH="$(echo "$FILE_HASHES" | shasum -a 512)"
		fi
		# The last line of the commit message must carry the matching hash.
		# NOTE(review): "$MSG -" appends a single space before "-" while
		# sha512sum's stdin output embeds two spaces before "-"; confirm this
		# pattern actually matches on the platforms in use.
		HASH_MATCHES=0
		MSG="$(git show -s --format=format:%B "$CURRENT_COMMIT" | tail -n1)"
		case "$MSG -" in
			"Tree-SHA512: $TREE_HASH")
				HASH_MATCHES=1;;
		esac
		if [ "$HASH_MATCHES" = "0" ]; then
			echo "Tree-SHA512 did not match for commit $CURRENT_COMMIT" > /dev/stderr
			exit 1
		fi
	fi
	# Follow only the first parent: the trust chain is linear.
	PARENTS=$(git show -s --format=format:%P "$CURRENT_COMMIT")
	for PARENT in $PARENTS; do
		PREV_COMMIT="$CURRENT_COMMIT"
		CURRENT_COMMIT="$PARENT"
		break
	done
done
|
# frozen_string_literal: true
module DiscourseDev
  # Empty Rails engine subclass for the DiscourseDev namespace.
  # NOTE(review): presumably its mere presence registers the gem's paths and
  # initializers with Rails — confirm against the gem's railtie setup.
  class Engine < Rails::Engine; end
end
|
"""Minimal LSTM sentiment-classification example (Keras).

The original snippet was not valid Python: it used literal ``<positive review
1>`` placeholders and referenced ``Tokenizer``, ``pad_sequences``,
``Embedding``, ``LSTM`` and ``Dense`` without importing them. This version
fills in concrete sample strings and the missing keras imports; the model,
preprocessing and training steps are unchanged.
"""
import tensorflow as tf
import numpy as np
from tensorflow.keras.layers import Dense, Embedding, LSTM
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.preprocessing.text import Tokenizer

# Prepare data: two positive and two negative sample reviews.
reviews = [
    "I absolutely loved this film, a wonderful experience",
    "Great acting and a brilliant story, highly recommended",
    "This was a boring waste of time",
    "Terrible plot and awful dialogue, do not watch",
]
labels = np.array([1, 1, 0, 0])  # 1 for positive, 0 for negative

# Tokenize the reviews (keep the 1000 most frequent words).
tokenizer = Tokenizer(num_words=1000)
tokenizer.fit_on_texts(reviews)
sequence = tokenizer.texts_to_sequences(reviews)

# Zero-pad every sequence to a fixed length of 200 tokens.
data = pad_sequences(sequence, maxlen=200)

# Build model: embedding -> LSTM -> sigmoid binary classifier.
model = tf.keras.Sequential()
model.add(Embedding(1000, 16, input_length=200))
model.add(LSTM(64))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy'])

# Train
model.fit(data, labels, epochs=10)

# Test: score an unseen review with the same tokenizer/padding pipeline.
review = "What a fantastic movie, I enjoyed every minute"
review_seq = tokenizer.texts_to_sequences([review])
padded_review = pad_sequences(review_seq, maxlen=200)
model.predict(padded_review)  # close to 1 => positive, close to 0 => negative
#include <windows.h>
#include <stdio.h>
#pragma comment (lib,"user32.lib")

/*
 * Turns the display off by broadcasting WM_SYSCOMMAND / SC_MONITORPOWER
 * to every top-level window.
 *
 * Fix: the original used an implicit-int `main()`, which is invalid in
 * C99 and later; declare the standard `int main(void)` instead.
 */
int main(void)
{
    /* lParam for SC_MONITORPOWER: -1 = on, 1 = low power, 2 = off. */
    SendMessage(HWND_BROADCAST, WM_SYSCOMMAND, SC_MONITORPOWER, 2);
    return 0;
}
|
/**
 * Maps an npm package name to the Atlassian web-resource that provides it at
 * runtime, plus the expressions used to import it (`var` and `amd` flavours).
 */
const providedDependencies = new Map([
  [
    'jquery',
    {
      dependency: 'jira.webresources:jquery',
      import: {
        var: "require('jquery')",
        amd: 'jquery',
      },
    },
  ],
  [
    'lodash',
    {
      dependency: 'com.atlassian.plugin.jslibs:underscore-1.4.4',
      import: {
        var: "require('atlassian/libs/underscore-1.4.4')",
        amd: 'atlassian/libs/underscore-1.4.4',
      },
    },
  ],
]);

module.exports = providedDependencies;
|
#!/bin/bash
# Prints every environment variable of the current session, one per line.
echo "List of environment variables used in the current session:"
echo ""
# Fixes over the original loop:
#  - `read -r` keeps backslashes literal instead of interpreting them;
#  - quoting "$line" (and using printf) stops the word splitting and glob
#    expansion that the unquoted `echo $line` performed on variable values.
env | while read -r line ; do
  printf '%s\n' "$line"
done
<filename>chrome/browser/google_apis/gdata_util.h
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_GOOGLE_APIS_GDATA_UTIL_H_
#define CHROME_BROWSER_GOOGLE_APIS_GDATA_UTIL_H_
#include <string>
#include "base/callback_forward.h"
#include "base/logging.h"
#include "base/platform_file.h"
#include "base/task_runner_util.h"
#include "chrome/browser/google_apis/gdata_errorcode.h"
class FilePath;
class Profile;
namespace base {
class SequencedTaskRunner;
class Time;
} // namespace base
namespace tracked_objects {
class Location;
} // tracked_objects
namespace google_apis {
namespace util {
// Returns true if Drive v2 API is enabled via commandline switch.
bool IsDriveV2ApiEnabled();
// Parses an RFC 3339 date/time into a base::Time, returning true on success.
// The time string must be in the format "yyyy-mm-ddThh:mm:ss.dddTZ" (TZ is
// either '+hh:mm', '-hh:mm', 'Z' (representing UTC), or an empty string).
bool GetTimeFromString(const base::StringPiece& raw_value, base::Time* time);
// Formats a base::Time as an RFC 3339 date/time (in UTC).
std::string FormatTimeAsString(const base::Time& time);
// Formats a base::Time as an RFC 3339 date/time (in localtime).
std::string FormatTimeAsStringLocaltime(const base::Time& time);
// Wrapper around BrowserThread::PostTask to post a task to the blocking
// pool with the given sequence token.
void PostBlockingPoolSequencedTask(
const tracked_objects::Location& from_here,
base::SequencedTaskRunner* blocking_task_runner,
const base::Closure& task);
// Similar to PostBlockingPoolSequencedTask() but this one takes a reply
// callback that runs on the calling thread.
void PostBlockingPoolSequencedTaskAndReply(
const tracked_objects::Location& from_here,
base::SequencedTaskRunner* blocking_task_runner,
const base::Closure& request_task,
const base::Closure& reply_task);
// Similar to PostBlockingPoolSequencedTaskAndReply() but this one runs the
// reply callback with the return value of the request task.
// |request_task| runs on |blocking_task_runner|; |reply_task| then runs on
// the calling thread with the request task's result. DCHECKs that the post
// itself succeeded.
template <typename ReturnType>
void PostBlockingPoolSequencedTaskAndReplyWithResult(
    const tracked_objects::Location& from_here,
    base::SequencedTaskRunner* blocking_task_runner,
    const base::Callback<ReturnType(void)>& request_task,
    const base::Callback<void(ReturnType)>& reply_task) {
  const bool posted = base::PostTaskAndReplyWithResult(blocking_task_runner,
                                                       from_here,
                                                       request_task,
                                                       reply_task);
  DCHECK(posted);
}
} // namespace util
} // namespace google_apis
#endif // CHROME_BROWSER_GOOGLE_APIS_GDATA_UTIL_H_
|
<reponame>hapramp/1Rramp-Android
package com.hapramp.draft;
/**
 * Plain value holder for one entry in the drafts list: a title, numeric id,
 * draft type, serialized content (JSON) and last-modified label.
 */
public class DraftListItemModel {
  // Display title of the draft.
  private String title;
  // Numeric identifier of the draft.
  private int draftId;
  // Kind/category of the draft.
  private String draftType;
  // Serialized draft content.
  private String json;
  // Last-modified label shown in the list.
  private String lastModified;
  public String getTitle() {
    return title;
  }
  public void setTitle(String title) {
    this.title = title;
  }
  public String getJson() {
    return json;
  }
  public void setJson(String json) {
    this.json = json;
  }
  public String getLastModified() {
    return lastModified;
  }
  public void setLastModified(String lastModified) {
    this.lastModified = lastModified;
  }
  // NOTE(review): the field and setter use int while this getter widens the
  // value to long; harmless at runtime, but consider aligning the types.
  public long getDraftId() {
    return draftId;
  }
  public void setDraftId(int draftId) {
    this.draftId = draftId;
  }
  public String getDraftType() {
    return draftType;
  }
  public void setDraftType(String draftType) {
    this.draftType = draftType;
  }
}
|
#!/usr/bin/env bash
# Replaces the "1.0.0-monorepo" placeholder version in lerna.json,
# package.json and packages/snyk-protect/package.json with the latest
# released GitHub tag. Requires `gh` and `jq`; expects CircleCI env vars.
set -e
# Latest release tag from the GitHub API (e.g. "v1.2.3"); strip the leading "v".
LATEST_PACKAGE_VERSION=$(gh api "repos/${CIRCLE_PROJECT_USERNAME}/${CIRCLE_PROJECT_REPONAME}/releases/latest" | jq .tag_name -r)
LATEST_PACKAGE_VERSION=${LATEST_PACKAGE_VERSION:1}
echo "LATEST_PACKAGE_VERSION: ${LATEST_PACKAGE_VERSION}"
echo "versions before modifying:"
cat ./lerna.json ./package.json ./packages/snyk-protect/package.json | grep version
# BSD sed on macOS requires an explicit (empty) backup suffix for -i.
if [[ $(uname -s) == "Darwin" ]];then
  echo "this is Mac"
  sed -i "" "s|1.0.0-monorepo|${LATEST_PACKAGE_VERSION}|g" ./lerna.json
  sed -i "" "s|1.0.0-monorepo|${LATEST_PACKAGE_VERSION}|g" ./package.json
  sed -i "" "s|1.0.0-monorepo|${LATEST_PACKAGE_VERSION}|g" ./packages/snyk-protect/package.json
else
  echo "this is Linux"
  sed -i "s|1.0.0-monorepo|${LATEST_PACKAGE_VERSION}|g" ./lerna.json
  sed -i "s|1.0.0-monorepo|${LATEST_PACKAGE_VERSION}|g" ./package.json
  sed -i "s|1.0.0-monorepo|${LATEST_PACKAGE_VERSION}|g" ./packages/snyk-protect/package.json
fi
echo "versions after modifying:"
cat ./lerna.json ./package.json ./packages/snyk-protect/package.json | grep version
|
#!/bin/bash
###################################################################################################
# SCRIPT FOR High-Fidelity and Low-Latency Universal Neural Vocoder based on #
# Multiband WaveRNN with Data-driven Linear Prediction (MWDLP) #
###################################################################################################
# Copyright 2021 Patrick Lumban Tobing (Nagoya University)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
. ./path.sh
. ./cmd.sh
# USER SETTINGS {{{
#######################################
# STAGE SETTING #
#######################################
# {{{
# 0: dump model and compile C program
# 1: run analysis-synthesis with real-time demo using cpu
# 2: run analysis-synthesis and mel-spectrogram output/input with real-time demo using cpu
# 3: run vc using speaker point target with real-time demo using cpu
# 4: run vc using interpolated speaker target with real-time demo using cpu
# }}}
stage=0
#stage=1
#stage=2
#stage=3
#stage=4
#######################################
# TRAINING SETTING #
#######################################
## fs: sampling rate
fs=`awk '{if ($1 == "fs:") print $2}' conf/config.yml`
## shiftms: frame shift in ms
shiftms=`awk '{if ($1 == "shiftms:") print $2}' conf/config.yml`
## upsampling_factor: upsampling factor for neural vocoder
upsampling_factor=`echo "${shiftms} * ${fs} / 1000" | bc`
if [ $shiftms -eq 10 ]; then
## for using skewed input convolution (> 0), or balanced (0) [only for encoder]
## lookup frame limited to only 1/2 frame [for allowing low-latency/real-time processing]
right_size_enc=1
right_size_wave=1
batch_size_wave=6
batch_size=30
elif [ $shiftms -eq 5 ]; then
right_size_enc=2
right_size_wave=2
batch_size_wave=12
batch_size=60
else
echo "shift ms not available"
exit 1
fi
#spks_open=(p237 p245 p276)
# Full VCC2020 speaker set: SE* sources, T** targets (order fixes speaker indices used in stage 3).
spks=(SEF1 SEF2 SEM1 SEM2 TFM1 TGM1 TMM1 TEF1 TEM1 TEF2 TEM2 TFF1 TGF1 TMF1)
data_name=vcc20_${shiftms}ms
# uv-f0 and log-f0 occupied the first two dimensions,
# then uv-codeap, log-negative-codeap and mel-ceps
## [uv-f0,log-f0,uv-codeap,log-negative-codeap,mel-ceps]
## fftl: length of FFT window analysis
## WORLD F0_floor for cheaptrick: 3.0 * fs / (fft_size - 3.0)
## [https://github.com/mmorise/World/blob/master/src/cheaptrick.cpp] line 197
## mcep_alpha: frequency warping parameter for mel-cepstrum
## n_bands: number of bands for multiband modeling [a minimum of 4 kHz per band for proper modeling]
# Sampling-rate-dependent analysis settings. For 22.05k/44.1k, shiftms is
# re-expressed so a frame maps to an exact sample count (110/220/440 samples),
# avoiding rounding drift.
if [ $fs -eq 22050 ]; then
    wav_org_dir=wav_22kHz
    data_name=${data_name}_22kHz
    mcep_alpha=0.455 #22.05k ## frequency warping based on pysptk.util.mcepalpha
    fftl=2048
    if [ $shiftms -eq 5 ]; then
        shiftms=4.9886621315192743764172335600907 #22.05k rounding 110/22050 5ms shift
    elif [ $shiftms -eq 10 ]; then
        shiftms=9.9773242630385487528344671201814 #22.05k rounding 220/22050 10ms shift
    fi
    full_excit_dim=5
    n_bands=5
elif [ $fs -eq 24000 ]; then
    wav_org_dir=wav_24kHz
    data_name=${data_name}_24kHz
    mcep_alpha=0.466 #24k
    fftl=2048
    full_excit_dim=6
    n_bands=6
elif [ $fs -eq 48000 ]; then
    wav_org_dir=wav_48kHz
    data_name=${data_name}_48kHz
    mcep_alpha=0.554 #48k
    fftl=4096
    full_excit_dim=8
    n_bands=12
elif [ $fs -eq 44100 ]; then
    wav_org_dir=wav_44kHz
    data_name=${data_name}_44kHz
    mcep_alpha=0.544 #44.1k
    fftl=4096
    if [ $shiftms -eq 5 ]; then
        shiftms=4.9886621315192743764172335600907 #44.1k rounding 220/44100 5ms shift
    elif [ $shiftms -eq 10 ]; then
        shiftms=9.9773242630385487528344671201814 #44.1k rounding 440/44100 10ms shift
    fi
    full_excit_dim=8
    n_bands=10
elif [ $fs -eq 16000 ]; then
    wav_org_dir=wav_16kHz
    data_name=${data_name}_16kHz
    mcep_alpha=0.41000000000000003 #16k
    fftl=1024
    full_excit_dim=4
    n_bands=4
elif [ $fs -eq 8000 ]; then
    wav_org_dir=wav_8kHz
    data_name=${data_name}_8kHz
    mcep_alpha=0.312 #8k
    fftl=1024
    full_excit_dim=4
    n_bands=2
else
    echo "sampling rate not available"
    exit 1
fi
## from WORLD: number of code-aperiodicities = min(15000,fs/2-3000)/3000
## [https://github.com/mmorise/World/blob/master/src/codec.cpp] line 212
## mcep_dim: number of mel-cepstrum dimension
mcep_dim=`awk '{if ($1 == "mcep_dim:") print $2}' conf/config.yml`
## powmcep_dim: 0th power + mcep_dim
powmcep_dim=`expr ${mcep_dim} + 1`
## winms: window length analysis for mel-spectrogram extraction
winms=`awk '{if ($1 == "winms:") print $2}' conf/config.yml`
## mel_dim: number of mel-spectrogram dimension
mel_dim=`awk '{if ($1 == "mel_dim:") print $2}' conf/config.yml`
## highpass_cutoff: cutoff frequency for low-cut filter to remove DC-component in recording
highpass_cutoff=`awk '{if ($1 == "highpass_cutoff:") print $2}' conf/config.yml`
## alpha: coefficient for pre-emphasis
alpha=`awk '{if ($1 == "alpha:") print $2}' conf/config.yml`
# Dataset split prefixes: training / development / test.
trn=tr_${data_name}
dev=dv_${data_name}
tst=ts_${data_name}
# Shared optimizer / dataloader settings read from conf/config.yml.
lr=`awk '{if ($1 == "lr:") print $2}' conf/config.yml`
do_prob=`awk '{if ($1 == "do_prob:") print $2}' conf/config.yml`
n_workers=`awk '{if ($1 == "n_workers:") print $2}' conf/config.yml`
### settings for VC network
# All CycleVAE hyperparameters below are read verbatim from conf/config.yml;
# they are only used here to reconstruct the experiment directory name.
step_count=`awk '{if ($1 == "step_count:") print $2}' conf/config.yml`
mdl_name_vc=`awk '{if ($1 == "mdl_name_vc:") print $2}' conf/config.yml`
n_half_cyc=`awk '{if ($1 == "n_half_cyc:") print $2}' conf/config.yml`
lat_dim=`awk '{if ($1 == "lat_dim:") print $2}' conf/config.yml`
lat_dim_e=`awk '{if ($1 == "lat_dim_e:") print $2}' conf/config.yml`
hidden_units_enc=`awk '{if ($1 == "hidden_units_enc:") print $2}' conf/config.yml`
hidden_layers_enc=`awk '{if ($1 == "hidden_layers_enc:") print $2}' conf/config.yml`
hidden_units_dec=`awk '{if ($1 == "hidden_units_dec:") print $2}' conf/config.yml`
hidden_layers_dec=`awk '{if ($1 == "hidden_layers_dec:") print $2}' conf/config.yml`
hidden_units_lf0=`awk '{if ($1 == "hidden_units_lf0:") print $2}' conf/config.yml`
hidden_layers_lf0=`awk '{if ($1 == "hidden_layers_lf0:") print $2}' conf/config.yml`
kernel_size_enc=`awk '{if ($1 == "kernel_size_enc:") print $2}' conf/config.yml`
dilation_size_enc=`awk '{if ($1 == "dilation_size_enc:") print $2}' conf/config.yml`
kernel_size_spk=`awk '{if ($1 == "kernel_size_spk:") print $2}' conf/config.yml`
dilation_size_spk=`awk '{if ($1 == "dilation_size_spk:") print $2}' conf/config.yml`
kernel_size_dec=`awk '{if ($1 == "kernel_size_dec:") print $2}' conf/config.yml`
dilation_size_dec=`awk '{if ($1 == "dilation_size_dec:") print $2}' conf/config.yml`
kernel_size_lf0=`awk '{if ($1 == "kernel_size_lf0:") print $2}' conf/config.yml`
dilation_size_lf0=`awk '{if ($1 == "dilation_size_lf0:") print $2}' conf/config.yml`
causal_conv_enc=`awk '{if ($1 == "causal_conv_enc:") print $2}' conf/config.yml`
causal_conv_dec=`awk '{if ($1 == "causal_conv_dec:") print $2}' conf/config.yml`
causal_conv_lf0=`awk '{if ($1 == "causal_conv_lf0:") print $2}' conf/config.yml`
spkidtr_dim=`awk '{if ($1 == "spkidtr_dim:") print $2}' conf/config.yml`
emb_spk_dim=`awk '{if ($1 == "emb_spk_dim:") print $2}' conf/config.yml`
n_weight_emb=`awk '{if ($1 == "n_weight_emb:") print $2}' conf/config.yml`
right_size_spk=`awk '{if ($1 == "right_size_spk:") print $2}' conf/config.yml`
right_size_dec=`awk '{if ($1 == "right_size_dec:") print $2}' conf/config.yml`
right_size_lf0=`awk '{if ($1 == "right_size_lf0:") print $2}' conf/config.yml`
s_conv_flag=`awk '{if ($1 == "s_conv_flag:") print $2}' conf/config.yml`
seg_conv_flag=`awk '{if ($1 == "seg_conv_flag:") print $2}' conf/config.yml`
t_start_cycvae=`awk '{if ($1 == "t_start_cycvae:") print $2}' conf/config.yml`
t_end_cycvae=`awk '{if ($1 == "t_end_cycvae:") print $2}' conf/config.yml`
interval_cycvae=`awk '{if ($1 == "interval_cycvae:") print $2}' conf/config.yml`
densities_cycvae_enc=`awk '{if ($1 == "densities_cycvae_enc:") print $2}' conf/config.yml`
densities_cycvae_dec=`awk '{if ($1 == "densities_cycvae_dec:") print $2}' conf/config.yml`
n_stage_cycvae=`awk '{if ($1 == "n_stage_cycvae:") print $2}' conf/config.yml`
### settings for neural vocoder
# Same pattern for the MWDLP vocoder hyperparameters.
step_count_wave=`awk '{if ($1 == "step_count_wave:") print $2}' conf/config.yml`
mdl_name_wave=`awk '{if ($1 == "mdl_name_wave:") print $2}' conf/config.yml`
hidden_units_wave=`awk '{if ($1 == "hidden_units_wave:") print $2}' conf/config.yml`
hidden_units_wave_2=`awk '{if ($1 == "hidden_units_wave_2:") print $2}' conf/config.yml`
kernel_size_wave=`awk '{if ($1 == "kernel_size_wave:") print $2}' conf/config.yml`
dilation_size_wave=`awk '{if ($1 == "dilation_size_wave:") print $2}' conf/config.yml`
kernel_size=`awk '{if ($1 == "kernel_size:") print $2}' conf/config.yml`
hid_chn=`awk '{if ($1 == "hid_chn:") print $2}' conf/config.yml`
skip_chn=`awk '{if ($1 == "skip_chn:") print $2}' conf/config.yml`
dilation_depth=`awk '{if ($1 == "dilation_depth:") print $2}' conf/config.yml`
dilation_repeat=`awk '{if ($1 == "dilation_repeat:") print $2}' conf/config.yml`
t_start=`awk '{if ($1 == "t_start:") print $2}' conf/config.yml`
t_end=`awk '{if ($1 == "t_end:") print $2}' conf/config.yml`
interval=`awk '{if ($1 == "interval:") print $2}' conf/config.yml`
densities=`awk '{if ($1 == "densities:") print $2}' conf/config.yml`
n_stage=`awk '{if ($1 == "n_stage:") print $2}' conf/config.yml`
lpc=`awk '{if ($1 == "lpc:") print $2}' conf/config.yml`
causal_conv_wave=`awk '{if ($1 == "causal_conv_wave:") print $2}' conf/config.yml`
seg_conv_flag_wave=`awk '{if ($1 == "seg_conv_flag_wave:") print $2}' conf/config.yml`
s_dim=`awk '{if ($1 == "s_dim:") print $2}' conf/config.yml`
mid_dim=`awk '{if ($1 == "mid_dim:") print $2}' conf/config.yml`
#######################################
# DECODING SETTING #
#######################################
### This is for VC source-target pairs
# NOTE: the repeated array assignments are deliberate quick toggles;
# only the last (uncommented) assignment takes effect.
spks_src_dec=(SEM1 SEF2 SEM2 SEF1)
spks_src_dec=(SEM1 SEF1)
spks_src_dec=(SEF2)
spks_trg_dec=(TFM1 TGM1 TMM1 TEF1 TEM1 TEF2 TEM2 TFF1 TGF1 TMF1)
spks_trg_dec=(TEF1 TEM2)
spks_trg_dec=(TEM2)
###
###
# Number of interpolation steps per axis for stage 4 speaker-space sweep.
#n_interp=1
#n_interp=2
n_interp=4
#n_interp=8
#n_interp=10
###
### This is for speakers that will be used in analysis-synthesis
spks_dec=(SEF1 SEF2 SEM1 SEM2 TFM1 TGM1 TMM1 TEF1 TEM1 TEF2 TEM2 TFF1 TGF1 TMF1)
spks_dec=(TEM2 SEF2)
spks_dec=(TEM2)
###
### This is the maximum number of waveforms to be decoded per speaker
#n_wav_decode=1
#n_wav_decode=5
n_wav_decode=10
#n_wav_decode=50
###
# parse options
. parse_options.sh
# stop when error occured
set -e
# }}}
# Resolve experiment directories and best-checkpoint indices for the trained
# CycleVAE (VC) and MWDLP (vocoder) models; needed by stage 0 (dump) and stage 4.
# NOTE(review): if mdl_name_vc / mdl_name_wave do not match the expected model
# names, setting_vc / setting_wave stay empty and expdir becomes "exp/tr_" —
# confirm that is intended, or add an else-with-error branch.
if [ `echo ${stage} | grep 0` ] || [ `echo ${stage} | grep 4` ];then
echo $mdl_name_vc
if [ $mdl_name_vc == "cycmelspxlf0capspkvae-gauss-smpl_sparse_weightemb_v2" ]; then
setting_vc=${mdl_name_vc}_${data_name}_lr${lr}_bs${batch_size}_lat${lat_dim}_late${lat_dim_e}_hue${hidden_units_enc}_hud${hidden_units_dec}_huf${hidden_units_lf0}_do${do_prob}_st${step_count}_mel${mel_dim}_nhcyc${n_half_cyc}_s${spkidtr_dim}_w${n_weight_emb}_ts${t_start_cycvae}_te${t_end_cycvae}_i${interval_cycvae}_de${densities_cycvae_enc}_dd${densities_cycvae_dec}_ns${n_stage_cycvae}_sc${s_conv_flag}_ss${seg_conv_flag}
fi
expdir_vc=exp/tr_${setting_vc}
echo $expdir_vc
if [ -f "${expdir_vc}/checkpoint-last.pkl" ]; then
    # get_model_indices.py writes the selected checkpoint index to conf/<data>_vc.idx.
    ${train_cmd} ${expdir_vc}/get_model_indices.log \
        get_model_indices.py \
            --expdir ${expdir_vc} \
            --confdir conf/${data_name}_vc
    min_idx_cycvae=`cat conf/${data_name}_vc.idx | awk '{print $2}'`
    echo "${data_name}: min_idx_cycvae=${min_idx_cycvae}"
else
    echo "vc checkpoints not found, please run vc training step"
fi
echo $mdl_name_wave
if [ $mdl_name_wave == "wavernn_dualgru_compact_lpc_mband_10bit_cf_stft_emb_v2" ]; then
setting_wave=${mdl_name_wave}_${data_name}_lr${lr}_bs${batch_size_wave}_huw${hidden_units_wave}_hu2w${hidden_units_wave_2}_ksw${kernel_size_wave}_dsw${dilation_size_wave}_do${do_prob}_st${step_count_wave}_mel${mel_dim}_ts${t_start}_te${t_end}_i${interval}_d${densities}_ns${n_stage}_lpc${lpc}_rs${right_size_wave}_nb${n_bands}_s${s_dim}_m${mid_dim}_ss${seg_conv_flag_wave}
fi
expdir_wave=exp/tr_${setting_wave}
echo $expdir_wave
if [ -f "${expdir_wave}/checkpoint-last.pkl" ]; then
    ${train_cmd} ${expdir_wave}/get_model_indices.log \
        get_model_indices.py \
            --expdir ${expdir_wave} \
            --confdir conf/${data_name}_wave
    min_idx_wave=`cat conf/${data_name}_wave.idx | awk '{print $2}'`
    echo "${data_name}: min_idx_wave=${min_idx_wave}"
else
    echo "mwdlp checkpoints not found, please run mwdlp training step"
fi
fi
# Directory holding the C real-time demo sources (inc/, src/, bin/, Makefile).
demo_dir=demo_realtime_init
# STAGE 0 {{{
# Dump the trained CycleVAE + MWDLP weights as C headers/sources and compile
# the real-time demo binary.
if [ `echo ${stage} | grep 0` ];then
    echo "###########################################################"
    echo "# DUMP MODEL AND COMPILE REAL-TIME DEMO STEP #"
    echo "###########################################################"
    echo ""
    echo "model is been dumping, please check ${expdir_vc}/dump_model.log"
    echo ""
    ${train_cmd} ${expdir_vc}/dump_model.log \
        dump_sparse-cyclevae_init_mwdlp-10b.py \
            ${expdir_vc}/model.conf \
            ${expdir_vc}/checkpoint-${min_idx_cycvae}.pkl \
            ${expdir_wave}/model.conf \
            ${expdir_wave}/checkpoint-${min_idx_wave}.pkl \
            --fs ${fs} \
            --shiftms ${shiftms} \
            --winms ${winms} \
            --fftl ${fftl} \
            --highpass_cutoff ${highpass_cutoff}
    # The dump script emits *.h/*.c into the cwd; move them into the demo tree.
    mv -v *.h ${demo_dir}/inc
    mv -v *.c ${demo_dir}/src
    echo ""
    echo "dump model finished"
    echo ""
    echo "now compiling..."
    echo ""
    cd ${demo_dir}
    make clean
    make
    cd ..
    echo ""
    echo "compile finished, please try to run real-time decoding"
    echo ""
fi
# }}}
# STAGE 1 {{{
# Analysis-synthesis: run the compiled demo binary on up to n_wav_decode dev and
# test waveforms per speaker in spks_dec, logging per-speaker output.
if [ `echo ${stage} | grep 1` ];then
    echo "###########################################################"
    echo "# ANALYSIS-SYNTHESIS WITH REAL-TIME DEMO STEP #"
    echo "###########################################################"
    out_dir=wav_anasyn_realtime_init
    mkdir -p ${out_dir}
    out_dir=${out_dir}/${data_name}
    mkdir -p ${out_dir}
    for spk_src in ${spks_dec[@]};do
        out_spk_dir=${out_dir}/${spk_src}
        mkdir -p ${out_spk_dir}
        # --- development set ---
        out_spk_dv_dir=${out_spk_dir}/dev
        mkdir -p ${out_spk_dv_dir}
        dv_list=data/${dev}/wav.scp
        wav_dv_scp=${out_spk_dv_dir}/wav.scp
        # Keep only this speaker's paths (matched by /SPK/ path component).
        cat $dv_list | grep "\/${spk_src}\/" | sort | head -n ${n_wav_decode} > ${wav_dv_scp}
        rm -f "${out_spk_dv_dir}/log.txt"
        echo ""
        echo "waveforms are being synthesized, please see the log in ${out_spk_dv_dir}/log.txt"
        echo ""
        while read line;do
            name=`basename $line`
            # First echo is stdout progress; the rest is appended to the log.
            echo $line ${out_spk_dv_dir}/log.txt
            echo $line >> ${out_spk_dv_dir}/log.txt
            echo ${out_spk_dv_dir}/$name >> ${out_spk_dv_dir}/log.txt
            ./${demo_dir}/bin/test_cycvae_mwdlp $line ${out_spk_dv_dir}/$name >> ${out_spk_dv_dir}/log.txt
        done < ${wav_dv_scp}
        rm -f ${wav_dv_scp}
        # --- test set (same procedure) ---
        out_spk_ts_dir=${out_spk_dir}/test
        mkdir -p ${out_spk_ts_dir}
        ts_list=data/${tst}/wav.scp
        wav_ts_scp=${out_spk_ts_dir}/wav.scp
        cat $ts_list | grep "\/${spk_src}\/" | sort | head -n ${n_wav_decode} > ${wav_ts_scp}
        rm -f "${out_spk_ts_dir}/log.txt"
        echo ""
        echo "waveforms are being synthesized, please see the log in ${out_spk_ts_dir}/log.txt"
        echo ""
        while read line;do
            name=`basename $line`
            echo $line ${out_spk_ts_dir}/log.txt
            echo $line >> ${out_spk_ts_dir}/log.txt
            echo ${out_spk_ts_dir}/$name >> ${out_spk_ts_dir}/log.txt
            ./${demo_dir}/bin/test_cycvae_mwdlp $line ${out_spk_ts_dir}/$name >> ${out_spk_ts_dir}/log.txt
        done < ${wav_ts_scp}
        rm -f ${wav_ts_scp}
        echo ""
        echo "synthesis of ${spk_src} finished, outputs are located in ${out_spk_dv_dir} and ${out_spk_ts_dir}"
    done
    echo ""
    echo "synthesis of all speakers finished, outputs are located in respective directories of ${out_dir}"
    echo ""
fi
# }}}
# STAGE 2 {{{
# Analysis-synthesis plus mel-spectrogram round-trip: per input waveform,
# -o dumps the extracted melsp as .bin and .txt while synthesizing; then the
# dumped melsp is fed back in via -b (binary) and -t (text) to re-synthesize
# (per the flag usage and output file naming below).
if [ `echo ${stage} | grep 2` ];then
    echo "###########################################################"
    echo "# SYNTHESIS AND MEL-SPEC OUT/IN WITH REAL-TIME DEMO #"
    echo "###########################################################"
    out_dir=wav_melsp_realtime_init
    mkdir -p ${out_dir}
    out_dir=${out_dir}/${data_name}
    mkdir -p ${out_dir}
    for spk_src in ${spks_dec[@]};do
        out_spk_dir=${out_dir}/${spk_src}
        mkdir -p ${out_spk_dir}
        # --- development set ---
        out_spk_dv_dir=${out_spk_dir}/dev
        mkdir -p ${out_spk_dv_dir}
        dv_list=data/${dev}/wav.scp
        wav_dv_scp=${out_spk_dv_dir}/wav.scp
        cat $dv_list | grep "\/${spk_src}\/" | sort | head -n ${n_wav_decode} > ${wav_dv_scp}
        rm -f "${out_spk_dv_dir}/log.txt"
        echo ""
        echo "waveforms & melsp are being synthesized & generated, please see the log in ${out_spk_dv_dir}/log.txt"
        echo ""
        while read line;do
            name=`basename $line .wav`
            echo $line ${out_spk_dv_dir}/log.txt
            echo $line >> ${out_spk_dv_dir}/log.txt
            echo ${out_spk_dv_dir}/${name}_anasyn.wav >> ${out_spk_dv_dir}/log.txt
            ./${demo_dir}/bin/test_cycvae_mwdlp -o ${out_spk_dv_dir}/${name}_melsp.bin ${out_spk_dv_dir}/${name}_melsp.txt \
                $line ${out_spk_dv_dir}/${name}_anasyn.wav >> ${out_spk_dv_dir}/log.txt
            echo $line >> ${out_spk_dv_dir}/log.txt
            echo ${out_spk_dv_dir}/${name}_binsyn.wav >> ${out_spk_dv_dir}/log.txt
            ./${demo_dir}/bin/test_cycvae_mwdlp -b ${out_spk_dv_dir}/${name}_melsp.bin ${out_spk_dv_dir}/${name}_binsyn.wav >> ${out_spk_dv_dir}/log.txt
            echo $line >> ${out_spk_dv_dir}/log.txt
            echo ${out_spk_dv_dir}/${name}_txtsyn.wav >> ${out_spk_dv_dir}/log.txt
            ./${demo_dir}/bin/test_cycvae_mwdlp -t ${out_spk_dv_dir}/${name}_melsp.txt ${out_spk_dv_dir}/${name}_txtsyn.wav >> ${out_spk_dv_dir}/log.txt
        done < ${wav_dv_scp}
        rm -f ${wav_dv_scp}
        # --- test set (same procedure) ---
        out_spk_ts_dir=${out_spk_dir}/test
        mkdir -p ${out_spk_ts_dir}
        ts_list=data/${tst}/wav.scp
        wav_ts_scp=${out_spk_ts_dir}/wav.scp
        cat $ts_list | grep "\/${spk_src}\/" | sort | head -n ${n_wav_decode} > ${wav_ts_scp}
        rm -f "${out_spk_ts_dir}/log.txt"
        echo ""
        echo "waveforms & melsp are being synthesized & generated, please see the log in ${out_spk_ts_dir}/log.txt"
        echo ""
        while read line;do
            name=`basename $line .wav`
            echo $line ${out_spk_ts_dir}/log.txt
            echo $line >> ${out_spk_ts_dir}/log.txt
            echo ${out_spk_ts_dir}/$name >> ${out_spk_ts_dir}/log.txt
            echo ${out_spk_ts_dir}/${name}_anasyn.wav >> ${out_spk_ts_dir}/log.txt
            ./${demo_dir}/bin/test_cycvae_mwdlp -o ${out_spk_ts_dir}/${name}_melsp.bin ${out_spk_ts_dir}/${name}_melsp.txt \
                $line ${out_spk_ts_dir}/${name}_anasyn.wav >> ${out_spk_ts_dir}/log.txt
            echo $line >> ${out_spk_ts_dir}/log.txt
            echo ${out_spk_ts_dir}/${name}_binsyn.wav >> ${out_spk_ts_dir}/log.txt
            ./${demo_dir}/bin/test_cycvae_mwdlp -b ${out_spk_ts_dir}/${name}_melsp.bin ${out_spk_ts_dir}/${name}_binsyn.wav >> ${out_spk_ts_dir}/log.txt
            echo $line >> ${out_spk_ts_dir}/log.txt
            echo ${out_spk_ts_dir}/${name}_txtsyn.wav >> ${out_spk_ts_dir}/log.txt
            ./${demo_dir}/bin/test_cycvae_mwdlp -t ${out_spk_ts_dir}/${name}_melsp.txt ${out_spk_ts_dir}/${name}_txtsyn.wav >> ${out_spk_ts_dir}/log.txt
        done < ${wav_ts_scp}
        rm -f ${wav_ts_scp}
        echo ""
        echo "synthesis and melsp out & in of ${spk_src} finished, outputs are located in ${out_spk_dv_dir} and ${out_spk_ts_dir}"
    done
    echo ""
    echo "synthesis and melsp out & in of all speakers finished, outputs are located in respective directories of ${out_dir}"
    echo ""
fi
# }}}
# STAGE 3 {{{
# Voice conversion to a fixed target speaker: for each (source, target) pair,
# the target's 1-based position in ${spks[@]} is passed to the demo binary
# via -i as the speaker-point index.
if [ `echo ${stage} | grep 3` ];then
    echo "###########################################################"
    echo "# VC ON SPEAKER POINT TARGET WITH REAL-TIME DEMO STEP #"
    echo "###########################################################"
    out_dir=wav_cv_point_init
    mkdir -p ${out_dir}
    out_dir=${out_dir}/${data_name}
    mkdir -p ${out_dir}
    for spk_src in ${spks_src_dec[@]};do
        for spk_trg in ${spks_trg_dec[@]};do
            # Linear search for the target speaker's 1-based index.
            spk_idx=1
            for spk_srch in ${spks[@]};do
                if [ "$spk_trg" == "$spk_srch" ]; then
                    break
                fi
                spk_idx=$((${spk_idx}+1))
            done
            # Not found => index ran past the end of the list.
            if [ $spk_idx -gt ${#spks[@]} ]; then
                echo error, $spk_trg not in spk_list
                exit
            fi
            out_spk_dir=${out_dir}/${spk_src}-${spk_trg}
            mkdir -p ${out_spk_dir}
            # --- development set ---
            out_spk_dv_dir=${out_spk_dir}/dev
            mkdir -p ${out_spk_dv_dir}
            dv_list=data/${dev}/wav.scp
            wav_dv_scp=${out_spk_dv_dir}/wav.scp
            cat $dv_list | grep "\/${spk_src}\/" | sort | head -n ${n_wav_decode} > ${wav_dv_scp}
            rm -f "${out_spk_dv_dir}/log.txt"
            echo ""
            echo "waveforms are being synthesized, please see the log in ${out_spk_dv_dir}/log.txt"
            echo ""
            while read line;do
                name=`basename $line`
                echo $line ${out_spk_dv_dir}/log.txt
                echo $line >> ${out_spk_dv_dir}/log.txt
                echo ${out_spk_dv_dir}/$name >> ${out_spk_dv_dir}/log.txt
                ./${demo_dir}/bin/test_cycvae_mwdlp -i ${spk_idx} $line ${out_spk_dv_dir}/$name >> ${out_spk_dv_dir}/log.txt
            done < ${wav_dv_scp}
            rm -f ${wav_dv_scp}
            # --- test set (same procedure) ---
            out_spk_ts_dir=${out_spk_dir}/test
            mkdir -p ${out_spk_ts_dir}
            ts_list=data/${tst}/wav.scp
            wav_ts_scp=${out_spk_ts_dir}/wav.scp
            cat $ts_list | grep "\/${spk_src}\/" | sort | head -n ${n_wav_decode} > ${wav_ts_scp}
            rm -f "${out_spk_ts_dir}/log.txt"
            echo ""
            echo "waveforms are being synthesized, please see the log in ${out_spk_ts_dir}/log.txt"
            echo ""
            while read line;do
                name=`basename $line`
                echo $line ${out_spk_ts_dir}/log.txt
                echo $line >> ${out_spk_ts_dir}/log.txt
                echo ${out_spk_ts_dir}/$name >> ${out_spk_ts_dir}/log.txt
                ./${demo_dir}/bin/test_cycvae_mwdlp -i ${spk_idx} $line ${out_spk_ts_dir}/$name >> ${out_spk_ts_dir}/log.txt
            done < ${wav_ts_scp}
            rm -f ${wav_ts_scp}
            echo ""
            echo "synthesis of ${spk_src}-${spk_trg} finished, outputs are located in ${out_spk_dv_dir} and ${out_spk_ts_dir}"
        done
        echo ""
        echo "synthesis of ${spk_src} finished, outputs are located in ${out_spk_dv_dir} and ${out_spk_ts_dir}"
    done
    echo ""
    echo "synthesis of all speakers finished, outputs are located in respective directories of ${out_dir}"
    echo ""
fi
# }}}
# STAGE 4 {{{
# Voice conversion over an interpolated speaker space: map speakers to 2-D
# coordinates, derive an (n_interp+1)-point grid spanning [min,max] on each
# axis, and synthesize every source utterance at every (x,y) grid point via -c.
if [ `echo ${stage} | grep 4` ];then
    echo "###########################################################"
    echo "# VC ON INTERPOLATED POINT WITH REAL-TIME DEMO STEP #"
    echo "###########################################################"
    model=${expdir_vc}/checkpoint-${min_idx_cycvae}.pkl
    config=${expdir_vc}/model.conf
    outdir=${expdir_vc}/spkidtr-${min_idx_cycvae}
    mkdir -p $outdir
    # Decode the speaker-id-transform map; the log also contains the per-speaker
    # 2-D coordinates parsed by the awk program below.
    ${cuda_cmd} ${expdir_vc}/log/decode_spkidtr_${min_idx_cycvae}.log \
        decode_spkidtr_map.py \
            --outdir ${outdir} \
            --model ${model} \
            --config ${config}
    #exit
    echo ""
    echo "speaker space has been mapped, please see the figure and coords here: ${outdir}"
    echo ""
    # Scan the "spk-id" table in the decode log for min/max x ($3) and y ($4),
    # then print two lines: n_interp+1 evenly spaced x values and y values.
    # (No comments inside the program: its backslash line-continuations would break.)
    awk -v n_interp="${n_interp}" 'BEGIN {flag_spkid=0;} \
    { \
    if (flag_spkid) { \
    if ($1 != "(1,") { \
    if ($1 > 1) { \
    if ($3 < min_x) min_x = $3;
    else if ($3 > max_x) max_x = $3;
    if ($4 < min_y) min_y = $4;
    else if ($4 > max_y) max_y = $4;
    } else { \
    min_x = $3;
    max_x = $3;
    min_y = $4;
    max_y = $4;
    } \
    } else { \
    flag_spkid=0;
    } \
    } else { \
    if ($1 == "spk-id") flag_spkid=1; \
    } \
    } \
    END { \
    delta_max_min_x = (max_x - min_x) / n_interp;
    x=min_x;
    for (i=0;i<=n_interp;i++) {
    if (i < n_interp) printf "%lf ", x;
    else printf "%lf\n", x;
    x += delta_max_min_x;
    }
    y=min_y;
    delta_max_min_y = (max_y - min_y) / n_interp;
    for (i=0;i<=n_interp;i++) {
    if (i < n_interp) printf "%lf ", y;
    else printf "%lf\n", y;
    y += delta_max_min_y;
    }
    }' \
        ${expdir_vc}/log/decode_spkidtr_${min_idx_cycvae}.log \
            > conf/min_max_coord_${n_interp}_${data_name}.txt
    # Line 1 = x grid, line 2 = y grid.
    x_coords=(`cat conf/min_max_coord_${n_interp}_${data_name}.txt | head -n 1`)
    y_coords=(`cat conf/min_max_coord_${n_interp}_${data_name}.txt | tail -n 1`)
    echo "min_to_max x, with ${n_interp} interpolations:" ${x_coords[@]}
    echo "min_to_max y, with ${n_interp} interpolations:" ${y_coords[@]}
    #exit
    out_dir=wav_cv_interp_init
    mkdir -p ${out_dir}
    out_dir=${out_dir}/${data_name}
    mkdir -p ${out_dir}
    for spk_src in ${spks_src_dec[@]};do
        out_spk_dir=${out_dir}/${spk_src}
        mkdir -p ${out_spk_dir}
        # --- development set ---
        out_spk_dv_dir=${out_spk_dir}/dev
        mkdir -p ${out_spk_dv_dir}
        dv_list=data/${dev}/wav.scp
        wav_dv_scp=${out_spk_dv_dir}/wav.scp
        cat $dv_list | grep "\/${spk_src}\/" | sort | head -n ${n_wav_decode} > ${wav_dv_scp}
        rm -f "${out_spk_dv_dir}/log.txt"
        echo ""
        echo "waveforms are being synthesized, please see the log in ${out_spk_dv_dir}/log.txt"
        echo ""
        while read line;do
            name=`basename $line .wav`
            for x_coord in ${x_coords[@]};do
                for y_coord in ${y_coords[@]};do
                    echo $line ${out_spk_dv_dir}/log.txt $x_coord $y_coord
                    echo $line ${x_coord} ${y_coord} >> ${out_spk_dv_dir}/log.txt
                    echo ${out_spk_dv_dir}/${name}_${x_coord}_${y_coord}.wav >> ${out_spk_dv_dir}/log.txt
                    ./${demo_dir}/bin/test_cycvae_mwdlp -c ${x_coord} ${y_coord} \
                        $line ${out_spk_dv_dir}/${name}_${x_coord}_${y_coord}.wav >> ${out_spk_dv_dir}/log.txt
                done
            done
        done < ${wav_dv_scp}
        rm -f ${wav_dv_scp}
        # --- test set (same procedure) ---
        out_spk_ts_dir=${out_spk_dir}/test
        mkdir -p ${out_spk_ts_dir}
        ts_list=data/${tst}/wav.scp
        wav_ts_scp=${out_spk_ts_dir}/wav.scp
        cat $ts_list | grep "\/${spk_src}\/" | sort | head -n ${n_wav_decode} > ${wav_ts_scp}
        rm -f "${out_spk_ts_dir}/log.txt"
        echo ""
        echo "waveforms are being synthesized, please see the log in ${out_spk_ts_dir}/log.txt"
        echo ""
        while read line;do
            name=`basename $line .wav`
            for x_coord in ${x_coords[@]};do
                for y_coord in ${y_coords[@]};do
                    echo $line ${out_spk_ts_dir}/log.txt $x_coord $y_coord
                    echo $line ${x_coord} ${y_coord} >> ${out_spk_ts_dir}/log.txt
                    echo ${out_spk_ts_dir}/${name}_${x_coord}_${y_coord}.wav >> ${out_spk_ts_dir}/log.txt
                    ./${demo_dir}/bin/test_cycvae_mwdlp -c ${x_coord} ${y_coord} \
                        $line ${out_spk_ts_dir}/${name}_${x_coord}_${y_coord}.wav >> ${out_spk_ts_dir}/log.txt
                done
            done
        done < ${wav_ts_scp}
        rm -f ${wav_ts_scp}
        echo ""
        echo "synthesis of ${spk_src} finished, outputs are located in ${out_spk_dv_dir} and ${out_spk_ts_dir}"
    done
    echo ""
    echo "synthesis of all speakers finished, outputs are located in respective directories of ${out_dir}"
    echo ""
fi
# }}}
|
import React from "react";
import styled from "styled-components/native";
export default TextStyle = ({ ...props}) => {
return <Text {...props}>{props.children}</Text>;
};
const Text = styled.Text`
color: ${(props) => props.color ?? "#DBDBDB"};
font-family: "Avenir";
margin: ${(props) => props.margin ?? 0};
padding: ${(props) => props.padding ?? 0};
${({title, large, medium, small, tiny}) => {
switch(true) {
case title:
return `font-size: 32px`;
case large:
return `font-size: 18px;`;
case medium:
return `font-size: 15px;`;
case small:
return `font-size: 11px;`;
case tiny:
return `font-size: 10px;`;
default:
return `font-size: 13px;`;
}
}}
${({light, bold, heavy, black}) => {
switch(true) {
case light:
return `font-weight: 200`;
case bold:
return `font-weight: 600;`;
case heavy:
return `font-weight: 700;`;
case black:
return `font-weight: 900`;
default:
return `font-weight: 400;`;
}
}}
${({center, right}) => {
switch(true) {
case center:
return `text-align: center`;
case right:
return `text-align: right;`;
default:
return `text-align: left;`;
}
}}
` |
import * as zjson from "../zjson"
export class Schema {
  constructor(readonly columns: zjson.Column[]) {}

  /**
   * Return the columns with nested "record" columns expanded into their
   * leaf columns, names joined with dots (e.g. "outer.inner.leaf").
   * Record columns themselves are not included — only their leaves.
   */
  flatten() {
    const leaves: zjson.Column[] = []
    const walk = (cols: zjson.Column[], prefix: string) => {
      for (const col of cols) {
        const qualified = prefix + col.name
        if (col.type == "record") {
          walk(col.of, qualified + ".")
        } else {
          leaves.push({...col, name: qualified})
        }
      }
    }
    walk(this.columns, "")
    return leaves
  }
}
|
#!/bin/bash
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Nightly GPU pip-package build: configure, build the wheel with bazel, rename
# it to manylinux2010, smoke-test it, and upload to PyPI on success.
set -e
set -x

source tensorflow/tools/ci_build/release/common.sh
set_bazel_outdir

install_ubuntu_16_pip_deps pip3.6
install_bazelisk

# Stamp the nightly version number into the source tree.
python2.7 tensorflow/tools/ci_build/update_version.py --nightly

# Run configure.
export TF_NEED_GCP=1
export TF_NEED_HDFS=1
export TF_NEED_S3=1
export TF_NEED_CUDA=1
export TF_CUDA_VERSION=10
export TF_CUDNN_VERSION=7
export TF_CUDA_COMPUTE_CAPABILITIES=3.5,3.7,5.2,6.0,6.1,7.0
export TF_NEED_TENSORRT=1
export TENSORRT_INSTALL_PATH=/usr/local/tensorrt
export CC_OPT_FLAGS='-mavx'
export PYTHON_BIN_PATH=$(which python3.6)
yes "" | "$PYTHON_BIN_PATH" configure.py

# Build the pip package
bazel build --config=opt --config=v2 \
  --crosstool_top=//third_party/toolchains/preconfig/ubuntu16.04/gcc7_manylinux2010-nvcc-cuda10.1:toolchain \
  tensorflow/tools/pip_package:build_pip_package
./bazel-bin/tensorflow/tools/pip_package/build_pip_package pip_pkg --nightly_flag
./bazel-bin/tensorflow/tools/pip_package/build_pip_package pip_pkg --gpu --nightly_flag

# Upload the built packages to pypi.
for WHL_PATH in $(ls pip_pkg/tf_nightly*dev*.whl); do
  WHL_DIR=$(dirname "${WHL_PATH}")
  WHL_BASE_NAME=$(basename "${WHL_PATH}")
  AUDITED_WHL_NAME="${WHL_DIR}"/$(echo "${WHL_BASE_NAME//linux/manylinux2010}")

  # Copy and rename for gpu manylinux as we do not want auditwheel to package in libcudart.so
  WHL_PATH=${AUDITED_WHL_NAME}
  cp "${WHL_DIR}"/"${WHL_BASE_NAME}" "${WHL_PATH}"
  echo "Copied manylinux2010 wheel file at: ${WHL_PATH}"

  # Smoke-test the wheel. BUG FIX: under `set -e` a plain command failure
  # aborted the script before the old `RETVAL=$?` check ever ran, making the
  # failure branch dead code — run the test as the `if` condition instead.
  chmod +x tensorflow/tools/ci_build/builds/nightly_release_smoke_test.sh
  if ./tensorflow/tools/ci_build/builds/nightly_release_smoke_test.sh "${AUDITED_WHL_NAME}"; then
    echo "Basic PIP test PASSED, Uploading package: ${AUDITED_WHL_NAME}"
    twine upload -r pypi-warehouse "${AUDITED_WHL_NAME}"
  else
    echo "Basic PIP test FAILED, will not upload ${AUDITED_WHL_NAME} package"
    # BUG FIX: `return` is only legal inside a function or a sourced script;
    # at top level it is a shell error. Use `exit` to fail the build.
    exit 1
  fi
done
|
import promiseFromWXCallback from '../lib/promiseFromWXCallback';
// Promisified wx.request plus the service host used to resolve relative paths.
const request = promiseFromWXCallback(wx.request);
const host = 'https://service.mediamall.cccwei.com';

/**
 * Fire an authenticated wx.request.
 * - config.path: absolute URL (used as-is) or service-relative path (prefixed with host);
 *   falls back to config.url when no path is given.
 * - Adds the stored token as an RFC 6750 `Authorization: Bearer <token>` header.
 * Returns the request promise, or undefined if reading storage throws
 * (errors are logged and swallowed, preserving the original best-effort behavior).
 */
export default (config) => {
  try {
    const token = wx.getStorageSync('token');
    // BUG FIX: only `http://` was recognized as absolute, so absolute
    // `https://` paths were wrongly prefixed with the host.
    if (config.path && /^https?:\/\//.test(config.path)) {
      config.url = config.path;
    } else {
      config.url = config.path ? host + config.path : config.url;
    }
    config.header = config.header || {};
    // BUG FIX: was `Bearer {${token}}`, sending literal braces around the
    // token; the standard bearer format is `Bearer <token>`.
    config.header.authorization = `Bearer ${token}`;
    return request(config);
  } catch (e) {
    console.log(e);
  }
};
|
<reponame>xeonray-origin/react-typescript-redux-template<filename>src/components/pages/products.tsx
import React from 'react';
import { connect } from 'react-redux';
import { Typography } from '@material-ui/core';
// Stateless page shell for the Products route. The store connection is kept
// (with no mapped state or dispatch) so the exported component's shape is
// unchanged for existing callers.
const ProductsPage = (props: any) => (
  <>
    <Typography variant={'h2'}>Products</Typography>
  </>
);

export default connect(null, null)(ProductsPage);
|
const { v4: uuidv4 } = require('uuid');
const { ObjectId } = require('mongodb');
const User = require('../../../src/models/User');
const Customer = require('../../../src/models/Customer');
const Contract = require('../../../src/models/Contract');
const Service = require('../../../src/models/Service');
const Event = require('../../../src/models/Event');
const Sector = require('../../../src/models/Sector');
const SectorHistory = require('../../../src/models/SectorHistory');
const UserCompany = require('../../../src/models/UserCompany');
const Surcharge = require('../../../src/models/Surcharge');
const DistanceMatrix = require('../../../src/models/DistanceMatrix');
const { authCompany, otherCompany } = require('../../seed/authCompaniesSeed');
const { deleteNonAuthenticationSeeds } = require('../helpers/authentication');
const { WEBAPP } = require('../../../src/helpers/constants');
const { auxiliaryRoleId, coachRoleId } = require('../../seed/authRolesSeed');
// Shared ObjectIds cross-referenced by the seed documents below.
const contractId = new ObjectId();
const auxiliaryId = new ObjectId();
const customerId = new ObjectId();
const subscriptionIds = [new ObjectId(), new ObjectId(), new ObjectId(), new ObjectId()];
const serviceIds = [new ObjectId(), new ObjectId(), new ObjectId(), new ObjectId()];
const sectorId = new ObjectId();
// Webapp user with the client coach role (no contract/events attached).
const user = {
_id: new ObjectId(),
local: { email: '<EMAIL>' },
identity: { lastname: 'Toto' },
refreshToken: uuidv4(),
role: { client: coachRoleId },
inactivityDate: '2018-11-01T12:52:27.461Z',
origin: WEBAPP,
};
// Auxiliary in the auth company; owner of `contract` and of all events below.
const auxiliary = {
_id: auxiliaryId,
identity: { firstname: 'Test7', lastname: 'Test7' },
local: { email: '<EMAIL>' },
inactivityDate: '2019-06-01T00:00:00',
refreshToken: uuidv4(),
role: { client: auxiliaryRoleId },
contracts: contractId,
origin: WEBAPP,
administrative: { phoneInvoice: { driveId: 'qwertyuioiuytrew' } },
};
// Auxiliary attached to the other company — used to test cross-company access rules.
const auxiliaryFromOtherCompany = {
_id: new ObjectId(),
identity: { firstname: 'Cricri', lastname: 'test' },
local: { email: '<EMAIL>' },
refreshToken: uuidv4(),
role: { client: auxiliaryRoleId },
contracts: contractId,
origin: WEBAPP,
};
// Links each user above to its company.
const userCompanyList = [
{ _id: new ObjectId(), user: user._id, company: authCompany._id },
{ _id: new ObjectId(), user: auxiliaryId, company: authCompany._id },
{ _id: new ObjectId(), user: auxiliaryFromOtherCompany._id, company: otherCompany._id },
];
// Ended contract (endReason 'mutation') for `auxiliary`, with a single
// open-ended version carrying the pay rate and weekly hours.
const contract = {
createdAt: '2018-12-04T16:34:04',
serialNumber: 'aswertyujnmklk',
endDate: '2022-05-28T23:59:59.000Z',
endNotificationDate: '2022-03-28T00:00:00.000Z',
endReason: 'mutation',
user: auxiliaryId,
startDate: '2018-12-03T00:00:00.000Z',
_id: contractId,
company: authCompany._id,
versions: [
{
createdAt: '2018-12-04T16:34:04',
endDate: null,
grossHourlyRate: 10.28,
startDate: '2018-12-03T00:00:00.000Z',
weeklyHours: 9,
_id: new ObjectId(),
},
],
};
// Events for `auxiliary`: May 2022 interventions/internal hour/absence, then
// an April 2022 ("previous month") set, each intervention tied to a
// subscription id and a customer address.
const eventList = [
{
_id: new ObjectId(),
company: authCompany._id,
type: 'intervention',
startDate: '2022-05-12T09:00:00.000Z',
endDate: '2022-05-12T12:00:00.000Z',
auxiliary: auxiliaryId,
customer: customerId,
createdAt: '2022-05-01T09:00:00.000Z',
sector: new ObjectId(),
subscription: subscriptionIds[0],
address: {
fullAddress: '37 rue de ponthieu 75008 Paris',
zipCode: '75008',
city: 'Paris',
street: '37 rue de Ponthieu',
location: { type: 'Point', coordinates: [2.377133, 48.801389] },
},
},
{
_id: new ObjectId(),
company: authCompany._id,
type: 'intervention',
startDate: '2022-05-08T09:00:00.000Z',
endDate: '2022-05-08T11:00:00.000Z',
auxiliary: auxiliaryId,
customer: customerId,
createdAt: '2022-05-01T09:00:00.000Z',
sector: new ObjectId(),
subscription: subscriptionIds[1],
address: {
fullAddress: '30 Rue Traversière 75012 Paris',
zipCode: '75012',
city: 'Paris',
street: '30 Rue Traversière',
location: { type: 'Point', coordinates: [2.37413, 48.848278] },
},
},
{
_id: new ObjectId(),
company: authCompany._id,
type: 'intervention',
startDate: '2022-05-08T15:00:00.000Z',
endDate: '2022-05-08T16:00:00.000Z',
auxiliary: auxiliaryId,
customer: customerId,
createdAt: '2022-05-01T10:00:00.000Z',
sector: new ObjectId(),
subscription: subscriptionIds[2],
address: {
fullAddress: '62 Rue Brancion 75015 Paris',
zipCode: '75015',
city: 'Paris',
street: '62 Rue Brancion',
location: { type: 'Point', coordinates: [2.303387, 48.832701] },
},
},
// Non-intervention events: an internal hour and an hourly absence.
{
_id: new ObjectId(),
company: authCompany._id,
type: 'internal_hour',
startDate: '2022-05-09T09:00:00.000Z',
endDate: '2022-05-09T12:00:00.000Z',
auxiliary: auxiliaryId,
internalHour: { _id: new ObjectId(), name: 'Formation' },
},
// NOTE(review): this absence also carries an internalHour field — looks
// copy-pasted from the event above; confirm the schema ignores it.
{
_id: new ObjectId(),
company: authCompany._id,
type: 'absence',
absenceNature: 'hourly',
absence: 'transport_accident',
startDate: '2022-05-08T09:00:00.000Z',
endDate: '2022-05-08T10:00:00.000Z',
auxiliary: auxiliaryId,
internalHour: { _id: new ObjectId(), name: 'Formation' },
},
// previous month
{
_id: new ObjectId(),
company: authCompany._id,
type: 'intervention',
startDate: '2022-04-12T09:00:00.000Z',
endDate: '2022-04-12T13:00:00.000Z',
auxiliary: auxiliaryId,
customer: customerId,
createdAt: '2022-05-01T09:00:00.000Z',
sector: new ObjectId(),
subscription: subscriptionIds[0],
address: {
fullAddress: '37 rue de ponthieu 75008 Paris',
zipCode: '75008',
city: 'Paris',
street: '37 rue de Ponthieu',
location: { type: 'Point', coordinates: [2.377133, 48.801389] },
},
},
{
_id: new ObjectId(),
company: authCompany._id,
type: 'intervention',
startDate: '2022-04-08T15:00:00.000Z',
endDate: '2022-04-08T16:30:00.000Z',
auxiliary: auxiliaryId,
customer: customerId,
createdAt: '2022-05-01T10:00:00.000Z',
sector: new ObjectId(),
subscription: subscriptionIds[1],
address: {
fullAddress: '30 Rue Traversière 75012 Paris',
zipCode: '75012',
city: 'Paris',
street: '30 Rue Traversière',
location: { type: 'Point', coordinates: [2.37413, 48.848278] },
},
},
{
_id: new ObjectId(),
company: authCompany._id,
type: 'intervention',
startDate: '2022-04-08T15:00:00.000Z',
endDate: '2022-04-08T16:30:00.000Z',
auxiliary: auxiliaryId,
customer: customerId,
createdAt: '2022-05-01T10:00:00.000Z',
sector: new ObjectId(),
subscription: subscriptionIds[3],
address: {
fullAddress: '62 Rue Brancion 75015 Paris',
zipCode: '75015',
city: 'Paris',
street: '62 Rue Brancion',
location: { type: 'Point', coordinates: [2.303387, 48.832701] },
},
},
{
_id: new ObjectId(),
company: authCompany._id,
type: 'internal_hour',
startDate: '2022-04-09T09:00:00.000Z',
endDate: '2022-04-09T12:00:00.000Z',
auxiliary: auxiliaryId,
internalHour: { _id: new ObjectId(), name: 'Formation' },
},
];
// Customer served by the interventions above. One subscription per service in
// serviceList; the subscription ids are referenced by the events' `subscription`.
const customer = {
  _id: customerId,
  company: authCompany._id,
  identity: { title: 'mr', firstname: 'Toto', lastname: 'Tata' },
  sectors: ['1e*'],
  contact: {
    primaryAddress: {
      fullAddress: '37 rue de ponthieu 75008 Paris',
      // NOTE(review): zipCode '75' looks truncated compared to the '75008'
      // used everywhere else — confirm it is intentional.
      zipCode: '75',
      city: 'Paris',
      street: '37 rue de Ponthieu',
      location: { type: 'Point', coordinates: [2.377133, 48.801389] },
    },
  },
  subscriptions: [
    {
      _id: subscriptionIds[0],
      service: serviceIds[0],
      versions: [{
        unitTTCRate: 12,
        weeklyHours: 12,
        evenings: 2,
        sundays: 1,
        startDate: '2018-01-01T10:00:00.000Z',
      }],
    },
    {
      _id: subscriptionIds[1],
      service: serviceIds[1],
      versions: [{
        unitTTCRate: 100,
        weeklyHours: 12,
        evenings: 0,
        sundays: 3,
        startDate: '2018-01-03T10:00:00.000Z',
      }],
    },
    {
      _id: subscriptionIds[2],
      service: serviceIds[2],
      versions: [{
        unitTTCRate: 4,
        weeklyHours: 14,
        evenings: 0,
        sundays: 1,
        startDate: '2018-01-03T10:00:00.000Z',
      }],
    },
    {
      _id: subscriptionIds[3],
      service: serviceIds[3],
      versions: [{
        unitTTCRate: 5,
        weeklyHours: 4,
        evenings: 0,
        sundays: 1,
        startDate: '2018-01-03T10:00:00.000Z',
      }],
    },
  ],
};
// Surcharge referenced by services C and D below: 30 (percent) on sundays.
const surcharge = {
  _id: new ObjectId(),
  name: 'surplus',
  sunday: 30,
  company: authCompany._id,
};
// Hourly services backing the customer's four subscriptions. B and C are
// exempt from charges; C and D carry the sunday surcharge defined above.
const serviceList = [
  {
    _id: serviceIds[0],
    company: authCompany._id,
    versions: [{
      defaultUnitAmount: 12,
      name: 'Service A',
      exemptFromCharges: false,
      startDate: '2019-01-16T00:00:00.000Z',
      vat: 12,
    }],
    nature: 'hourly',
  },
  {
    _id: serviceIds[1],
    company: authCompany._id,
    versions: [{
      defaultUnitAmount: 30,
      name: 'Service B',
      exemptFromCharges: true,
      startDate: '2019-01-30T00:00:00.000Z',
      vat: 20,
    }],
    nature: 'hourly',
  },
  {
    _id: serviceIds[2],
    company: authCompany._id,
    versions: [{
      defaultUnitAmount: 100,
      name: 'Service C',
      exemptFromCharges: true,
      startDate: '2019-01-30T00:00:00.000Z',
      vat: 5,
      surcharge: surcharge._id,
    }],
    nature: 'hourly',
  },
  {
    _id: serviceIds[3],
    company: authCompany._id,
    versions: [{
      defaultUnitAmount: 33,
      name: 'Service D',
      exemptFromCharges: false,
      startDate: '2019-01-30T00:00:00.000Z',
      vat: 5,
      surcharge: surcharge._id,
    }],
    nature: 'hourly',
  },
];
// Pre-seeded driving distance between two of the intervention addresses, so
// tests do not need to call an external distance service.
const distanceMatrix = {
  _id: new ObjectId(),
  company: authCompany._id,
  origins: '30 Rue Traversière 75012 Paris',
  destinations: '62 Rue Brancion 75015 Paris',
  mode: 'driving',
  distance: 6532, // presumably meters — confirm against the DistanceMatrix schema
  duration: 1458, // presumably seconds — confirm against the DistanceMatrix schema
};
// Sector of the auxiliary, and the history entry attaching the auxiliary to it.
const sector = { name: 'Toto', _id: sectorId, company: authCompany._id };
const sectorHistory = { auxiliary: auxiliaryId, sector: sectorId, company: authCompany._id, startDate: '2018-12-10' };
/**
 * Seed the database for this test suite: wipe every non-authentication seed,
 * then insert all fixtures declared above in parallel.
 */
const populateDB = async () => {
  await deleteNonAuthenticationSeeds();

  const seedingOperations = [
    Contract.create(contract),
    Customer.create(customer),
    DistanceMatrix.create(distanceMatrix),
    Event.create(eventList),
    Sector.create(sector),
    SectorHistory.create(sectorHistory),
    Service.create(serviceList),
    Surcharge.create(surcharge),
    User.create([user, auxiliary, auxiliaryFromOtherCompany]),
    UserCompany.create(userCompanyList),
  ];

  await Promise.all(seedingOperations);
};

module.exports = { populateDB, auxiliary, auxiliaryFromOtherCompany, surcharge };
|
/**
 * Compute x! for a non-negative integer x.
 *
 * Bug fix: the base case previously returned x itself, so factorial(0)
 * yielded 0 instead of the correct 1, and negative inputs were silently
 * returned unchanged. Negative inputs now raise a RangeError.
 *
 * @param {number} x - non-negative integer
 * @returns {number} x!
 * @throws {RangeError} if x is negative
 */
function factorial(x) {
  if (x < 0) throw new RangeError('factorial is undefined for negative numbers');
  if (x <= 1) return 1; // 0! === 1! === 1
  return x * factorial(x - 1);
}
namespace ts.projectSystem {
    describe("unittests:: tsserver:: webServer", () => {
        // WorkerSession subclass that pins the session options used by the
        // web-server tests and exposes the project service for assertions.
        class TestWorkerSession extends server.WorkerSession {
            constructor(host: server.ServerHost, webHost: server.HostWithWriteMessage, options: Partial<server.StartSessionOptions>, logger: server.Logger) {
                super(
                    host,
                    webHost,
                    {
                        globalPlugins: undefined,
                        pluginProbeLocations: undefined,
                        allowLocalPluginLoads: undefined,
                        useSingleInferredProject: true,
                        useInferredProjectPerProjectRoot: false,
                        suppressDiagnosticEvents: false,
                        noGetErrOnBackgroundUpdate: true,
                        syntaxOnly: undefined,
                        serverMode: undefined,
                        ...options
                    },
                    logger,
                    server.nullCancellationToken,
                    () => emptyArray
                );
            }
            getProjectService() {
                return this.projectService;
            }
        }
        // Builds a PartialSemantic worker session over a virtual Windows-rooted
        // host. Everything the session writes (responses and, when enabled,
        // log lines) is captured in `messages` for the tests to inspect.
        function setup(logLevel: server.LogLevel | undefined) {
            const host = createServerHost([libFile], { windowsStyleRoot: "c:/" });
            const messages: any[] = [];
            const webHost: server.WebHost = {
                readFile: s => host.readFile(s),
                fileExists: s => host.fileExists(s),
                writeMessage: s => messages.push(s),
            };
            const webSys = server.createWebSystem(webHost, emptyArray, () => host.getExecutingFilePath());
            const logger = logLevel !== undefined ? new server.MainProcessLogger(logLevel, webHost) : nullLogger;
            const session = new TestWorkerSession(webSys, webHost, { serverMode: LanguageServiceMode.PartialSemantic }, logger);
            return { getMessages: () => messages, clearMessages: () => messages.length = 0, session };
        }
        describe("open files are added to inferred project and semantic operations succeed", () => {
            // Runs the full scenario (open file, quickinfo, goto-def into lib)
            // once with and once without verbose logging.
            function verify(logLevel: server.LogLevel | undefined) {
                const { session, clearMessages, getMessages } = setup(logLevel);
                const service = session.getProjectService();
                const file: File = {
                    path: "^memfs:/sample-folder/large.ts",
                    content: "export const numberConst = 10; export const arrayConst: Array<string> = [];"
                };
                session.executeCommand({
                    seq: 1,
                    type: "request",
                    command: protocol.CommandTypes.Open,
                    arguments: {
                        file: file.path,
                        fileContent: file.content
                    }
                });
                checkNumberOfProjects(service, { inferredProjects: 1 });
                const project = service.inferredProjects[0];
                checkProjectActualFiles(project, ["/lib.d.ts", file.path]); // Lib files are rooted
                verifyQuickInfo();
                verifyGotoDefInLib();
                // Quickinfo on `numberConst` must succeed and report the
                // narrowed literal type `10`.
                function verifyQuickInfo() {
                    clearMessages();
                    const start = protocolFileLocationFromSubstring(file, "numberConst");
                    session.onMessage({
                        seq: 2,
                        type: "request",
                        command: protocol.CommandTypes.Quickinfo,
                        arguments: start
                    });
                    assert.deepEqual(last(getMessages()), {
                        seq: 0,
                        type: "response",
                        command: protocol.CommandTypes.Quickinfo,
                        request_seq: 2,
                        success: true,
                        performanceData: undefined,
                        body: {
                            kind: ScriptElementKind.constElement,
                            kindModifiers: "export",
                            start: { line: start.line, offset: start.offset },
                            end: { line: start.line, offset: start.offset + "numberConst".length },
                            displayString: "const numberConst: 10",
                            documentation: "",
                            tags: []
                        }
                    });
                    verifyLogger();
                }
                // Goto-definition on `Array` must resolve into the bundled
                // lib.d.ts served by the web host.
                function verifyGotoDefInLib() {
                    clearMessages();
                    const start = protocolFileLocationFromSubstring(file, "Array");
                    session.onMessage({
                        seq: 3,
                        type: "request",
                        command: protocol.CommandTypes.DefinitionAndBoundSpan,
                        arguments: start
                    });
                    assert.deepEqual(last(getMessages()), {
                        seq: 0,
                        type: "response",
                        command: protocol.CommandTypes.DefinitionAndBoundSpan,
                        request_seq: 3,
                        success: true,
                        performanceData: undefined,
                        body: {
                            definitions: [{
                                file: "/lib.d.ts",
                                ...protocolTextSpanWithContextFromSubstring({
                                    fileText: libFile.content,
                                    text: "Array",
                                    contextText: "interface Array<T> { length: number; [n: number]: T; }"
                                })
                            }],
                            textSpan: {
                                start: { line: start.line, offset: start.offset },
                                end: { line: start.line, offset: start.offset + "Array".length },
                            }
                        }
                    });
                    verifyLogger();
                }
                // With verbose logging each request emits 3 log messages
                // (info, perf, info) before the response; otherwise only the
                // response itself should be present.
                function verifyLogger() {
                    const messages = getMessages();
                    assert.equal(messages.length, logLevel === server.LogLevel.verbose ? 4 : 1, `Expected ${JSON.stringify(messages)}`);
                    if (logLevel === server.LogLevel.verbose) {
                        verifyLogMessages(messages[0], "info");
                        verifyLogMessages(messages[1], "perf");
                        verifyLogMessages(messages[2], "info");
                    }
                    clearMessages();
                }
                function verifyLogMessages(actual: any, expectedLevel: server.MessageLogLevel) {
                    assert.equal(actual.type, "log");
                    assert.equal(actual.level, expectedLevel);
                }
            }
            it("with logging enabled", () => {
                verify(server.LogLevel.verbose);
            });
            it("with logging disabled", () => {
                verify(/*logLevel*/ undefined);
            });
        });
    });
}
|
<gh_stars>0
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[91],{
/***/ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=script&lang=js&":
/*!*****************************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib??ref--4-0!./node_modules/vue-loader/lib??vue-loader-options!./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=script&lang=js& ***!
\*****************************************************************************************************************************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */ __webpack_exports__["default"] = ({
  // Compiled <script> options of JobWorkflow/StageTypes/Index.vue: two
  // v-selects (shipment type / sub-type) whose option lists are fetched
  // when the component is created.
  data: function data() {
    return {
      // ids collected from the two selects
      form: {
        shipment_type_id: null,
        shipment_sub_type_id: null
      },
      shipmentTypes: null, // options for the "Shipment Type" select
      shipmentSubTypes: null, // options for the "Shipment Sub-Type" select
      selectedShipmentType: null,
      selectedShipmentSubType: null
    };
  },
  methods: {
    // Load shipment type options via the mixin/global getRecord helper.
    getShipments: function getShipments() {
      var _this = this;
      this.getRecord('/api/shipmentTypes', {
        all: true
      }, 'Shipment Sub-Types').then(function (response) {
        _this.shipmentTypes = response.data;
      });
    },
    getShipmentValue: function getShipmentValue(value) {
      this.form.shipment_type_id = value.id;
    },
    clearShipmentInput: function clearShipmentInput() {
      this.selectedShipmentType = null;
    },
    // NOTE(review): fetches the same '/api/shipmentTypes' endpoint as
    // getShipments above — likely intended to hit a sub-types endpoint;
    // confirm against the API before relying on this data.
    getShipmentsSubTypes: function getShipmentsSubTypes() {
      var _this2 = this;
      this.getRecord('/api/shipmentTypes', {
        all: true
      }, 'Shipment Sub-Types').then(function (response) {
        _this2.shipmentSubTypes = response.data;
      });
    },
    getShipmentSubTypeValue: function getShipmentSubTypeValue(value) {
      this.form.shipment_sub_type_id = value.id;
    },
    clearShipmentSubTypeInput: function clearShipmentSubTypeInput() {
      this.selectedShipmentSubType = null;
    }
  },
  // Populate both selects as soon as the component is created.
  created: function created() {
    this.getShipments();
    this.getShipmentsSubTypes();
  }
});
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=template&id=bff90478&":
/*!*********************************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib??vue-loader-options!./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=template&id=bff90478& ***!
\*********************************************************************************************************************************************************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "render", function() { return render; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return staticRenderFns; });
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("div", [
_c("div", { staticClass: "form-group col-md-12" }, [
_vm._m(0),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-12" },
[
_c("ValidationProvider", {
attrs: { rules: "required", name: "Shipment type" },
scopedSlots: _vm._u([
{
key: "default",
fn: function(ref) {
var errors = ref.errors
return [
_c(
"v-select",
{
attrs: { filterable: true, options: _vm.shipmentTypes },
on: {
input: _vm.getShipmentValue,
"search:focus": _vm.clearShipmentInput
},
scopedSlots: _vm._u(
[
{
key: "option",
fn: function(option) {
return [
_c("div", { staticClass: "d-center" }, [
_vm._v(
"\n " +
_vm._s(option.name) +
"\n "
)
])
]
}
},
{
key: "selected-option",
fn: function(option) {
return [
_c(
"div",
{ staticClass: "selected d-center" },
[
_vm._v(
"\n " +
_vm._s(option.name) +
"\n "
)
]
)
]
}
}
],
null,
true
),
model: {
value: _vm.selectedShipmentType,
callback: function($$v) {
_vm.selectedShipmentType = $$v
},
expression: "selectedShipmentType"
}
},
[
_c("template", { slot: "no-options" }, [
_vm._v(
"\n Select shipment type\n "
)
])
],
2
),
_vm._v(" "),
_c("span", { staticClass: "form-control-feedback" }, [
_vm._v(_vm._s(errors[0]))
])
]
}
}
])
})
],
1
)
]),
_vm._v(" "),
_c("div", { staticClass: "form-group col-md-12" }, [
_vm._m(1),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-12" },
[
_c("ValidationProvider", {
attrs: { rules: "required", name: "Shipment sub-type" },
scopedSlots: _vm._u([
{
key: "default",
fn: function(ref) {
var errors = ref.errors
return [
_c(
"v-select",
{
attrs: {
filterable: true,
options: _vm.shipmentSubTypes
},
on: {
input: _vm.getShipmentSubTypeValue,
"search:focus": _vm.clearShipmentSubTypeInput
},
scopedSlots: _vm._u(
[
{
key: "option",
fn: function(option) {
return [
_c("div", { staticClass: "d-center" }, [
_vm._v(
"\n " +
_vm._s(option.name) +
"\n "
)
])
]
}
},
{
key: "selected-option",
fn: function(option) {
return [
_c(
"div",
{ staticClass: "selected d-center" },
[
_vm._v(
"\n " +
_vm._s(option.name) +
"\n "
)
]
)
]
}
}
],
null,
true
),
model: {
value: _vm.selectedShipmentSubType,
callback: function($$v) {
_vm.selectedShipmentSubType = $$v
},
expression: "selectedShipmentSubType"
}
},
[
_c("template", { slot: "no-options" }, [
_vm._v(
"\n Select shipment sub type\n "
)
])
],
2
),
_vm._v(" "),
_c("span", { staticClass: "form-control-feedback" }, [
_vm._v(_vm._s(errors[0]))
])
]
}
}
])
})
],
1
)
])
])
}
var staticRenderFns = [
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("label", { staticClass: "col-md-12 control-label" }, [
_vm._v("\n Shipment Type\n "),
_c("span", { staticClass: "text-danger" }, [_vm._v("*")])
])
},
function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("label", { staticClass: "col-md-12 control-label" }, [
_vm._v("\n Shipment Sub-Type\n "),
_c("span", { staticClass: "text-danger" }, [_vm._v("*")])
])
}
]
render._withStripped = true
/***/ }),
/***/ "./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue":
/*!********************************************************************!*\
!*** ./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue ***!
\********************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _Index_vue_vue_type_template_id_bff90478___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Index.vue?vue&type=template&id=bff90478& */ "./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=template&id=bff90478&");
/* harmony import */ var _Index_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Index.vue?vue&type=script&lang=js& */ "./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport *//* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ "./node_modules/vue-loader/lib/runtime/componentNormalizer.js");
/* normalize component */
var component = Object(_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__["default"])(
_Index_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_Index_vue_vue_type_template_id_bff90478___WEBPACK_IMPORTED_MODULE_0__["render"],
_Index_vue_vue_type_template_id_bff90478___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"],
false,
null,
null,
null
)
/* hot reload */
if (false) { var api; }
component.options.__file = "resources/assets/js/views/JobWorkflow/StageTypes/Index.vue"
/* harmony default export */ __webpack_exports__["default"] = (component.exports);
/***/ }),
/***/ "./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=script&lang=js&":
/*!*********************************************************************************************!*\
!*** ./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=script&lang=js& ***!
\*********************************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Index_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../../node_modules/babel-loader/lib??ref--4-0!../../../../../../node_modules/vue-loader/lib??vue-loader-options!./Index.vue?vue&type=script&lang=js& */ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport */ /* harmony default export */ __webpack_exports__["default"] = (_node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Index_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__["default"]);
/***/ }),
/***/ "./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=template&id=bff90478&":
/*!***************************************************************************************************!*\
!*** ./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=template&id=bff90478& ***!
\***************************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Index_vue_vue_type_template_id_bff90478___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../../../../node_modules/vue-loader/lib??vue-loader-options!./Index.vue?vue&type=template&id=bff90478& */ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/JobWorkflow/StageTypes/Index.vue?vue&type=template&id=bff90478&");
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "render", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Index_vue_vue_type_template_id_bff90478___WEBPACK_IMPORTED_MODULE_0__["render"]; });
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Index_vue_vue_type_template_id_bff90478___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"]; });
/***/ })
}]); |
<filename>smarthome-kit/home_automation/remote_hvac/module_display.h<gh_stars>1-10
/*
display.h
-----------------------------------------------------------------------------
Copyright (c) 2017, MikroElektronika - http://www.mikroe.com
All rights reserved.
----------------------------------------------------------------------------- */
/**
\file display.h
\brief DISPLAY Module
\defgroup DISPLAY
\brief DISPLAY Module
\{
| Global Library Prefix | **DISPLAY** |
|:---------------------:|:---------------------:|
| Version | **1.0.0** |
| Date | **Feb 2019.** |
| Developer | **<NAME>** |
---
**Version Info :**
- **1.0.0** Module Created [<NAME>]
*/
/* -------------------------------------------------------------------------- */
#ifndef _DISPLAY_H
#define _DISPLAY_H
#include "../module_common.h"
#include "module_hvac.h"
#include "module_display_ui_setup.h"
/* -------------------------------------------------------------------- TYPES */
/* Runtime state container for the DISPLAY module. */
typedef struct
{
    MODULE_STATE state;  /* current module state-machine value */
} DISPLAY_DATA;
/* ---------------------------------------------------------------- FUNCTIONS */
#ifdef __cplusplus
extern "C" {
#endif
/**
\brief Display Initialization Routine.
This function initializes the Display module.
*/
void DISPLAY_Initialize ( void );
/**
\brief Display Tasks Function
This routine is the Display module tasks function.
*/
void DISPLAY_Tasks ( void );
void DISPLAY_ISR_Handler ( void );
#ifdef __cplusplus
}
#endif
#endif
/// \}
/* -------------------------------------------------------------------------- */
/*
display.h
Copyright (c) 2017, MikroElektronika - http://www.mikroe.com
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. All advertising materials mentioning features or use of this software
must display the following acknowledgement:
This product includes software developed by the MikroElektronika.
4. Neither the name of the MikroElektronika nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY MIKROELEKTRONIKA ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL MIKROELEKTRONIKA BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------------- */
|
# Recreate the process schema user and grant it the privileges the common
# scripts need; output is appended to the user-creation log.
#
# Fix: '#' is NOT a comment character inside the SQL*Plus heredoc (it is the
# default SQLPREFIX character, so '#'-prefixed lines are executed immediately
# or rejected as unknown commands). Disabled statements now use the SQL '--'
# comment syntax instead.
sqlplus $SYS_USER/$SYS_PASS@$CONNECT_STRING as sysdba <<EOF >> $CRPSCRIPTS_DIR/logs/commonscriptsUserCreation.log
drop user $PROCESS_SCHEMA_NAME cascade;
create user $PROCESS_SCHEMA_NAME identified by $PROCESS_SCHEMA_PASS default tablespace $PROCESS_SCHEMA_TABLESPACE;
grant resource to $PROCESS_SCHEMA_NAME;
grant connect to $PROCESS_SCHEMA_NAME;
-- grant dba to $PROCESS_SCHEMA_NAME;
grant execute on dbms_lock to $PROCESS_SCHEMA_NAME;
grant execute on dbms_utility to $PROCESS_SCHEMA_NAME;
grant execute on DBMS_PIPE to $PROCESS_SCHEMA_NAME;
grant execute on dbms_system to $PROCESS_SCHEMA_NAME;
GRANT SELECT ON V_\$INSTANCE TO $PROCESS_SCHEMA_NAME;
GRANT SELECT ON GV_\$SESSION TO $PROCESS_SCHEMA_NAME;
-- GRANT ALTER SYSTEM TO $PROCESS_SCHEMA_NAME;
EOF
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.